From b93e2ce2c072e2d0f80a3f05b7a7ada34e6b80d5 Mon Sep 17 00:00:00 2001 From: Daniel Weindl Date: Wed, 7 Sep 2022 12:22:38 +0200 Subject: [PATCH 1/7] Skip additional tests under valgrind (#1863) Valgrind runs are taking too long again. * Let's skip some more tests under valgrind. (`python/tests/test_events.py::test_models[events_plus_heavisides]` alone takes ~1.5h) * Split Valgrind C++ and Python tests into 2 parallel jobs * Clean up running-under-valgrind test logic, to also work without GHA or setting extra env vars Valgrind jobs now finish in about 30min instead of 3h. --- .github/workflows/test_valgrind.yml | 48 +++++++++++++++++-- pytest.ini | 2 +- python/amici/testing.py | 19 +++++++- python/tests/test_bngl.py | 5 +- .../test_conserved_quantities_demartino.py | 18 +++---- .../tests/test_conserved_quantities_rref.py | 7 ++- python/tests/test_edata.py | 3 +- python/tests/test_events.py | 4 +- python/tests/test_misc.py | 8 ++-- python/tests/test_ode_export.py | 5 +- python/tests/test_parameter_mapping.py | 7 ++- python/tests/test_petab_import.py | 3 ++ python/tests/test_petab_simulate.py | 4 +- python/tests/test_pregenerated_models.py | 5 +- python/tests/test_pysb.py | 10 ++-- python/tests/test_sbml_import.py | 14 ++++-- .../test_sbml_import_special_functions.py | 9 ++-- 17 files changed, 124 insertions(+), 47 deletions(-) diff --git a/.github/workflows/test_valgrind.yml b/.github/workflows/test_valgrind.yml index 646506890e..4f560175af 100644 --- a/.github/workflows/test_valgrind.yml +++ b/.github/workflows/test_valgrind.yml @@ -1,4 +1,4 @@ -name: C++ Tests +name: Valgrind tests on: push: branches: @@ -12,15 +12,15 @@ on: - cron: '48 4 * * *' jobs: - valgrind: - name: Tests Valgrind + valgrind_cpp: + name: Valgrind C++ # TODO: prepare image with more deps preinstalled runs-on: ubuntu-22.04 strategy: matrix: - python-version: [ "3.8" ] + python-version: [ "3.9" ] env: ENABLE_AMICI_DEBUGGING: "TRUE" @@ -57,6 +57,46 @@ jobs: run: | scripts/run-valgrind-cpp.sh + valgrind_python: + name: Valgrind Python + + runs-on: ubuntu-22.04 + + strategy: + matrix: + python-version: [ "3.9" ] + + env: + ENABLE_AMICI_DEBUGGING: "TRUE" + + steps: + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - uses: actions/checkout@master + - run: git fetch --prune --unshallow + + # install amici dependencies + - name: apt + run: | + sudo apt-get update \ + && sudo apt-get install -y \ + cmake \ + g++ \ + libatlas-base-dev \ + libboost-serialization-dev \ + libhdf5-serial-dev \ + python3-venv \ + swig \ + valgrind \ + libboost-math-dev + + - name: Build AMICI + run: | + scripts/buildAll.sh + - name: Install python package run: | scripts/installAmiciSource.sh diff --git a/pytest.ini b/pytest.ini index 315ef3eef6..4bdaa3c9a1 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,6 +1,6 @@ [pytest] -addopts = -vv +addopts = -vv --strict-markers filterwarnings = # hundreds of SBML <=5.17 warnings diff --git a/python/amici/testing.py b/python/amici/testing.py index 7fa8d950aa..de1f69a1cc 100644 --- a/python/amici/testing.py +++ b/python/amici/testing.py @@ -1,8 +1,25 @@ """Test support functions""" - +import os import sys from tempfile import TemporaryDirectory +import pytest + +# Indicates whether we are currently running under valgrind +# see also https://stackoverflow.com/a/62364698 +ON_VALGRIND = any( + needle in haystack + for needle in ('valgrind', 'vgpreload') + for haystack in (os.getenv("LD_PRELOAD", ""), + 
os.getenv("DYLD_INSERT_LIBRARIES", "")) +) + +# Decorator to skip certain tests when we are under valgrind +# (those that are independent of the AMICI C++ parts, or that take too long, +# or that test performance) +skip_on_valgrind = pytest.mark.skipif( + ON_VALGRIND, reason="Takes too long or is meaningless under valgrind") + class TemporaryDirectoryWinSafe(TemporaryDirectory): """TemporaryDirectory that will not raise if cleanup fails. diff --git a/python/tests/test_bngl.py b/python/tests/test_bngl.py index 722359236d..8c409ddae6 100644 --- a/python/tests/test_bngl.py +++ b/python/tests/test_bngl.py @@ -9,6 +9,8 @@ from amici.bngl_import import bngl2amici from pysb.simulator import ScipyOdeSimulator from pysb.importers.bngl import model_from_bngl +from amici.testing import skip_on_valgrind + tests = [ 'CaOscillate_Func', 'deleteMolecules', 'empty_compartments_block', @@ -21,8 +23,7 @@ ] -@pytest.mark.skipif(os.environ.get('GITHUB_JOB') == 'valgrind', - reason="Takes too long under valgrind") +@skip_on_valgrind @pytest.mark.parametrize('example', tests) def test_compare_to_pysb_simulation(example): diff --git a/python/tests/test_conserved_quantities_demartino.py b/python/tests/test_conserved_quantities_demartino.py index 432fd88dd5..476d456411 100644 --- a/python/tests/test_conserved_quantities_demartino.py +++ b/python/tests/test_conserved_quantities_demartino.py @@ -12,6 +12,8 @@ compute_moiety_conservation_laws ) from amici.logging import get_logger, log_execution_time +from amici.testing import skip_on_valgrind + logger = get_logger(__name__) @@ -58,8 +60,7 @@ def data_demartino2014(): return S, row_names -@pytest.mark.skipif(os.environ.get('GITHUB_JOB') == 'valgrind', - reason="Python-only") +@skip_on_valgrind def test_kernel_demartino2014(data_demartino2014, quiet=True): """Invoke test case and benchmarking for De Martino's published results for E. coli network. Kernel-only.""" @@ -96,8 +97,7 @@ def test_kernel_demartino2014(data_demartino2014, quiet=True): f"Moiety #{i + 1} failed for test case (De Martino et al.)" -@pytest.mark.skipif(os.environ.get('GITHUB_JOB') == 'valgrind', - reason="Python-only") +@skip_on_valgrind def test_fill_demartino2014(data_demartino2014): """Test creation of interaction matrix""" stoichiometric_list, row_names = data_demartino2014 @@ -190,8 +190,7 @@ def test_fill_demartino2014(data_demartino2014): assert not any(fields[len(ref_for_fields):]) -@pytest.mark.skipif(os.environ.get('GITHUB_JOB') == 'valgrind', - reason="Python-only") +@skip_on_valgrind def test_compute_moiety_conservation_laws_demartino2014( data_demartino2014, quiet=False ): @@ -217,9 +216,7 @@ def test_compute_moiety_conservation_laws_demartino2014( return runtime -@pytest.mark.skipif( - os.environ.get('GITHUB_JOB') == 'valgrind', - reason="Performance test under valgrind is not meaningful.") +@skip_on_valgrind @log_execution_time("Detecting moiety conservation laws", logger) def test_cl_detect_execution_time(data_demartino2014): """Test execution time stays within a certain predefined bound. @@ -239,8 +236,7 @@ def test_cl_detect_execution_time(data_demartino2014): assert runtime < max_time_seconds, "Took too long" -@pytest.mark.skipif(os.environ.get('GITHUB_JOB') == 'valgrind', - reason="Python-only") +@skip_on_valgrind def test_compute_moiety_conservation_laws_simple(): """Test a simple example, ensure the conservation laws are identified reliably. 
Requires the Monte Carlo to identify all.""" diff --git a/python/tests/test_conserved_quantities_rref.py b/python/tests/test_conserved_quantities_rref.py index 7a28f78b9a..2368b06b8a 100644 --- a/python/tests/test_conserved_quantities_rref.py +++ b/python/tests/test_conserved_quantities_rref.py @@ -5,6 +5,7 @@ import sympy as sp from amici.conserved_quantities_rref import nullspace_by_rref, pivots, rref +from amici.testing import skip_on_valgrind def random_matrix_generator(min_dim, max_dim, count): @@ -14,8 +15,7 @@ def random_matrix_generator(min_dim, max_dim, count): yield np.random.rand(rows, cols) -@pytest.mark.skipif(os.environ.get('GITHUB_JOB') == 'valgrind', - reason="Python-only") +@skip_on_valgrind @pytest.mark.parametrize("mat", random_matrix_generator(0, 10, 200)) def test_rref(mat): """Create some random matrices and compare output of ``rref`` and @@ -28,8 +28,7 @@ def test_rref(mat): assert np.allclose(expected_rref, actual_rref) -@pytest.mark.skipif(os.environ.get('GITHUB_JOB') == 'valgrind', - reason="Python-only") +@skip_on_valgrind @pytest.mark.parametrize("mat", random_matrix_generator(0, 50, 50)) def test_nullspace_by_rref(mat): """Test ``nullspace_by_rref`` on a number of random matrices and compare diff --git a/python/tests/test_edata.py b/python/tests/test_edata.py index 184f5e2ce1..7df799d4d9 100644 --- a/python/tests/test_edata.py +++ b/python/tests/test_edata.py @@ -2,10 +2,11 @@ import numpy as np import amici +from amici.testing import skip_on_valgrind from test_sbml_import import model_units_module - +@skip_on_valgrind def test_edata_sensi_unscaling(model_units_module): """ ExpData parameters should be used for unscaling initial state diff --git a/python/tests/test_events.py b/python/tests/test_events.py index ad264ab0a1..a6f5334b83 100644 --- a/python/tests/test_events.py +++ b/python/tests/test_events.py @@ -7,10 +7,11 @@ from util import (check_trajectories_with_forward_sensitivities, check_trajectories_without_sensitivities, create_amici_model, create_sbml_model) +from amici.testing import skip_on_valgrind @pytest.fixture(params=[ - 'events_plus_heavisides', + pytest.param('events_plus_heavisides', marks=skip_on_valgrind), 'nested_events', ]) def model(request): @@ -214,7 +215,6 @@ def sx_pected(t, parameters): ) - def model_definition_nested_events(): """Test model for state- and parameter-dependent heavisides. diff --git a/python/tests/test_misc.py b/python/tests/test_misc.py index c644aacade..663b40ccdc 100644 --- a/python/tests/test_misc.py +++ b/python/tests/test_misc.py @@ -3,14 +3,13 @@ import os import subprocess -import libsbml import pytest import sympy as sp import amici from amici.ode_export import _custom_pow_eval_derivative, _monkeypatched, \ smart_subs_dict -from amici.testing import TemporaryDirectoryWinSafe as TemporaryDirectory +from amici.testing import skip_on_valgrind def test_parameter_scaling_from_int_vector(): @@ -27,7 +26,7 @@ def test_parameter_scaling_from_int_vector(): assert scale_vector[1] == amici.ParameterScaling.ln assert scale_vector[2] == amici.ParameterScaling.none - +@skip_on_valgrind def test_hill_function_dwdx(): """Kinetic laws with Hill functions, may lead to NaNs in the Jacobian if involved states are zero if not properly arranged symbolically. 
@@ -49,6 +48,7 @@ def test_hill_function_dwdx(): _ = str(res) +@skip_on_valgrind @pytest.mark.skipif(os.environ.get('AMICI_SKIP_CMAKE_TESTS', '') == 'TRUE', reason='skipping cmake based test') def test_cmake_compilation(sbml_example_presimulation_module): @@ -64,6 +64,7 @@ def test_cmake_compilation(sbml_example_presimulation_module): stdout=subprocess.PIPE, stderr=subprocess.PIPE) +@skip_on_valgrind def test_smart_subs_dict(): expr_str = 'c + d' subs_dict = { @@ -85,6 +86,7 @@ def test_smart_subs_dict(): assert sp.simplify(result_reverse - expected_reverse).is_zero +@skip_on_valgrind def test_monkeypatch(): t = sp.Symbol('t') n = sp.Symbol('n') diff --git a/python/tests/test_ode_export.py b/python/tests/test_ode_export.py index 83843348e8..180425c023 100644 --- a/python/tests/test_ode_export.py +++ b/python/tests/test_ode_export.py @@ -2,8 +2,9 @@ import sympy as sp from amici.cxxcodeprinter import AmiciCxxCodePrinter +from amici.testing import skip_on_valgrind - +@skip_on_valgrind def test_csc_matrix(): """Test sparse CSC matrix creation""" printer = AmiciCxxCodePrinter() @@ -19,6 +20,7 @@ def test_csc_matrix(): assert str(sparse_matrix) == 'Matrix([[da1_db1, 0], [da2_db1, da2_db2]])' +@skip_on_valgrind def test_csc_matrix_empty(): """Test sparse CSC matrix creation for empty matrix""" printer = AmiciCxxCodePrinter() @@ -33,6 +35,7 @@ def test_csc_matrix_empty(): assert str(sparse_matrix) == 'Matrix(0, 0, [])' +@skip_on_valgrind def test_csc_matrix_vector(): """Test sparse CSC matrix creation from matrix slice""" printer = AmiciCxxCodePrinter() diff --git a/python/tests/test_parameter_mapping.py b/python/tests/test_parameter_mapping.py index 725e7e78cd..e2663f4409 100644 --- a/python/tests/test_parameter_mapping.py +++ b/python/tests/test_parameter_mapping.py @@ -5,10 +5,10 @@ from amici.parameter_mapping import (ParameterMapping, ParameterMappingForCondition) +from amici.testing import skip_on_valgrind -@pytest.mark.skipif(os.environ.get('GITHUB_JOB') == 'valgrind', - reason="Python-only") +@skip_on_valgrind def test_parameter_mapping_for_condition_default_args(): """Check we can initialize the mapping with default arguments.""" @@ -37,8 +37,7 @@ def test_parameter_mapping_for_condition_default_args(): expected_scale_map_sim_fix -@pytest.mark.skipif(os.environ.get('GITHUB_JOB') == 'valgrind', - reason="Python-only") +@skip_on_valgrind def test_parameter_mapping(): """Test :class:``amici.parameter_mapping.ParameterMapping``.""" diff --git a/python/tests/test_petab_import.py b/python/tests/test_petab_import.py index 907ac786a1..d8dfe06c7a 100644 --- a/python/tests/test_petab_import.py +++ b/python/tests/test_petab_import.py @@ -3,6 +3,8 @@ import libsbml import pytest import pandas as pd +from amici.testing import skip_on_valgrind + petab = pytest.importorskip("petab") SbmlModel = pytest.importorskip("petab.models.sbml_model.SbmlModel") @@ -35,6 +37,7 @@ def simple_sbml_model(): return document, model +@skip_on_valgrind def test_get_fixed_parameters(simple_sbml_model): """Check for correct identification of fixed parameters: diff --git a/python/tests/test_petab_simulate.py b/python/tests/test_petab_simulate.py index a992673702..1a35e36c2c 100644 --- a/python/tests/test_petab_simulate.py +++ b/python/tests/test_petab_simulate.py @@ -1,5 +1,4 @@ """Tests for petab_simulate.py.""" - from pathlib import Path import pytest import tempfile @@ -7,6 +6,7 @@ from amici.petab_simulate import PetabSimulator import petab import petabtests +from amici.testing import skip_on_valgrind @pytest.fixture @@ 
-18,6 +18,7 @@ def petab_problem() -> petab.Problem: return petab.Problem.from_yaml(str(petab_yaml_path)) +@skip_on_valgrind def test_simulate_without_noise(petab_problem): """Test the reproducibility of simulation without noise.""" simulator = PetabSimulator(petab_problem) @@ -37,6 +38,7 @@ def test_simulate_without_noise(petab_problem): assert synthetic_data_df_c.equals(synthetic_data_df_a) +@skip_on_valgrind def test_subset_call(petab_problem): """ Test the ability to customize AMICI methods, specifically: diff --git a/python/tests/test_pregenerated_models.py b/python/tests/test_pregenerated_models.py index 77d91714c3..d63e4b924b 100755 --- a/python/tests/test_pregenerated_models.py +++ b/python/tests/test_pregenerated_models.py @@ -11,6 +11,8 @@ import numpy as np import pytest from amici.gradient_check import check_derivatives, _check_results +from amici.testing import skip_on_valgrind + cpp_test_dir = Path(__file__).parents[2] / 'tests' / 'cpp' options_file = str(cpp_test_dir / 'testOptions.h5') @@ -22,8 +24,7 @@ for case in list(expected_results[sub_test].keys())] -@pytest.mark.skipif(os.environ.get('GITHUB_JOB') == 'valgrind', - reason="Takes too long under valgrind") +@skip_on_valgrind @pytest.mark.skipif(os.environ.get('AMICI_SKIP_CMAKE_TESTS', '') == 'TRUE', reason='skipping cmake based test') @pytest.mark.parametrize("sub_test,case", model_cases) diff --git a/python/tests/test_pysb.py b/python/tests/test_pysb.py index 21b3e92224..9be4b6b086 100644 --- a/python/tests/test_pysb.py +++ b/python/tests/test_pysb.py @@ -18,10 +18,10 @@ from pysb.simulator import ScipyOdeSimulator from amici.gradient_check import check_derivatives +from amici.testing import skip_on_valgrind -@pytest.mark.skipif(os.environ.get('GITHUB_JOB') == 'valgrind', - reason="Takes too long under valgrind") +@skip_on_valgrind def test_compare_to_sbml_import(pysb_example_presimulation_module, sbml_example_presimulation_module): # -------------- PYSB ----------------- @@ -101,8 +101,7 @@ def test_compare_to_sbml_import(pysb_example_presimulation_module, ] -@pytest.mark.skipif(os.environ.get('GITHUB_JOB') == 'valgrind', - reason="Takes too long under valgrind") +@skip_on_valgrind @pytest.mark.parametrize('example', pysb_models + custom_models) def test_compare_to_pysb_simulation(example): @@ -224,6 +223,7 @@ def get_results(model, edata): return amici.runAmiciSimulation(model, solver, edata) +@skip_on_valgrind def test_names_and_ids(pysb_example_presimulation_module): model_pysb = pysb_example_presimulation_module.getModel() expected = { @@ -268,6 +268,7 @@ def test_names_and_ids(pysb_example_presimulation_module): assert actual == cur_expected +@skip_on_valgrind def test_heavyside_and_special_symbols(): pysb.SelfExporter.cleanup() # reset pysb pysb.SelfExporter.do_export = True @@ -295,6 +296,7 @@ def test_heavyside_and_special_symbols(): assert amici_model.ne +@skip_on_valgrind # TODO: remove me @pytest.mark.skipif( not hasattr(pysb, 'EnergyPattern'), diff --git a/python/tests/test_sbml_import.py b/python/tests/test_sbml_import.py index 39f03d25b3..65a86062cb 100644 --- a/python/tests/test_sbml_import.py +++ b/python/tests/test_sbml_import.py @@ -6,14 +6,16 @@ from pathlib import Path from urllib.request import urlopen -import amici import libsbml import numpy as np import pytest +from numpy.testing import assert_allclose, assert_array_equal + +import amici from amici.gradient_check import check_derivatives from amici.sbml_import import SbmlImporter -from amici.testing import TemporaryDirectoryWinSafe as 
TemporaryDirectory -from numpy.testing import assert_allclose, assert_array_equal +from amici.testing import TemporaryDirectoryWinSafe as TemporaryDirectory, \ + skip_on_valgrind @pytest.fixture @@ -56,6 +58,7 @@ def test_sbml2amici_no_observables(simple_sbml_model): assert hasattr(module_module, 'getModel') +@skip_on_valgrind def test_sbml2amici_nested_observables_fail(simple_sbml_model): """Test model generation works for model without observables""" sbml_doc, sbml_model = simple_sbml_model @@ -129,6 +132,7 @@ def observable_dependent_error_model(simple_sbml_model): module_path=tmpdir) +@skip_on_valgrind def test_sbml2amici_observable_dependent_error(observable_dependent_error_model): """Check gradients for model with observable-dependent error""" model_module = observable_dependent_error_model @@ -375,6 +379,7 @@ def model_test_likelihoods(): shutil.rmtree(outdir, ignore_errors=True) +@skip_on_valgrind def test_likelihoods(model_test_likelihoods): """Test the custom noise distributions used to define cost functions.""" model = model_test_likelihoods.getModel() @@ -430,6 +435,7 @@ def test_likelihoods(model_test_likelihoods): ) +@skip_on_valgrind def test_likelihoods_error(): """Test whether wrong inputs lead to expected errors.""" sbml_file = os.path.join(os.path.dirname(__file__), '..', @@ -455,6 +461,7 @@ def test_likelihoods_error(): ) +@skip_on_valgrind def test_units(model_units_module): """ Test whether SBML import works for models using sbml:units annotations. @@ -467,6 +474,7 @@ def test_units(model_units_module): assert rdata['status'] == amici.AMICI_SUCCESS +@skip_on_valgrind @pytest.mark.skipif(os.name == 'nt', reason='Avoid `CERTIFICATE_VERIFY_FAILED` error') def test_sympy_exp_monkeypatch(): diff --git a/python/tests/test_sbml_import_special_functions.py b/python/tests/test_sbml_import_special_functions.py index 7b7cb7a8f7..5f3207878c 100644 --- a/python/tests/test_sbml_import_special_functions.py +++ b/python/tests/test_sbml_import_special_functions.py @@ -7,14 +7,16 @@ import os import shutil -import amici import numpy as np -from scipy.special import loggamma import pytest +from scipy.special import loggamma + +import amici from amici.gradient_check import check_derivatives +from amici.testing import skip_on_valgrind -@pytest.fixture +@pytest.fixture(scope="session") def model_special_likelihoods(): """Test model for special likelihood functions.""" # load sbml model @@ -50,6 +52,7 @@ def model_special_likelihoods(): shutil.rmtree(outdir, ignore_errors=True) +@skip_on_valgrind # FD check fails occasionally, so give some extra tries @pytest.mark.flaky(reruns=5) def test_special_likelihoods(model_special_likelihoods): From 81f8b431e1b1c8e3e7db42f2f837a053206092a3 Mon Sep 17 00:00:00 2001 From: Daniel Weindl Date: Sat, 10 Sep 2022 15:55:54 +0200 Subject: [PATCH 2/7] Fix extraction of common subexpressions (#1865) Got broken #1859 (https://github.com/AMICI-dev/AMICI/commit/cc5f7cc681b393f9500eb6284823528ce83b25e6#diff-44c38f8a81d42beabd5ab6dfd5db9a33bda06ded71f7cbfa5a47a7a1b8fd824eL3028), so that `AMICI_EXTRACT_CSE=1` had no effect. That change also broke the use of human-readable lhs symbols in generated code. Also fixed here. Added tests for both. 
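
For illustration, a minimal sketch of how the restored `AMICI_EXTRACT_CSE`
behavior can be checked from Python; `model.xml` and the model name are
placeholders, not part of this patch (the added `test_code_gen_uses_cse`
below contains the exact assertions used in CI):

```python
import os
from pathlib import Path
from tempfile import TemporaryDirectory

import amici

# Opt in to common-subexpression extraction; the flag is read from the
# environment during code generation (commit message: AMICI_EXTRACT_CSE=1).
os.environ["AMICI_EXTRACT_CSE"] = "1"

# Placeholder SBML file and model name, for illustration only.
sbml_importer = amici.SbmlImporter("model.xml")
with TemporaryDirectory() as outdir:
    sbml_importer.sbml2amici(model_name="my_model", output_dir=outdir,
                             compile=False)
    # With this fix, the generated sources again contain named CSE
    # intermediates; the added test asserts this for the example
    # steady-state model.
    xdot = Path(outdir, "my_model_xdot.cpp").read_text()
    assert "__amici_cse_0 = " in xdot
```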
--- python/amici/cxxcodeprinter.py | 7 ++-- python/amici/ode_export.py | 9 +++++ python/tests/test_sbml_import.py | 56 +++++++++++++++++++++++++------- 3 files changed, 58 insertions(+), 14 deletions(-) diff --git a/python/amici/cxxcodeprinter.py b/python/amici/cxxcodeprinter.py index 0f79591f10..06ba285977 100644 --- a/python/amici/cxxcodeprinter.py +++ b/python/amici/cxxcodeprinter.py @@ -107,7 +107,7 @@ def _get_sym_lines_symbols( def format_regular_line(symbol, math, index): return ( - f'{indent}{symbol} = {self.doprint(math)};' + f'{indent}{self.doprint(symbol)} = {self.doprint(math)};' f' // {variable}[{index}]'.replace('\n', '\n' + indent) ) @@ -136,10 +136,11 @@ def format_regular_line(symbol, math, index): }) symbol_to_idx = {sym: idx for idx, sym in enumerate(symbols)} - def format_line(symbol): + def format_line(symbol: sp.Symbol): math = expr_dict[symbol] if str(symbol).startswith(cse_sym_prefix): - return f'{indent}const realtype {symbol} '\ + return f'{indent}const realtype ' \ + f'{self.doprint(symbol)} ' \ f'= {self.doprint(math)};' elif math not in [0, 0.0]: return format_regular_line( diff --git a/python/amici/ode_export.py b/python/amici/ode_export.py index e055bd13ea..23cef1f500 100644 --- a/python/amici/ode_export.py +++ b/python/amici/ode_export.py @@ -2978,6 +2978,15 @@ def _get_function_body( iterator = 'iy' lines.extend(get_switch_statement(iterator, cases, 1)) + elif function in self.model.sym_names() \ + and function not in non_unique_id_symbols: + if function in sparse_functions: + symbols = self.model.sparsesym(function) + else: + symbols = self.model.sym(function) + lines += self.model._code_printer._get_sym_lines_symbols( + symbols, equations, function, 4) + else: lines += self.model._code_printer._get_sym_lines_array( equations, function, 4) diff --git a/python/tests/test_sbml_import.py b/python/tests/test_sbml_import.py index 65a86062cb..7090075314 100644 --- a/python/tests/test_sbml_import.py +++ b/python/tests/test_sbml_import.py @@ -17,6 +17,10 @@ from amici.testing import TemporaryDirectoryWinSafe as TemporaryDirectory, \ skip_on_valgrind +EXAMPLES_DIR = Path(__file__).parent / '..' / 'examples' +STEADYSTATE_MODEL_FILE = (EXAMPLES_DIR / 'example_steadystate' + / 'model_steadystate_scaled.xml') + @pytest.fixture def simple_sbml_model(): @@ -163,9 +167,7 @@ def test_sbml2amici_observable_dependent_error(observable_dependent_error_model) @pytest.fixture(scope='session') def model_steadystate_module(): - sbml_file = os.path.join(os.path.dirname(__file__), '..', - 'examples', 'example_steadystate', - 'model_steadystate_scaled.xml') + sbml_file = STEADYSTATE_MODEL_FILE sbml_importer = amici.SbmlImporter(sbml_file) observables = amici.assignmentRules2observables( @@ -192,8 +194,7 @@ def model_steadystate_module(): @pytest.fixture(scope='session') def model_units_module(): - sbml_file = Path(__file__).parent / '..' 
/ 'examples' \ - / 'example_units' / 'model_units.xml' + sbml_file = EXAMPLES_DIR / 'example_units' / 'model_units.xml' module_name = 'test_model_units' sbml_importer = amici.SbmlImporter(sbml_file) @@ -339,9 +340,7 @@ def test_solver_reuse(model_steadystate_module): def model_test_likelihoods(): """Test model for various likelihood functions.""" # load sbml model - sbml_file = os.path.join(os.path.dirname(__file__), '..', - 'examples', 'example_steadystate', - 'model_steadystate_scaled.xml') + sbml_file = STEADYSTATE_MODEL_FILE sbml_importer = amici.SbmlImporter(sbml_file) # define observables @@ -438,9 +437,7 @@ def test_likelihoods(model_test_likelihoods): @skip_on_valgrind def test_likelihoods_error(): """Test whether wrong inputs lead to expected errors.""" - sbml_file = os.path.join(os.path.dirname(__file__), '..', - 'examples', 'example_steadystate', - 'model_steadystate_scaled.xml') + sbml_file = STEADYSTATE_MODEL_FILE sbml_importer = amici.SbmlImporter(sbml_file) # define observables @@ -571,3 +568,40 @@ def _test_set_parameters_by_dict(model_module): assert model.getParameterByName(change_par_name) == new_par_val model.setParameterByName(change_par_name, old_par_val) assert model.getParameters() == old_parameter_values + + +@pytest.mark.parametrize("extract_cse", [True, False]) +def test_code_gen_uses_cse(extract_cse): + """Check that code generation honors AMICI_EXTRACT_CSE""" + old_environ = os.environ.copy() + try: + os.environ["AMICI_EXTRACT_CSE"] = str(extract_cse) + sbml_importer = amici.SbmlImporter(STEADYSTATE_MODEL_FILE) + model_name = "test_code_gen_uses_cse" + with TemporaryDirectory() as tmpdir: + sbml_importer.sbml2amici( + model_name=model_name, + compile=False, + generate_sensitivity_code=False, + output_dir = tmpdir + ) + xdot = Path(tmpdir, f'{model_name}_xdot.cpp').read_text() + assert ("__amici_cse_0 = " in xdot) == extract_cse + finally: + os.environ = old_environ + + +def test_code_gen_uses_lhs_symbol_ids(): + """Check that code generation uses symbol IDs instead of plain array + indices""" + sbml_importer = amici.SbmlImporter(STEADYSTATE_MODEL_FILE) + model_name = "test_code_gen_uses_lhs_symbol_ids" + with TemporaryDirectory() as tmpdir: + sbml_importer.sbml2amici( + model_name=model_name, + compile=False, + generate_sensitivity_code=False, + output_dir=tmpdir + ) + dwdx = Path(tmpdir, f'{model_name}_dwdx.cpp').read_text() + assert "dobservable_x1_dx1 = " in dwdx From 779624b090ed0c494cab014ba4d6e8ac7e5509c9 Mon Sep 17 00:00:00 2001 From: Daniel Weindl Date: Sat, 10 Sep 2022 16:21:02 +0200 Subject: [PATCH 3/7] Add function to convert `ReturnData::status` flags to string (#1864) --- include/amici/amici.h | 9 +++++++++ include/amici/defines.h | 5 ++++- src/amici.cpp | 31 +++++++++++++++++++++++++++++++ tests/cpp/unittests/testMisc.cpp | 6 ++++++ 4 files changed, 50 insertions(+), 1 deletion(-) diff --git a/include/amici/amici.h b/include/amici/amici.h index 12e49c70f1..d919bbbc59 100644 --- a/include/amici/amici.h +++ b/include/amici/amici.h @@ -132,6 +132,15 @@ std::vector> runAmiciSimulations(Solver const &solver, const std::vector &edatas, Model const &model, bool failfast, int num_threads); + +/** + * @brief Get the string representation of the given simulation status code + * (see ReturnData::status). + * @param return_code + * @return Name of the variable representing this status code. 
+ */ +std::string simulation_status_to_str(int status); + } // namespace amici #endif /* amici_h */ diff --git a/include/amici/defines.h b/include/amici/defines.h index 94768fc9b5..e8321e3714 100644 --- a/include/amici/defines.h +++ b/include/amici/defines.h @@ -60,7 +60,10 @@ constexpr double pi = M_PI; constexpr int AMICI_ONEOUTPUT= 5; -/* Return codes */ +// Return codes +// +// NOTE: When adding / removing / renaming return codes, +// please update simulation_status_to_str_map in amici.h constexpr int AMICI_RECOVERABLE_ERROR= 1; constexpr int AMICI_UNRECOVERABLE_ERROR= -10; constexpr int AMICI_TOO_MUCH_WORK= -1; diff --git a/src/amici.cpp b/src/amici.cpp index c19337afa2..934e91dfc0 100644 --- a/src/amici.cpp +++ b/src/amici.cpp @@ -18,6 +18,7 @@ #include #include #include +#include #include #include @@ -43,6 +44,24 @@ static_assert(std::is_same::value, namespace amici { +std::map simulation_status_to_str_map = { + {AMICI_RECOVERABLE_ERROR, "AMICI_RECOVERABLE_ERROR"}, + {AMICI_UNRECOVERABLE_ERROR, "AMICI_UNRECOVERABLE_ERROR"}, + {AMICI_TOO_MUCH_WORK, "AMICI_TOO_MUCH_WORK"}, + {AMICI_TOO_MUCH_ACC, "AMICI_TOO_MUCH_ACC"}, + {AMICI_ERR_FAILURE, "AMICI_ERR_FAILURE"}, + {AMICI_CONV_FAILURE, "AMICI_CONV_FAILURE"}, + {AMICI_RHSFUNC_FAIL, "AMICI_RHSFUNC_FAIL"}, + {AMICI_ILL_INPUT, "AMICI_ILL_INPUT"}, + {AMICI_ERROR, "AMICI_ERROR"}, + {AMICI_NO_STEADY_STATE, "AMICI_NO_STEADY_STATE"}, + {AMICI_DAMPING_FACTOR_ERROR, "AMICI_DAMPING_FACTOR_ERROR"}, + {AMICI_SINGULAR_JACOBIAN, "AMICI_SINGULAR_JACOBIAN"}, + {AMICI_NOT_IMPLEMENTED, "AMICI_NOT_IMPLEMENTED"}, + {AMICI_MAX_TIME_EXCEEDED, "AMICI_MAX_TIME_EXCEEDED"}, + {AMICI_SUCCESS, "AMICI_SUCCESS"}, +}; + /** AMICI default application context, kept around for convenience for using * amici::runAmiciSimulation or instantiating Solver and Model without special * needs. @@ -320,4 +339,16 @@ AmiciApplication::errorF(const char* identifier, const char* format, ...) const error(identifier, str); } +std::string simulation_status_to_str(int status) +{ + try { + return simulation_status_to_str_map.at(status); + } catch (std::out_of_range const&) { + // Missing mapping - terminate if this is a debug build, + // but show the number if non-debug. + gsl_ExpectsDebug(false); + return std::to_string(status); + } +} + } // namespace amici diff --git a/tests/cpp/unittests/testMisc.cpp b/tests/cpp/unittests/testMisc.cpp index c689090c8c..d77aa54f93 100644 --- a/tests/cpp/unittests/testMisc.cpp +++ b/tests/cpp/unittests/testMisc.cpp @@ -704,5 +704,11 @@ TEST(UnravelIndex, UnravelIndexSunMatSparse) SUNMatDestroy(S); } +TEST(ReturnCodeToStr, ReturnCodeToStr) +{ + EXPECT_EQ("AMICI_SUCCESS", simulation_status_to_str(AMICI_SUCCESS)); + EXPECT_EQ("AMICI_UNRECOVERABLE_ERROR", + simulation_status_to_str(AMICI_UNRECOVERABLE_ERROR)); +} } // namespace From 15e62ce6be0c8491310f4b76f271fa14fa87f550 Mon Sep 17 00:00:00 2001 From: Daniel Weindl Date: Sun, 11 Sep 2022 00:03:07 +0200 Subject: [PATCH 4/7] Cleanup ode_export.py (#1866) Various cleanup. 
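
Among other things: the derivative-name regex becomes a module-level,
anchored `DERIVATIVE_PATTERN` (alongside `IDENTIFIER_PATTERN`), making the
`dtotal_cldx_rdata` special case unnecessary; boolean template values go
through the new `AmiciCxxCodePrinter.print_bool`; `remove_typedefs` is
renamed to the more accurate `remove_argument_types`; and various `os.path`
operations are replaced by `pathlib.Path`.

A small standalone sketch of what the new pattern matches, mirroring the
added `test_match_deriv`:

```python
import re

# Same pattern as amici.ode_export.DERIVATIVE_PATTERN in this patch.
DERIVATIVE_PATTERN = re.compile(
    r'^d(x_rdata|xdot|\w+?)d(\w+?)(?:_explicit)?$')

# Splits d<eq>d<var> names into equation and variable parts. The explicit
# 'x_rdata'/'xdot' alternatives resolve numerators that themselves contain
# a 'd' (e.g. 'dx_rdatadtotal_cl'); anchoring plus lazy quantifiers handle
# 'dtotal_cldx_rdata' without the previous special-casing.
assert DERIVATIVE_PATTERN.match('dwdx').groups() == ('w', 'x')
assert DERIVATIVE_PATTERN.match('dx_rdatadtotal_cl').groups() \
    == ('x_rdata', 'total_cl')
assert DERIVATIVE_PATTERN.match('dtotal_cldx_rdata').groups() \
    == ('total_cl', 'x_rdata')
assert DERIVATIVE_PATTERN.match('dxdotdx_explicit').groups() \
    == ('xdot', 'x')
```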
--- python/amici/cxxcodeprinter.py | 5 + python/amici/ode_export.py | 218 +++++++++++++++----------------- python/tests/test_ode_export.py | 15 +++ 3 files changed, 122 insertions(+), 116 deletions(-) diff --git a/python/amici/cxxcodeprinter.py b/python/amici/cxxcodeprinter.py index 06ba285977..15f440a0fb 100644 --- a/python/amici/cxxcodeprinter.py +++ b/python/amici/cxxcodeprinter.py @@ -238,6 +238,11 @@ def csc_matrix( return symbol_col_ptrs, symbol_row_vals, sparse_list, symbol_list, \ sparse_matrix + @staticmethod + def print_bool(expr) -> str: + """Print the boolean value of the given expression""" + return "true" if bool(expr) else "false" + def get_switch_statement(condition: str, cases: Dict[int, List[str]], indentation_level: Optional[int] = 0, diff --git a/python/amici/ode_export.py b/python/amici/ode_export.py index 23cef1f500..ee537efa69 100644 --- a/python/amici/ode_export.py +++ b/python/amici/ode_export.py @@ -49,7 +49,8 @@ MODEL_CMAKE_TEMPLATE_FILE = os.path.join(amiciSrcPath, 'CMakeLists.template.cmake') - +IDENTIFIER_PATTERN = re.compile(r'^[a-zA-Z_]\w*$') +DERIVATIVE_PATTERN = re.compile(r'^d(x_rdata|xdot|\w+?)d(\w+?)(?:_explicit)?$') @dataclass class _FunctionInfo: """Information on a model-specific generated C++ function @@ -482,8 +483,10 @@ def var_in_function_signature(name: str, varname: str) -> bool: @log_execution_time('running smart_jacobian', logger) -def smart_jacobian(eq: sp.MutableDenseMatrix, - sym_var: sp.MutableDenseMatrix) -> sp.MutableSparseMatrix: +def smart_jacobian( + eq: sp.MutableDenseMatrix, + sym_var: sp.MutableDenseMatrix +) -> sp.MutableSparseMatrix: """ Wrapper around symbolic jacobian with some additional checks that reduce computation time for large matrices @@ -530,9 +533,10 @@ def smart_jacobian(eq: sp.MutableDenseMatrix, @log_execution_time('running smart_multiply', logger) -def smart_multiply(x: Union[sp.MutableDenseMatrix, sp.MutableSparseMatrix], - y: sp.MutableDenseMatrix - ) -> Union[sp.MutableDenseMatrix, sp.MutableSparseMatrix]: +def smart_multiply( + x: Union[sp.MutableDenseMatrix, sp.MutableSparseMatrix], + y: sp.MutableDenseMatrix +) -> Union[sp.MutableDenseMatrix, sp.MutableSparseMatrix]: """ Wrapper around symbolic multiplication with some additional checks that reduce computation time for large matrices @@ -931,11 +935,12 @@ def transform_dxdt_to_concentration(species_id, dxdt): args += ['dt', 'init'] else: args += ['value'] + if symbol_name == SymbolId.EVENT: args += ['state_update', 'initial_value'] - if symbol_name == SymbolId.OBSERVABLE: + elif symbol_name == SymbolId.OBSERVABLE: args += ['transformation'] - if symbol_name == SymbolId.EVENT_OBSERVABLE: + elif symbol_name == SymbolId.EVENT_OBSERVABLE: args += ['event'] protos = [ @@ -963,7 +968,7 @@ def transform_dxdt_to_concentration(species_id, dxdt): si.process_conservation_laws(self) # fill in 'self._sym' based on prototypes and components in ode_model - self.generate_basic_variables(from_sbml=True) + self.generate_basic_variables() self._has_quadratic_nllh = all( llh['dist'] in ['normal', 'lin-normal', 'log-normal', 'log10-normal'] @@ -1298,7 +1303,7 @@ def free_symbols(self) -> Set[sp.Basic]: for state in self._states )) - def _generate_symbol(self, name: str, *, from_sbml: bool = False) -> None: + def _generate_symbol(self, name: str) -> None: """ Generates the symbolic identifiers for a symbolic variable @@ -1376,13 +1381,10 @@ def _generate_symbol(self, name: str, *, from_sbml: bool = False) -> None: for i in range(length) ]) - def generate_basic_variables(self, *, 
from_sbml: bool = False) -> None: + def generate_basic_variables(self) -> None: """ Generates the symbolic identifiers for all variables in ``ODEModel._variable_prototype`` - - :param from_sbml: - whether the model is generated from SBML """ # We need to process events and Heaviside functions in the ODE Model, # before adding it to ODEExporter @@ -1390,9 +1392,9 @@ def generate_basic_variables(self, *, from_sbml: bool = False) -> None: for var in self._variable_prototype: if var not in self._syms: - self._generate_symbol(var, from_sbml=from_sbml) + self._generate_symbol(var) - self._generate_symbol('x', from_sbml=from_sbml) + self._generate_symbol('x') def parse_events(self) -> None: """ @@ -1467,16 +1469,11 @@ def _generate_sparse_symbol(self, name: str) -> None: name of the symbolic variable """ matrix = self.eq(name) - match_deriv = re.match(r'd([\w]+)d([a-z]+)', name) - if match_deriv: + + if match_deriv := DERIVATIVE_PATTERN.match(name): eq = match_deriv[1] var = match_deriv[2] - if name == 'dtotal_cldx_rdata': - # not correctly parsed in regex - eq = 'total_cl' - var = 'x_rdata' - rownames = self.sym(eq) colnames = self.sym(var) @@ -1520,11 +1517,10 @@ def _compute_equation(self, name: str) -> None: """ # replacement ensures that we don't have to adapt name in abstract # model and keep backwards compatibility with matlab - match_deriv = re.match( - r'd([\w_]+)d([a-z_]+)', - re.sub(r'dJ(y|z|rz)dsigma', r'dJ\1dsigma\1', name).replace( - 'sigmarz', 'sigmaz' - ).replace('dJrzdz', 'dJrzdrz') + match_deriv = DERIVATIVE_PATTERN.match( + re.sub(r'dJ(y|z|rz)dsigma', r'dJ\1dsigma\1', name) + .replace('sigmarz', 'sigmaz') + .replace('dJrzdz', 'dJrzdrz') ) time_symbol = sp.Matrix([symbol_with_assumptions('t')]) @@ -2705,17 +2701,19 @@ def _write_function_file(self, function: str) -> None: # function body body = self._get_function_body(function, equations) - if self.assume_pow_positivity and func_info.assume_pow_positivity: - body = [re.sub(r'(^|\W)std::pow\(', r'\1amici::pos_pow(', line) - for line in body] - # execute this twice to catch cases where the ending ( would be the - # starting (^|\W) for the following match - body = [re.sub(r'(^|\W)std::pow\(', r'\1amici::pos_pow(', line) - for line in body] - if not body: return + if self.assume_pow_positivity and func_info.assume_pow_positivity: + pow_rx = re.compile(r'(^|\W)std::pow\(') + body = [ + # execute this twice to catch cases where the ending '(' would + # be the starting (^|\W) for the following match + pow_rx.sub(r'\1amici::pos_pow(', + pow_rx.sub(r'\1amici::pos_pow(', line)) + for line in body + ] + self.functions[function].body = body lines += body @@ -3021,49 +3019,43 @@ def _write_model_header_cpp(self) -> None: """ tpl_data = { - 'MODELNAME': str(self.model_name), - 'NX_RDATA': str(self.model.num_states_rdata()), - 'NXTRUE_RDATA': str(self.model.num_states_rdata()), - 'NX_SOLVER': str(self.model.num_states_solver()), - 'NXTRUE_SOLVER': str(self.model.num_states_solver()), - 'NX_SOLVER_REINIT': str(self.model.num_state_reinits()), - 'NY': str(self.model.num_obs()), - 'NYTRUE': str(self.model.num_obs()), - 'NZ': str(self.model.num_eventobs()), - 'NZTRUE': str(self.model.num_eventobs()), - 'NEVENT': str(self.model.num_events()), + 'MODELNAME': self.model_name, + 'NX_RDATA': self.model.num_states_rdata(), + 'NXTRUE_RDATA': self.model.num_states_rdata(), + 'NX_SOLVER': self.model.num_states_solver(), + 'NXTRUE_SOLVER': self.model.num_states_solver(), + 'NX_SOLVER_REINIT': self.model.num_state_reinits(), + 'NY': self.model.num_obs(), + 
'NYTRUE': self.model.num_obs(), + 'NZ': self.model.num_eventobs(), + 'NZTRUE': self.model.num_eventobs(), + 'NEVENT': self.model.num_events(), 'NOBJECTIVE': '1', - 'NW': str(len(self.model.sym('w'))), - 'NDWDP': str(len(self.model.sparsesym( + 'NW': len(self.model.sym('w')), + 'NDWDP': len(self.model.sparsesym( 'dwdp', force_generate=self.generate_sensitivity_code - ))), - 'NDWDX': str(len(self.model.sparsesym('dwdx'))), - 'NDWDW': str(len(self.model.sparsesym('dwdw'))), - 'NDXDOTDW': str(len(self.model.sparsesym('dxdotdw'))), - 'NDXDOTDP_EXPLICIT': str(len(self.model.sparsesym( + )), + 'NDWDX': len(self.model.sparsesym('dwdx')), + 'NDWDW': len(self.model.sparsesym('dwdw')), + 'NDXDOTDW': len(self.model.sparsesym('dxdotdw')), + 'NDXDOTDP_EXPLICIT': len(self.model.sparsesym( 'dxdotdp_explicit', force_generate=self.generate_sensitivity_code - ))), - 'NDXDOTDX_EXPLICIT': str(len(self.model.sparsesym( - 'dxdotdx_explicit'))), + )), + 'NDXDOTDX_EXPLICIT': len(self.model.sparsesym( + 'dxdotdx_explicit')), 'NDJYDY': 'std::vector{%s}' % ','.join(str(len(x)) for x in self.model.sparsesym('dJydy')), - 'NDXRDATADXSOLVER': str( - len(self.model.sparsesym('dx_rdatadx_solver')) - ), - 'NDXRDATADTCL': str( - len(self.model.sparsesym('dx_rdatadtcl')) - ), - 'NDTOTALCLDXRDATA': str( - len(self.model.sparsesym('dtotal_cldx_rdata')) - ), - 'UBW': str(self.model.num_states_solver()), - 'LBW': str(self.model.num_states_solver()), - 'NP': str(self.model.num_par()), - 'NK': str(self.model.num_const()), + 'NDXRDATADXSOLVER': len(self.model.sparsesym('dx_rdatadx_solver')), + 'NDXRDATADTCL': len(self.model.sparsesym('dx_rdatadtcl')), + 'NDTOTALCLDXRDATA': len(self.model.sparsesym('dtotal_cldx_rdata')), + 'UBW': self.model.num_states_solver(), + 'LBW': self.model.num_states_solver(), + 'NP': self.model.num_par(), + 'NK': self.model.num_const(), 'O2MODE': 'amici::SecondOrderMode::none', - # using cxxcode ensures proper handling of nan/inf + # using code printer ensures proper handling of nan/inf 'PARAMETERS': self.model._code_printer.doprint( self.model.val('p'))[1:-1], 'FIXED_PARAMETERS': self.model._code_printer.doprint( @@ -3097,27 +3089,25 @@ def _write_model_header_cpp(self) -> None: self._get_symbol_id_initializer_list('w'), 'STATE_IDXS_SOLVER_INITIALIZER_LIST': ', '.join( - [ str(idx) for idx, state in enumerate(self.model._states) if not state.has_conservation_law() - ] ), 'REINIT_FIXPAR_INITCOND': - 'true' if self.allow_reinit_fixpar_initcond else - 'false', + AmiciCxxCodePrinter.print_bool( + self.allow_reinit_fixpar_initcond), 'AMICI_VERSION_STRING': __version__, 'AMICI_COMMIT_STRING': __commit__, 'W_RECURSION_DEPTH': self.model._w_recursion_depth, - 'QUADRATIC_LLH': 'true' - if self.model._has_quadratic_nllh else 'false', + 'QUADRATIC_LLH': AmiciCxxCodePrinter.print_bool( + self.model._has_quadratic_nllh), 'ROOT_INITIAL_VALUES': - ', '.join([ - 'true' if event.get_initial_value() else 'false' - for event in self.model._events - ]), + ', '.join(map( + lambda event: AmiciCxxCodePrinter.print_bool( + event.get_initial_value()), + self.model._events)), 'Z2EVENT': - ', '.join(str(ie) for ie in self.model._z2event) + ', '.join(map(str, self.model._z2event)) } for func_name, func_info in self.functions.items(): @@ -3175,6 +3165,8 @@ def _write_model_header_cpp(self) -> None: tpl_data['X_RDATA_DEF'] = '' tpl_data['X_RDATA_IMPL'] = '' + tpl_data = {k: str(v) for k, v in tpl_data.items()} + apply_template( os.path.join(amiciSrcPath, 'model_header.ODE_template.h'), os.path.join(self.model_path, 
f'{self.model_name}.h'), @@ -3199,10 +3191,8 @@ def _get_symbol_name_initializer_list(self, name: str) -> str: Template initializer list of names """ return '\n'.join( - [ - f'"{symbol}", // {name}[{idx}]' - for idx, symbol in enumerate(self.model.name(name)) - ] + f'"{symbol}", // {name}[{idx}]' + for idx, symbol in enumerate(self.model.name(name)) ) def _get_symbol_id_initializer_list(self, name: str) -> str: @@ -3217,40 +3207,37 @@ def _get_symbol_id_initializer_list(self, name: str) -> str: Template initializer list of ids """ return '\n'.join( - [ - f'"{strip_pysb(symbol)}", // {name}[{idx}]' - for idx, symbol in enumerate(self.model.sym(name)) - ] + f'"{self.model._code_printer.doprint(symbol)}", // {name}[{idx}]' + for idx, symbol in enumerate(self.model.sym(name)) ) def _write_c_make_file(self): """Write CMake ``CMakeLists.txt`` file for this model.""" - sources = [ + sources = '\n'.join( f + ' ' for f in os.listdir(self.model_path) if f.endswith('.cpp') and f != 'main.cpp' - ] + ) template_data = {'MODELNAME': self.model_name, - 'SOURCES': '\n'.join(sources), + 'SOURCES': sources, 'AMICI_VERSION': __version__} apply_template( MODEL_CMAKE_TEMPLATE_FILE, - os.path.join(self.model_path, 'CMakeLists.txt'), + Path(self.model_path, 'CMakeLists.txt'), template_data ) def _write_swig_files(self) -> None: """Write SWIG interface files for this model.""" - if not os.path.exists(self.model_swig_path): - os.makedirs(self.model_swig_path) + Path(self.model_swig_path).mkdir(exist_ok=True) template_data = {'MODELNAME': self.model_name} apply_template( - os.path.join(amiciSwigPath, 'modelname.template.i'), - os.path.join(self.model_swig_path, self.model_name + '.i'), + Path(amiciSwigPath, 'modelname.template.i'), + Path(self.model_swig_path, self.model_name + '.i'), template_data ) shutil.copy(SWIG_CMAKE_TEMPLATE_FILE, - os.path.join(self.model_swig_path, 'CMakeLists.txt')) + Path(self.model_swig_path, 'CMakeLists.txt')) def _write_module_setup(self) -> None: """ @@ -3260,18 +3247,17 @@ def _write_module_setup(self) -> None: template_data = {'MODELNAME': self.model_name, 'AMICI_VERSION': __version__, 'PACKAGE_VERSION': '0.1.0'} - apply_template(os.path.join(amiciModulePath, 'setup.template.py'), - os.path.join(self.model_path, 'setup.py'), + apply_template(Path(amiciModulePath, 'setup.template.py'), + Path(self.model_path, 'setup.py'), template_data) - apply_template(os.path.join(amiciModulePath, 'MANIFEST.template.in'), - os.path.join(self.model_path, 'MANIFEST.in'), {}) + apply_template(Path(amiciModulePath, 'MANIFEST.template.in'), + Path(self.model_path, 'MANIFEST.in'), {}) # write __init__.py for the model module - if not os.path.exists(os.path.join(self.model_path, self.model_name)): - os.makedirs(os.path.join(self.model_path, self.model_name)) + Path(self.model_path, self.model_name).mkdir(exist_ok=True) apply_template( - os.path.join(amiciModulePath, '__init__.template.py'), - os.path.join(self.model_path, self.model_name, '__init__.py'), + Path(amiciModulePath, '__init__.template.py'), + Path(self.model_path, self.model_name, '__init__.py'), template_data ) @@ -3321,8 +3307,8 @@ class TemplateAmici(Template): delimiter = 'TPL_' -def apply_template(source_file: str, - target_file: str, +def apply_template(source_file: Union[str, Path], + target_file: Union[str, Path], template_data: Dict[str, str]) -> None: """ Load source file, apply template substitution as provided in @@ -3417,7 +3403,7 @@ def get_model_override_implementation(fun: str, name: str, fun=fun, name=name, 
signature=func_info.arguments, - eval_signature=remove_typedefs(func_info.arguments), + eval_signature=remove_argument_types(func_info.arguments), return_type=func_info.return_type ) @@ -3465,9 +3451,9 @@ def get_sunindex_override_implementation(fun: str, name: str, ) -def remove_typedefs(signature: str) -> str: +def remove_argument_types(signature: str) -> str: """ - Strips typedef info from a function signature + Strips argument types from a function signature :param signature: function signature @@ -3481,7 +3467,7 @@ def remove_typedefs(signature: str) -> str: # same applies for const specifications) # # always add whitespace after type definition for cosmetic reasons - typedefs = [ + known_types = [ 'const realtype *', 'const double *', 'const realtype ', @@ -3493,8 +3479,8 @@ def remove_typedefs(signature: str) -> str: 'gsl::span' ] - for typedef in typedefs: - signature = signature.replace(typedef, '') + for type_str in known_types: + signature = signature.replace(type_str, '') return signature @@ -3512,7 +3498,7 @@ def is_valid_identifier(x: str) -> bool: ``True`` if valid, ``False`` otherwise """ - return re.match(r'^[a-zA-Z_]\w*$', x) is not None + return IDENTIFIER_PATTERN.match(x) is not None @contextlib.contextmanager diff --git a/python/tests/test_ode_export.py b/python/tests/test_ode_export.py index 180425c023..2e9c45698f 100644 --- a/python/tests/test_ode_export.py +++ b/python/tests/test_ode_export.py @@ -64,3 +64,18 @@ def test_csc_matrix_vector(): assert sparse_list == sp.Matrix([[3]]) assert symbol_list == ['da2_db_1'] assert str(sparse_matrix) == 'Matrix([[0], [da2_db_1]])' + + +def test_match_deriv(): + from amici.ode_export import DERIVATIVE_PATTERN as pat + + def check(str, out1, out2): + match = pat.match(str) + assert match[1] == out1, (str, match[1], match[2]) + assert match[2] == out2, (str, match[1], match[2]) + + check("dwdx", "w", "x") + check("dx_rdatadtotal_cl", "x_rdata", "total_cl") + check("dtotal_cldx_rdata", "total_cl", "x_rdata") + check("dxdotdw", "xdot", "w") + check("dxdotdx_explicit", "xdot", "x") From ad4706862a15f90f29b8342d2280e53706960880 Mon Sep 17 00:00:00 2001 From: Daniel Weindl Date: Mon, 12 Sep 2022 11:13:48 +0200 Subject: [PATCH 5/7] Refactor: Extract ODEModel creation from sbml2amici (#1867) * Makes it easier to test that part * Smaller functions * ODEModel usable for purposes other than code-generation --- python/amici/sbml_import.py | 71 +++++++++++++++++++++++++++---------- 1 file changed, 53 insertions(+), 18 deletions(-) diff --git a/python/amici/sbml_import.py b/python/amici/sbml_import.py index d6310ac4ed..e1f35245e4 100644 --- a/python/amici/sbml_import.py +++ b/python/amici/sbml_import.py @@ -331,10 +331,61 @@ def sbml2amici( :param generate_sensitivity_code: If ``False``, the code required for sensitivity computation will not be generated - """ set_log_level(logger, verbose) + ode_model = self._build_ode_model( + observables=observables, + event_observables=event_observables, + constant_parameters=constant_parameters, + sigmas=sigmas, + event_sigmas=event_sigmas, + noise_distributions=noise_distributions, + event_noise_distributions=event_noise_distributions, + verbose=verbose, + compute_conservation_laws=compute_conservation_laws, + simplify=simplify, + cache_simplify=cache_simplify, + log_as_log10=log_as_log10, + ) + + exporter = ODEExporter( + ode_model, + model_name=model_name, + outdir=output_dir, + verbose=verbose, + assume_pow_positivity=assume_pow_positivity, + compiler=compiler, + 
allow_reinit_fixpar_initcond=allow_reinit_fixpar_initcond, + generate_sensitivity_code=generate_sensitivity_code + ) + exporter.generate_model_code() + + if compile: + if not has_clibs: + warnings.warn('AMICI C++ extensions have not been built. ' + 'Generated model code, but unable to compile.') + exporter.compile_model() + + def _build_ode_model( + self, + observables: Dict[str, Dict[str, str]] = None, + event_observables: Dict[str, Dict[str, str]] = None, + constant_parameters: Iterable[str] = None, + sigmas: Dict[str, Union[str, float]] = None, + event_sigmas: Dict[str, Union[str, float]] = None, + noise_distributions: Dict[str, Union[str, Callable]] = None, + event_noise_distributions: Dict[str, Union[str, Callable]] = None, + verbose: Union[int, bool] = logging.ERROR, + compute_conservation_laws: bool = True, + simplify: Optional[Callable] = _default_simplify, + cache_simplify: bool = False, + log_as_log10: bool = True, + ) -> ODEModel: + """Generate an ODEModel from this SBML model. + + See :py:func:`sbml2amici` for parameters. + """ constant_parameters = list(constant_parameters) \ if constant_parameters else [] @@ -386,23 +437,7 @@ def sbml2amici( ) ode_model.import_from_sbml_importer( self, compute_cls=compute_conservation_laws) - exporter = ODEExporter( - ode_model, - model_name=model_name, - outdir=output_dir, - verbose=verbose, - assume_pow_positivity=assume_pow_positivity, - compiler=compiler, - allow_reinit_fixpar_initcond=allow_reinit_fixpar_initcond, - generate_sensitivity_code=generate_sensitivity_code - ) - exporter.generate_model_code() - - if compile: - if not has_clibs: - warnings.warn('AMICI C++ extensions have not been built. ' - 'Generated model code, but unable to compile.') - exporter.compile_model() + return ode_model @log_execution_time('importing SBML', logger) def _process_sbml(self, constant_parameters: List[str] = None) -> None: From 18b4d1e542516b43ff1e62924ae313ad2945514b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20Fr=C3=B6hlich?= Date: Thu, 15 Sep 2022 10:18:08 +0200 Subject: [PATCH 6/7] Reduce symlinking (#1868) * try replacing symlinks * move more stuff * move more stuff, fix GHA workflows * move remaining files * move init --- .../test_sbml_semantic_test_suite.yml | 6 +- python/amici/MANIFEST.template.in | 1 - python/amici/__init__.py | 193 - python/amici/__init__.template.py | 19 - python/amici/__main__.py | 25 - python/amici/bngl_import.py | 32 - .../amici/conserved_quantities_demartino.py | 911 ----- python/amici/conserved_quantities_rref.py | 99 - python/amici/constants.py | 33 - python/amici/custom_commands.py | 328 -- python/amici/cxxcodeprinter.py | 290 -- python/amici/gradient_check.py | 307 -- python/amici/import_utils.py | 682 ---- python/amici/logging.py | 202 - python/amici/numpy.py | 309 -- python/amici/ode_export.py | 3590 ---------------- python/amici/ode_model.py | 616 --- python/amici/pandas.py | 743 ---- python/amici/parameter_mapping.py | 427 -- python/amici/petab_import.py | 830 ---- python/amici/petab_import_pysb.py | 388 -- python/amici/petab_objective.py | 808 ---- python/amici/petab_simulate.py | 110 - python/amici/plotting.py | 90 - python/amici/pysb_import.py | 1415 ------- python/amici/sbml_import.py | 2365 ----------- python/amici/setup.template.py | 178 - python/amici/setuptools.py | 284 -- python/amici/swig.py | 183 - python/amici/swig_wrappers.py | 237 -- python/amici/testing.py | 41 - python/sdist/amici/MANIFEST.template.in | 2 +- python/sdist/amici/__init__.py | 194 +- python/sdist/amici/__init__.template.py | 20 
+- python/sdist/amici/__main__.py | 26 +- python/sdist/amici/bngl_import.py | 33 +- .../amici/conserved_quantities_demartino.py | 912 ++++- .../sdist/amici/conserved_quantities_rref.py | 100 +- python/sdist/amici/constants.py | 34 +- python/sdist/amici/custom_commands.py | 329 +- python/sdist/amici/cxxcodeprinter.py | 291 +- python/sdist/amici/gradient_check.py | 308 +- python/sdist/amici/import_utils.py | 683 +++- python/sdist/amici/logging.py | 203 +- python/sdist/amici/numpy.py | 310 +- python/sdist/amici/ode_export.py | 3591 ++++++++++++++++- python/sdist/amici/ode_model.py | 617 ++- python/sdist/amici/pandas.py | 744 +++- python/sdist/amici/parameter_mapping.py | 428 +- python/sdist/amici/petab_import.py | 831 +++- python/sdist/amici/petab_import_pysb.py | 389 +- python/sdist/amici/petab_objective.py | 809 +++- python/sdist/amici/petab_simulate.py | 111 +- python/sdist/amici/plotting.py | 91 +- python/sdist/amici/pysb_import.py | 1416 ++++++- python/sdist/amici/sbml_import.py | 2366 ++++++++++- python/sdist/amici/setup.template.py | 179 +- python/sdist/amici/setuptools.py | 285 +- python/sdist/amici/swig.py | 184 +- python/sdist/amici/swig_wrappers.py | 238 +- python/sdist/amici/testing.py | 42 +- 61 files changed, 15739 insertions(+), 15769 deletions(-) delete mode 100644 python/amici/MANIFEST.template.in delete mode 100644 python/amici/__init__.py delete mode 100644 python/amici/__init__.template.py delete mode 100644 python/amici/__main__.py delete mode 100644 python/amici/bngl_import.py delete mode 100644 python/amici/conserved_quantities_demartino.py delete mode 100644 python/amici/conserved_quantities_rref.py delete mode 100644 python/amici/constants.py delete mode 100644 python/amici/custom_commands.py delete mode 100644 python/amici/cxxcodeprinter.py delete mode 100644 python/amici/gradient_check.py delete mode 100644 python/amici/import_utils.py delete mode 100644 python/amici/logging.py delete mode 100644 python/amici/numpy.py delete mode 100644 python/amici/ode_export.py delete mode 100644 python/amici/ode_model.py delete mode 100644 python/amici/pandas.py delete mode 100644 python/amici/parameter_mapping.py delete mode 100644 python/amici/petab_import.py delete mode 100644 python/amici/petab_import_pysb.py delete mode 100644 python/amici/petab_objective.py delete mode 100644 python/amici/petab_simulate.py delete mode 100644 python/amici/plotting.py delete mode 100644 python/amici/pysb_import.py delete mode 100644 python/amici/sbml_import.py delete mode 100644 python/amici/setup.template.py delete mode 100644 python/amici/setuptools.py delete mode 100644 python/amici/swig.py delete mode 100644 python/amici/swig_wrappers.py delete mode 100644 python/amici/testing.py mode change 120000 => 100644 python/sdist/amici/MANIFEST.template.in mode change 120000 => 100644 python/sdist/amici/__init__.py mode change 120000 => 100644 python/sdist/amici/__init__.template.py mode change 120000 => 100644 python/sdist/amici/__main__.py mode change 120000 => 100644 python/sdist/amici/bngl_import.py mode change 120000 => 100644 python/sdist/amici/conserved_quantities_demartino.py mode change 120000 => 100644 python/sdist/amici/conserved_quantities_rref.py mode change 120000 => 100644 python/sdist/amici/constants.py mode change 120000 => 100644 python/sdist/amici/custom_commands.py mode change 120000 => 100644 python/sdist/amici/cxxcodeprinter.py mode change 120000 => 100644 python/sdist/amici/gradient_check.py mode change 120000 => 100644 python/sdist/amici/import_utils.py mode change 120000 
=> 100644 python/sdist/amici/logging.py mode change 120000 => 100644 python/sdist/amici/numpy.py mode change 120000 => 100644 python/sdist/amici/ode_export.py mode change 120000 => 100644 python/sdist/amici/ode_model.py mode change 120000 => 100644 python/sdist/amici/pandas.py mode change 120000 => 100644 python/sdist/amici/parameter_mapping.py mode change 120000 => 100644 python/sdist/amici/petab_import.py mode change 120000 => 100644 python/sdist/amici/petab_import_pysb.py mode change 120000 => 100644 python/sdist/amici/petab_objective.py mode change 120000 => 100644 python/sdist/amici/petab_simulate.py mode change 120000 => 100644 python/sdist/amici/plotting.py mode change 120000 => 100644 python/sdist/amici/pysb_import.py mode change 120000 => 100644 python/sdist/amici/sbml_import.py mode change 120000 => 100644 python/sdist/amici/setup.template.py mode change 120000 => 100644 python/sdist/amici/setuptools.py mode change 120000 => 100644 python/sdist/amici/swig.py mode change 120000 => 100644 python/sdist/amici/swig_wrappers.py mode change 120000 => 100644 python/sdist/amici/testing.py diff --git a/.github/workflows/test_sbml_semantic_test_suite.yml b/.github/workflows/test_sbml_semantic_test_suite.yml index 31da1cdc9d..52f0b1a348 100644 --- a/.github/workflows/test_sbml_semantic_test_suite.yml +++ b/.github/workflows/test_sbml_semantic_test_suite.yml @@ -8,9 +8,9 @@ on: pull_request: paths: - .github/workflows/test_sbml_semantic_test_suite.yml - - python/amici/ode_export.py - - python/amici/sbml_import.py - - python/amici/import_utils.py + - python/sdist/amici/ode_export.py + - python/sdist/amici/sbml_import.py + - python/sdist/amici/import_utils.py - scripts/run-SBMLTestsuite.sh - tests/testSBMLSuite.py - tests/conftest.py diff --git a/python/amici/MANIFEST.template.in b/python/amici/MANIFEST.template.in deleted file mode 100644 index eb3b1b450f..0000000000 --- a/python/amici/MANIFEST.template.in +++ /dev/null @@ -1 +0,0 @@ -include *.cpp *.h diff --git a/python/amici/__init__.py b/python/amici/__init__.py deleted file mode 100644 index 46eac0cfb1..0000000000 --- a/python/amici/__init__.py +++ /dev/null @@ -1,193 +0,0 @@ -""" -AMICI ------ - -The AMICI Python module provides functionality for importing SBML or PySB -models and turning them into C++ Python extensions. 
- -:var amici_path: - absolute root path of the amici repository or Python package -:var amiciSwigPath: - absolute path of the amici swig directory -:var amiciSrcPath: - absolute path of the amici source directory -:var amiciModulePath: - absolute root path of the amici module -:var hdf5_enabled: - boolean indicating if amici was compiled with hdf5 support -:var has_clibs: - boolean indicating if this is the full package with swig interface or - the raw package without -""" - - -import contextlib -import importlib -import os -import re -import sys -from pathlib import Path -from types import ModuleType as ModelModule -from typing import Optional, Union - - -def _get_amici_path(): - """ - Determine package installation path, or, if used directly from git - repository, get repository root - """ - basedir = os.path.dirname(os.path.dirname(os.path.dirname(__file__))) - if os.path.exists(os.path.join(basedir, '.git')): - return os.path.abspath(basedir) - return os.path.dirname(__file__) - - -def _get_commit_hash(): - """Get commit hash from file""" - basedir = os.path.dirname(os.path.dirname(os.path.dirname(amici_path))) - commitfile = next( - ( - file for file in [ - os.path.join(basedir, '.git', 'FETCH_HEAD'), - os.path.join(basedir, '.git', 'ORIG_HEAD'), ] - if os.path.isfile(file) - ), - None - ) - - if commitfile: - with open(commitfile) as f: - return str(re.search(r'^([\w]*)', f.read().strip()).group()) - return 'unknown' - - -def _imported_from_setup() -> bool: - """Check whether this module is imported from `setup.py`""" - - from inspect import getouterframes, currentframe - from os import sep - - # in case we are imported from setup.py, this will be the AMICI package - # root directory (otherwise it is most likely the Python library directory, - # we are not interested in) - package_root = os.path.realpath(os.path.dirname(os.path.dirname(__file__))) - - for frame in getouterframes(currentframe(), context=0): - # Need to compare the full path, in case a user tries to import AMICI - # from a module `*setup.py`. Will still cause trouble if some package - # requires the AMICI extension during its installation, but seems - # unlikely... - frame_path = os.path.realpath(os.path.expanduser(frame.filename)) - if (frame_path == os.path.join(package_root, 'setup.py') - or frame_path.endswith(f"{sep}setuptools{sep}build_meta.py") - ): - return True - - return False - - -# Initialize AMICI paths -amici_path = _get_amici_path() -amiciSwigPath = os.path.join(amici_path, 'swig') -amiciSrcPath = os.path.join(amici_path, 'src') -amiciModulePath = os.path.dirname(__file__) - -has_clibs = any(os.path.isfile(os.path.join(amici_path, wrapper)) - for wrapper in ['amici.py', 'amici_without_hdf5.py']) -hdf5_enabled = False - -# Get version number from file -with open(os.path.join(amici_path, 'version.txt')) as f: - __version__ = f.read().strip() - -__commit__ = _get_commit_hash() - -# Import SWIG module and swig-dependent submodules if required and available -if not _imported_from_setup(): - if has_clibs: - from . 
import amici - from .amici import * - # has to be done before importing readSolverSettingsFromHDF5 - # from .swig_wrappers - hdf5_enabled = 'readSolverSettingsFromHDF5' in dir() - from .swig_wrappers import * - - # These modules require the swig interface and other dependencies - from .numpy import ReturnDataView, ExpDataView - from .pandas import * - - # These modules don't require the swig interface - from .sbml_import import SbmlImporter, assignmentRules2observables - from .ode_export import ODEModel, ODEExporter - - from typing import Protocol - - - class ModelModule(Protocol): - """Enable Python static type checking for AMICI-generated model - modules""" - def getModel(self) -> amici.Model: - pass - - -class add_path: - """Context manager for temporarily changing PYTHONPATH""" - - def __init__(self, path: Union[str, Path]): - self.path: str = str(path) - - def __enter__(self): - if self.path: - sys.path.insert(0, self.path) - - def __exit__(self, exc_type, exc_value, traceback): - with contextlib.suppress(ValueError): - sys.path.remove(self.path) - - -def import_model_module( - module_name: str, - module_path: Optional[Union[Path, str]] = None -) -> ModelModule: - """ - Import Python module of an AMICI model - - :param module_name: - Name of the python package of the model - :param module_path: - Absolute or relative path of the package directory - :return: - The model module - """ - module_path = str(module_path) - - # ensure we will find the newly created module - importlib.invalidate_caches() - - if not os.path.isdir(module_path): - raise ValueError(f"module_path '{module_path}' is not a directory.") - - module_path = os.path.abspath(module_path) - - # module already loaded? - if module_name in sys.modules: - # if a module with that name is already in sys.modules, we remove it, - # along with all other modules from that package. otherwise, there - # will be trouble if two different models with the same name are to - # be imported. - del sys.modules[module_name] - # collect first, don't delete while iterating - to_unload = {loaded_module_name for loaded_module_name in - sys.modules.keys() if - loaded_module_name.startswith(f"{module_name}.")} - for m in to_unload: - del sys.modules[m] - - with add_path(module_path): - return importlib.import_module(module_name) - - -class AmiciVersionError(RuntimeError): - """Error thrown if an AMICI model is loaded that is incompatible with - the installed AMICI base package""" - pass diff --git a/python/amici/__init__.template.py b/python/amici/__init__.template.py deleted file mode 100644 index 9fbab85003..0000000000 --- a/python/amici/__init__.template.py +++ /dev/null @@ -1,19 +0,0 @@ -"""AMICI-generated module for model TPL_MODELNAME""" - -import amici -from pathlib import Path - -# Ensure we are binary-compatible, see #556 -if 'TPL_AMICI_VERSION' != amici.__version__: - raise amici.AmiciVersionError( - f'Cannot use model `TPL_MODELNAME` in {Path(__file__).parent}, ' - 'generated with amici==TPL_AMICI_VERSION, ' - f'together with amici=={amici.__version__} ' - 'which is currently installed. To use this model, install ' - 'amici==TPL_AMICI_VERSION or re-import the model with the amici ' - 'version currently installed.' - ) - -from TPL_MODELNAME._TPL_MODELNAME import * - -__version__ = 'TPL_PACKAGE_VERSION' diff --git a/python/amici/__main__.py b/python/amici/__main__.py deleted file mode 100644 index dac5230270..0000000000 --- a/python/amici/__main__.py +++ /dev/null @@ -1,25 +0,0 @@ -"""Package-level entrypoint""" - -from . 
import __version__, compiledWithOpenMP, has_clibs, hdf5_enabled -import os -import sys - -def print_info(): - """Displays information on the current AMICI installation. - - Useful for verifying the package installation or for submitting bug reports""" - features = [] - - if has_clibs: - features.append("extensions") - - if compiledWithOpenMP(): - features.append("OpenMP") - - if hdf5_enabled: - features.append("HDF5") - - print(f"AMICI ({sys.platform}) version {__version__} ({','.join(features)})") - -if __name__ == '__main__': - print_info() diff --git a/python/amici/bngl_import.py b/python/amici/bngl_import.py deleted file mode 100644 index 840e4a4229..0000000000 --- a/python/amici/bngl_import.py +++ /dev/null @@ -1,32 +0,0 @@ -""" -BNGL Import ------------- -This module provides all necessary functionality to import a model specified -in the :term:`BNGL` format. -""" - - -from pysb.importers.bngl import model_from_bngl - -from .pysb_import import pysb2amici - - -def bngl2amici(bngl_model: str, *args, **kwargs) -> None: - r""" - Generate AMICI C++ files for the provided model. - - :param bngl_model: - BNGL model file; the model name will determine the name of the - generated module - - :param args: - see :func:`amici.pysb_import.pysb2amici` for additional arguments - - :param kwargs: - see :func:`amici.pysb_import.pysb2amici` for additional arguments - - """ - if 'model' in kwargs: - raise ValueError('model argument not allowed') - pysb_model = model_from_bngl(bngl_model) - pysb2amici(pysb_model, *args, **kwargs) diff --git a/python/amici/conserved_quantities_demartino.py b/python/amici/conserved_quantities_demartino.py deleted file mode 100644 index 28fe3f9e77..0000000000 --- a/python/amici/conserved_quantities_demartino.py +/dev/null @@ -1,911 +0,0 @@ -import logging -import math -import random -import sys -from typing import List, MutableSequence, Sequence, Tuple, Union, Optional - -from .logging import get_logger - -logger = get_logger(__name__, logging.ERROR) - -# increase recursion limit for recursive quicksort -sys.setrecursionlimit(3000) - -_MIN = 1e-9 -_MAX = 1e9 - - -def compute_moiety_conservation_laws( - stoichiometric_list: Sequence[float], - num_species: int, - num_reactions: int, - max_num_monte_carlo: int = 20, - rng_seed: Union[None, bool, int] = False, - species_names: Optional[Sequence[str]] = None, -) -> Tuple[List[List[int]], List[List[float]]]: - """Compute moiety conservation laws. - - Follows the algorithm proposed by De Martino et al. (2014), - https://doi.org/10.1371/journal.pone.0100750 - - :param stoichiometric_list: - the stoichiometric matrix as a list (species x reactions, - column-major ordering) - :param num_species: - total number of species in the reaction network - :param num_reactions: - total number of reactions in the reaction network - :param max_num_monte_carlo: - maximum number of Monte Carlo steps before changing to relaxation - :param rng_seed: - Seed for the random number generator. If `False`, the RNG will not be - re-initialized. Other values will be passed to :func:`random.seed`. - :param species_names: - Species names. Optional and only used for logging. - :returns: - Integer MCLs as list of lists of indices of involved species and - list of lists of corresponding coefficients.
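To make the expected inputs concrete, a minimal sketch for a toy reversible reaction A <-> B follows; the stated result is what the algorithm should find (one conserved moiety, [A] + [B]), not verified output:

    from amici.conserved_quantities_demartino import \
        compute_moiety_conservation_laws

    # S = [[-1], [1]] (2 species x 1 reaction), flattened column-major
    species_idxs, coefficients = compute_moiety_conservation_laws(
        [-1.0, 1.0], num_species=2, num_reactions=1,
        species_names=['A', 'B'])
    # expected: species_idxs == [[0, 1]], coefficients == [[1.0, 1.0]]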
- """ - # compute semi-positive conservation laws - (kernel_dim, engaged_species, int_kernel_dim, conserved_moieties, - cls_species_idxs, cls_coefficients) = _kernel( - stoichiometric_list, num_species, num_reactions) - # if the number of integer MCLs equals total MCLS no MC relaxation - done = (int_kernel_dim == kernel_dim) - - if not done: - # construct interaction matrix - J, J2, fields = _fill(stoichiometric_list, engaged_species, - num_species) - - # seed random number generator - if rng_seed is not False: - random.seed(rng_seed) - - timer = 0 - # maximum number of montecarlo search before starting relaxation - while not done: - yes, int_kernel_dim, conserved_moieties = _monte_carlo( - engaged_species, J, J2, fields, conserved_moieties, - int_kernel_dim, cls_species_idxs, cls_coefficients, - num_species, max_iter=max_num_monte_carlo - ) - # if the number of integer MCLs equals total MCLS then MC done - done = (int_kernel_dim == kernel_dim) - timer = 0 if yes else timer + 1 - - if timer == max_num_monte_carlo: - done = _relax(stoichiometric_list, conserved_moieties, - num_reactions, num_species) - timer = 0 - _reduce(int_kernel_dim, cls_species_idxs, cls_coefficients, num_species) - _output(int_kernel_dim, kernel_dim, engaged_species, cls_species_idxs, - cls_coefficients, species_names, verbose=True) - - return cls_species_idxs[:int_kernel_dim], cls_coefficients[:int_kernel_dim] - - -def _output( - int_kernel_dim: int, - kernel_dim: int, - int_matched: List[int], - species_indices: List[List[int]], - species_coefficients: List[List[float]], - species_names: Optional[Sequence[str]] = None, - verbose: bool = False, - log_level: int = logging.DEBUG -): - """Log infos on identified conservation laws""" - def log(*args, **kwargs): - logger.log(log_level, *args, **kwargs) - - log(f"There are {int_kernel_dim} linearly independent conserved " - f"moieties, engaging {len(int_matched)} state variables.") - if int_kernel_dim == kernel_dim: - log("They generate all the conservation laws") - else: - log(f"They don't generate all the conservation laws, " - f"{kernel_dim - int_kernel_dim} of them are not reducible to " - "moieties") - # print all conserved quantities - if verbose: - for i, (coefficients, engaged_species_idxs) \ - in enumerate(zip(species_coefficients, species_indices)): - if not engaged_species_idxs: - continue - log(f"Moiety number {i + 1} engages {len(engaged_species_idxs)} " - "species:") - for species_idx, coefficient \ - in zip(engaged_species_idxs, coefficients): - name = species_names[species_idx] if species_names \ - else species_idx - log(f"\t{name}\t{coefficient}") - - -def _qsort( - k: int, - km: int, - order: MutableSequence[int], - pivots: Sequence[int] -) -> None: - """Quicksort - - Recursive implementation of the quicksort algorithm - - :param k: - number of elements to sort - :param km: - current center element - :param order: - ordering of the elements - :param pivots: - corresponding pivot elements from scaled partial pivoting strategy - """ - # TODO: Rewrite into an iterative algorithm with pivoting strategy - - if k - km < 1: - # nothing to sort - return - - pivot = km + int((k - km) / 2) - l = 0 - p = k - km - 1 - new_order = [0] * (k - km) - for i in range(km, k): - if i != pivot: - if pivots[order[i]] < pivots[order[pivot]]: - new_order[l] = order[i] - l += 1 - else: - new_order[p] = order[i] - p -= 1 - new_order[p] = order[pivot] - order[km:k] = new_order - - # calculate center, then recursive calls on left and right intervals - centre = p + km - _qsort(k, 
centre + 1, order, pivots) - _qsort(centre, km, order, pivots) - - -def _kernel( - stoichiometric_list: Sequence[float], - num_species: int, - num_reactions: int -) -> Tuple[int, List[int], int, List[int], - List[List[int]], List[List[float]]]: - """ - Kernel (left nullspace of :math:`S`) calculation by Gaussian elimination - - To compute the left nullspace of the stoichiometric matrix :math:`S`, - a Gaussian elimination method with partial scaled pivoting is used to deal - effectively with a possibly ill-conditioned stoichiometric matrix - :math:`S`. - - Note that this is the Python reimplementation of the algorithm proposed by - `De Martino et al. (2014) `_ - and thus a direct adaption of the original implementation in C++. - - :param stoichiometric_list: - the stoichiometric matrix as a list (species x reactions, - col-major ordering) - :param num_species: - total number of species in the reaction network - :param num_reactions: - total number of reactions in the reaction network - :returns: - kernel dimension, MCLs, integer kernel dimension, integer MCLs and - indices to species and reactions in the preceding order as a tuple - """ - matrix: List[List[int]] = [[] for _ in range(num_species)] - matrix2: List[List[float]] = [[] for _ in range(num_species)] - i_reaction = 0 - i_species = 0 - for val in stoichiometric_list: - if val != 0: - matrix[i_species].append(i_reaction) - matrix2[i_species].append(val) - i_species += 1 - if i_species == num_species: - i_species = 0 - i_reaction += 1 - for i in range(num_species): - matrix[i].append(num_reactions + i) - matrix2[i].append(1) - - order: List[int] = list(range(num_species)) - pivots = [matrix[i][0] if len(matrix[i]) else _MAX - for i in range(num_species)] - - done = False - while not done: - _qsort(num_species, 0, order, pivots) - for j in range(num_species - 1): - if pivots[order[j + 1]] == pivots[order[j]] != _MAX: - min1 = _MAX - if len(matrix[order[j]]) > 1: - for i in range(len(matrix[order[j]])): - min1 = min(min1, abs(matrix2[order[j]][0] - / matrix2[order[j]][i])) - - min2 = _MAX - if len(matrix[order[j + 1]]) > 1: - for i in range(len(matrix[order[j + 1]])): - min2 = min(min2, abs(matrix2[order[j + 1]][0] - / matrix2[order[j + 1]][i])) - - if min2 > min1: - # swap - k2 = order[j + 1] - order[j + 1] = order[j] - order[j] = k2 - done = True - - for j in range(num_species - 1): - if pivots[order[j + 1]] == pivots[order[j]] != _MAX: - k1 = order[j + 1] - k2 = order[j] - column: List[float] = [0] * (num_species + num_reactions) - g = matrix2[k2][0] / matrix2[k1][0] - for i in range(1, len(matrix[k1])): - column[matrix[k1][i]] = matrix2[k1][i] * g - - for i in range(1, len(matrix[k2])): - column[matrix[k2][i]] -= matrix2[k2][i] - - matrix[k1] = [] - matrix2[k1] = [] - for col_idx, col_val in enumerate(column): - if abs(col_val) > _MIN: - matrix[k1].append(col_idx) - matrix2[k1].append(col_val) - - done = False - if len(matrix[order[j + 1]]): - pivots[order[j + 1]] = matrix[order[j + 1]][0] - else: - pivots[order[j + 1]] = _MAX - - RSolutions = [[] for _ in range(num_species)] - RSolutions2 = [[] for _ in range(num_species)] - kernel_dim = 0 - - for i in range(num_species): - done = all(matrix[i][j] >= num_reactions - for j in range(len(matrix[i]))) - if done and len(matrix[i]): - for j in range(len(matrix[i])): - RSolutions[kernel_dim].append(matrix[i][j] - num_reactions) - RSolutions2[kernel_dim].append(matrix2[i][j]) - kernel_dim += 1 - del matrix, matrix2 - - matched = [] - int_matched = [] - cls_species_idxs = [[] for _ in 
range(num_species)] - cls_coefficients = [[] for _ in range(num_species)] - - i2 = 0 - for i in range(kernel_dim): - ok2 = True - for j in range(len(RSolutions[i])): - if RSolutions2[i][j] * RSolutions2[i][0] < 0: - ok2 = False - if not matched or all( - cur_matched != RSolutions[i][j] for cur_matched in - matched - ): - matched.append(RSolutions[i][j]) - if ok2 and len(RSolutions[i]): - min_value = _MAX - for j in range(len(RSolutions[i])): - cls_species_idxs[i2].append(RSolutions[i][j]) - cls_coefficients[i2].append(abs(RSolutions2[i][j])) - min_value = min(min_value, abs(RSolutions2[i][j])) - if not int_matched or all( - cur_int_matched != cls_species_idxs[i2][j] - for cur_int_matched in int_matched - ): - int_matched.append(cls_species_idxs[i2][j]) - for j in range(len(cls_species_idxs[i2])): - cls_coefficients[i2][j] /= min_value - i2 += 1 - int_kernel_dim = i2 - - assert int_kernel_dim <= kernel_dim - assert len(cls_species_idxs) == len(cls_coefficients), \ - "Inconsistent number of conserved quantities in coefficients and " \ - "species" - return (kernel_dim, matched, int_kernel_dim, int_matched, cls_species_idxs, - cls_coefficients) - - -def _fill( - stoichiometric_list: Sequence[float], - matched: Sequence[int], - num_species: int -) -> Tuple[List[List[int]], List[List[int]], List[int]]: - """Construct interaction matrix - - Construct the interaction matrix out of the given stoichiometric matrix - :math:`S`. - - :param stoichiometric_list: - the stoichiometric matrix given as a flat list - :param matched: - found and independent moiety conservation laws (MCL) - :param num_species: - number of rows in :math:`S` - :returns: - interactions of metabolites and reactions, and matrix of interaction - """ - dim = len(matched) - - # for each entry in the stoichiometric matrix save interaction - i_reaction = 0 - i_species = 0 - matrix = [[] for _ in range(dim)] - matrix2 = [[] for _ in range(dim)] - for val in stoichiometric_list: - if val != 0: - take = dim - for matched_idx, matched_val in enumerate(matched): - if i_species == matched_val: - take = matched_idx - if take < dim: - matrix[take].append(i_reaction) - matrix2[take].append(val) - i_species += 1 - if i_species == num_species: - i_species = 0 - i_reaction += 1 - - J = [[] for _ in range(num_species)] - J2 = [[] for _ in range(num_species)] - fields = [0] * num_species - for i in range(dim): - for j in range(i, dim): - interactions = 0 - for po in range(len(matrix[i])): - for pu in range(len(matrix[j])): - if matrix[i][po] == matrix[j][pu]: - interactions += matrix2[i][po] * matrix2[j][pu] - if j == i: - fields[i] = interactions - elif abs(interactions) > _MIN: - J[i].append(j) - J2[i].append(interactions) - J[j].append(i) - J2[j].append(interactions) - return J, J2, fields - - -def _is_linearly_dependent( - vector: Sequence[float], - int_kernel_dim: int, - cls_species_idxs: Sequence[Sequence[int]], - cls_coefficients: Sequence[Sequence[float]], - matched: Sequence[int], - num_species: int -) -> bool: - """Check for linear dependence between MCLs - - Check if the solutions found with Monte Carlo are linearly independent - with respect to the previous found solution for all MCLs involved - - :param vector: - found basis - :param int_kernel_dim: - number of integer conservative laws - :param cls_species_idxs: - NSolutions contains the species involved in the MCL - :param cls_coefficients: - NSolutions2 contains the corresponding coefficients in the MCL - :param matched: - actual found MCLs - :param num_species: - number of rows in 
:math:`S` - :returns: - boolean indicating linear dependence (true) or not (false) - """ - K = int_kernel_dim + 1 - matrix: List[List[int]] = [[] for _ in range(K)] - matrix2: List[List[float]] = [[] for _ in range(K)] - # Populate matrices with species ids and coefficients for CLs - for i in range(K - 1): - for j in range(len(cls_species_idxs[i])): - matrix[i].append(cls_species_idxs[i][j]) - matrix2[i].append(cls_coefficients[i][j]) - - order2 = list(range(len(matched))) - pivots2 = matched[:] - _qsort(len(matched), 0, order2, pivots2) - - # ensure positivity - for i in range(len(matched)): - if vector[order2[i]] > _MIN: - matrix[K - 1].append(matched[order2[i]]) - matrix2[K - 1].append(vector[order2[i]]) - - order = list(range(K)) - pivots = [matrix[i][0] if len(matrix[i]) else _MAX for i in range(K)] - - # check for linear independence of the solution - ok = False - while not ok: - _qsort(K, 0, order, pivots) - for j in range(K - 1): - if pivots[order[j + 1]] == pivots[order[j]] != _MAX: - min1 = _MAX - if len(matrix[order[j]]) > 1: - for i in range(len(matrix[order[j]])): - min1 = min(min1, abs(matrix2[order[j]][0] - / matrix2[order[j]][i])) - min2 = _MAX - if len(matrix[order[j + 1]]) > 1: - for i in range(len(matrix[order[j + 1]])): - min2 = min(min2, abs(matrix2[order[j + 1]][0] - / matrix2[order[j + 1]][i])) - if min2 > min1: - # swap - k2 = order[j + 1] - order[j + 1] = order[j] - order[j] = k2 - ok = True - for j in range(K - 1): - if pivots[order[j + 1]] == pivots[order[j]] != _MAX: - k1 = order[j + 1] - k2 = order[j] - column: List[float] = [0] * num_species - g = matrix2[k2][0] / matrix2[k1][0] - for i in range(1, len(matrix[k1])): - column[matrix[k1][i]] = matrix2[k1][i] * g - for i in range(1, len(matrix[k2])): - column[matrix[k2][i]] -= matrix2[k2][i] - - matrix[k1] = [] - matrix2[k1] = [] - for i in range(num_species): - if abs(column[i]) > _MIN: - matrix[k1].append(i) - matrix2[k1].append(column[i]) - ok = False - pivots[k1] = matrix[k1][0] if len(matrix[k1]) else _MAX - K1 = sum(len(matrix[i]) > 0 for i in range(K)) - return K == K1 - - -def _monte_carlo( - matched: Sequence[int], - J: Sequence[Sequence[int]], - J2: Sequence[Sequence[float]], - fields: Sequence[float], - int_matched: MutableSequence[int], - int_kernel_dim: int, - cls_species_idxs: MutableSequence[MutableSequence[int]], - cls_coefficients: MutableSequence[MutableSequence[float]], - num_species: int, - initial_temperature: float = 1, - cool_rate: float = 1e-3, - max_iter: int = 10 -) -> Tuple[bool, int, Sequence[int]]: - """MonteCarlo simulated annealing for finding integer MCLs - - Finding integer solutions for the MCLs by Monte Carlo, see step (b) in - the De Martino (2014) paper and Eqs. 11-13 in the publication - - :param matched: - number of found MCLs - :param J: - index of metabolites involved in a MCL - :param J2: - coefficients of metabolites involved in a MCL - :param fields: - actual number of metabolites involved in a MCL - :param int_matched: - actual matched MCLs - :param int_kernel_dim: - number of MCLs found in :math:`S` - :param cls_species_idxs: - Modified in-place. - :param cls_coefficients: - Modified in-place. 
- :param initial_temperature: - initial temperature - :param cool_rate: - cooling rate of simulated annealing - :param max_iter: - maximum number of MonteCarlo steps before changing to relaxation - :returns: - status of MC iteration, number of integer MCLs, number of MCLs, - metabolites and reaction indices, MCLs and integer MCLs as a tuple - - status indicates if the currently found moiety by the Monte Carlo - process is linearly dependent (False) or linearly independent (True) - in case of linear dependence, the current Monte Carlo cycle can be - considered otherwise the algorithm retries Monte Carlo up to max_iter - """ - dim = len(matched) - num = [int(2 * random.uniform(0, 1)) if len(J[i]) else 0 - for i in range(dim)] - numtot = sum(num) - - def compute_h(): - H = 0 - for i in range(dim): - H += fields[i] * num[i] ** 2 - for j in range(len(J[i])): - H += J2[i][j] * num[i] * num[J[i][j]] - return H - - H = compute_h() - - count = 0 - howmany = 0 - T1 = initial_temperature - e = math.exp(-1 / T1) - while True: - en = int(random.uniform(0, 1) * dim) - while not len(J[en]): - en = int(random.uniform(0, 1) * dim) - - p = -1 if num[en] > 0 and random.uniform(0, 1) < 0.5 else 1 - delta = fields[en] * num[en] - for i in range(len(J[en])): - delta += J2[en][i] * num[J[en][i]] - delta = 2 * p * delta + fields[en] - - if delta < 0 or random.uniform(0, 1) < math.pow(e, delta): - num[en] += p - numtot += p - H += delta - - count += 1 - if count % dim == 0: - T1 -= cool_rate - if T1 <= 0: - T1 = cool_rate - e = math.exp(-1 / T1) - - if count == dim // cool_rate: - count = 0 - T1 = initial_temperature - e = math.exp(-1 / T1) - en = int(random.uniform(0, 1) * dim) - while not len(J[en]): - en = int(random.uniform(0, 1) * dim) - num = [0] * dim - num[en] = 1 - numtot = 1 - - H = compute_h() - howmany += 1 - - if (H < _MIN and numtot > 0) or (howmany == 10 * max_iter): - break - - if howmany >= 10 * max_iter: - return False, int_kernel_dim, int_matched - - # founds MCLS? need to check for linear independence - if len(int_matched) and not _is_linearly_dependent( - num, int_kernel_dim, cls_species_idxs, - cls_coefficients, matched, num_species): - logger.debug( - "Found a moiety but it is linearly dependent... 
next.") - return False, int_kernel_dim, int_matched - - # reduce by MC procedure - order2 = list(range(len(matched))) - pivots2 = matched[:] - _qsort(len(matched), 0, order2, pivots2) - for i in range(len(matched)): - if num[order2[i]] > 0: - cls_species_idxs[int_kernel_dim].append(matched[order2[i]]) - cls_coefficients[int_kernel_dim].append(num[order2[i]]) - int_kernel_dim += 1 - _reduce(int_kernel_dim, cls_species_idxs, cls_coefficients, num_species) - min_value = 1000 - for i in range(len(cls_species_idxs[int_kernel_dim - 1])): - if not len(int_matched) \ - or all(cur_int_matched - != cls_species_idxs[int_kernel_dim - 1][i] - for cur_int_matched in int_matched): - int_matched.append(cls_species_idxs[int_kernel_dim - 1][i]) - - min_value = min(min_value, cls_coefficients[int_kernel_dim - 1][i]) - for i in range(len(cls_species_idxs[int_kernel_dim - 1])): - cls_coefficients[int_kernel_dim - 1][i] /= min_value - - logger.debug( - f"Found linearly independent moiety, now there are " - f"{int_kernel_dim} engaging {len(int_matched)} species") - - return True, int_kernel_dim, int_matched - - -def _relax( - stoichiometric_list: Sequence[float], - int_matched: Sequence[int], - num_reactions: int, - num_species: int, - relaxation_max: float = 1e6, - relaxation_step: float = 1.9 -) -> bool: - """Relaxation scheme for Monte Carlo final solution - - Checking for completeness using Motzkin's theorem. See Step (c) in - De Martino (2014) and the Eqs. 14-16 in the corresponding publication - - :param stoichiometric_list: - stoichiometric matrix :math:`S` as a flat list (column-major ordering) - :param int_matched: - number of matched integer CLs - :param num_reactions: - number of reactions in reaction network - :param num_species: - number of species in reaction network - :param relaxation_max: - maximum relaxation step - :param relaxation_step: - relaxation step width - :returns: - boolean indicating if relaxation has succeeded (``True``) or not - (``False``) - """ - K = len(int_matched) - matrix: List[List[int]] = [[] for _ in range(K)] - matrix2: List[List[float]] = [[] for _ in range(K)] - i_reaction = 0 - i_species = 0 - for val in stoichiometric_list: - if val != 0: - take = K - if K > 0: - for i in range(K): - if i_species == int_matched[i]: - take = i - if take < K: - matrix[take].append(i_reaction) - matrix2[take].append(val) - i_species += 1 - if i_species == num_species: - i_species = 0 - i_reaction += 1 - - # reducing the stoichiometric matrix of conserved moieties to row echelon - # form by Gaussian elimination - order = list(range(K)) - pivots = [matrix[i][0] if len(matrix[i]) else _MAX for i in range(K)] - done = False - while not done: - _qsort(K, 0, order, pivots) - for j in range(K - 1): - if pivots[order[j + 1]] == pivots[order[j]] != _MAX: - min1 = _MAX - if len(matrix[order[j]]) > 1: - for i in range(len(matrix[order[j]])): - min1 = min(min1, abs(matrix2[order[j]][0] - / matrix2[order[j]][i])) - min2 = _MAX - if len(matrix[order[j + 1]]) > 1: - for i in range(len(matrix[order[j + 1]])): - min2 = min(min2, abs(matrix2[order[j + 1]][0] - / matrix2[order[j + 1]][i])) - if min2 > min1: - # swap - k2 = order[j + 1] - order[j + 1] = order[j] - order[j] = k2 - done = True - for j in range(K - 1): - if pivots[order[j + 1]] == pivots[order[j]] != _MAX: - k1 = order[j + 1] - k2 = order[j] - column: List[float] = [0] * num_reactions - g = matrix2[k2][0] / matrix2[k1][0] - for i in range(1, len(matrix[k1])): - column[matrix[k1][i]] = matrix2[k1][i] * g - for i in range(1, len(matrix[k2])): - 
column[matrix[k2][i]] -= matrix2[k2][i] - - matrix[k1] = [] - matrix2[k1] = [] - for col_idx, col_val in enumerate(column): - if abs(col_val) > _MIN: - matrix[k1].append(col_idx) - matrix2[k1].append(col_val) - done = False - if len(matrix[order[j + 1]]): - pivots[order[j + 1]] = matrix[order[j + 1]][0] - else: - pivots[order[j + 1]] = _MAX - - # normalize - for matrix2_i in matrix2: - if len(matrix2_i): - norm = matrix2_i[0] - for j in range(len(matrix2_i)): - matrix2_i[j] /= norm - - for k1 in reversed(range(K - 1)): - k = order[k1] - if len(matrix[k]) <= 1: - continue - - i = 0 - while i < len(matrix[k]): - for i_species in range(k1 + 1, K): - j = order[i_species] - if not len(matrix[j]) or matrix[j][0] != matrix[k][i]: - continue - - # subtract rows - # matrix2[k] = matrix2[k] - matrix2[j] * matrix2[k][i] - row_k: List[float] = [0] * num_reactions - for a in range(len(matrix[k])): - row_k[matrix[k][a]] = matrix2[k][a] - for a in range(len(matrix[j])): - row_k[matrix[j][a]] -= matrix2[j][a] * matrix2[k][i] - # filter - matrix[k] = [row_idx for row_idx, row_val in enumerate(row_k) - if row_val != 0] - matrix2[k] = [row_val for row_val in row_k if row_val != 0] - - if len(matrix[k]) <= i: - break - i += 1 - - indip = [K + 1] * num_reactions - for i in range(K): - if len(matrix[i]): - indip[matrix[i][0]] = i - M1 = 0 - for i in range(num_reactions): - if indip[i] == K + 1: - indip[i] = K + M1 - M1 += 1 - - matrixAus = [[] for _ in range(M1)] - matrixAus2 = [[] for _ in range(M1)] - i_reaction = 0 - for i in range(num_reactions): - if indip[i] >= K: - matrixAus[i_reaction].append(i) - matrixAus2[i_reaction].append(1) - i_reaction += 1 - else: - t = indip[i] - if len(matrix[t]) > 1: - for k in range(1, len(matrix[t])): - idx = indip[matrix[t][k]] - K - matrixAus[idx].append(i) - matrixAus2[idx].append(-matrix2[t][k]) - del matrix - - N1 = num_species - K - matrix_aus = [[] for _ in range(N1)] - matrix_aus2 = [[] for _ in range(N1)] - k1 = 0 - i_reaction = 0 - i_species = 0 - for val in stoichiometric_list: - take = 1 - for i in range(len(int_matched)): - if i_species == int_matched[i]: - take -= 1 - if val != 0 and take == 1: - matrix_aus[k1].append(i_reaction) - matrix_aus2[k1].append(val) - i_species += 1 - k1 += take - if i_species == num_species: - i_species = 0 - k1 = 0 - i_reaction += 1 - - matrixb = [[] for _ in range(N1)] - matrixb2 = [[] for _ in range(N1)] - for i in range(M1): - for j in range(N1): - if len(matrix_aus[j]) * len(matrixAus[i]): - prod = 0 - for ib in range(len(matrixAus[i])): - for jb in range(len(matrix_aus[j])): - if matrixAus[i][ib] == matrix_aus[j][jb]: - prod += matrixAus2[i][ib] * matrix_aus2[j][jb] - if abs(prod) > _MIN: - matrixb[j].append(i) - matrixb2[j].append(prod) - del matrixAus, matrixAus2, matrix_aus, matrix_aus2 - - var = [_MIN] * M1 - time = 0 - cmin_idx = 0 - while True: - cmin = 1000 - for j in range(N1): - constr = 0 - if len(matrixb[j]): - for i in range(len(matrixb[j])): - constr += matrixb2[j][i] * var[matrixb[j][i]] - if constr < cmin: - cmin_idx = j - cmin = constr - if cmin >= 0: - # constraints satisfied - break - - # Motzkin relaxation - alpha = -relaxation_step * cmin - fact = sum(val ** 2 for val in matrixb2[cmin_idx]) - alpha /= fact - alpha = max(1e-9 * _MIN, alpha) - for j in range(len(matrixb[cmin_idx])): - var[matrixb[cmin_idx][j]] += alpha * matrixb2[cmin_idx][j] - - time += 1 - if time >= relaxation_max: - # timeout - break - - return done - - -def _reduce( - int_kernel_dim: int, - cls_species_idxs: 
MutableSequence[MutableSequence[int]], - cls_coefficients: MutableSequence[MutableSequence[float]], - num_species: int -) -> None: - """Reduce the solution found by the Monte Carlo process - - In case of superpositions of independent MCLs, one can reduce by - iteratively subtracting the other independent MCLs, taking care - to maintain the non-negativity constraint; see Eq. 13 in De Martino (2014) - - :param int_kernel_dim: - number of found MCLs - :param cls_species_idxs: - Species indices involved in each of the conservation laws. - Modified in-place. - :param cls_coefficients: - Coefficients for each of the species involved in each of the - conservation laws. Modified in-place. - :param num_species: - number of species / rows in :math:`S` - """ - K = int_kernel_dim - order = list(range(K)) - pivots = [-len(cls_species_idxs[i]) for i in range(K)] - - done = False - while not done: - _qsort(K, 0, order, pivots) - done = True - for i in range(K - 1): - k1 = order[i] - for j in range(i + 1, K): - k2 = order[j] - column: List[float] = [0] * num_species - for species_idx, coefficient \ - in zip(cls_species_idxs[k1], cls_coefficients[k1]): - column[species_idx] = coefficient - ok1 = True - for species_idx, coefficient \ - in zip(cls_species_idxs[k2], cls_coefficients[k2]): - column[species_idx] -= coefficient - if column[species_idx] < -_MIN: - ok1 = False - break - if not ok1: - continue - - done = False - cls_species_idxs[k1] = [] - cls_coefficients[k1] = [] - for col_idx, col_val in enumerate(column): - if abs(col_val) > _MIN: - cls_species_idxs[k1].append(col_idx) - cls_coefficients[k1].append(col_val) - pivots[k1] = -len(cls_species_idxs[k1]) diff --git a/python/amici/conserved_quantities_rref.py b/python/amici/conserved_quantities_rref.py deleted file mode 100644 index 4c401293cf..0000000000 --- a/python/amici/conserved_quantities_rref.py +++ /dev/null @@ -1,99 +0,0 @@ -"""Find conserved quantities deterministically""" - -from typing import List, Literal, Optional, Union - -import numpy as np - - -def rref( - mat: np.array, - round_ndigits: Optional[Union[Literal[False], int]] = None -) -> np.array: - """ - Bring matrix ``mat`` to reduced row echelon form - - see https://en.wikipedia.org/wiki/Row_echelon_form - - :param mat: Numpy float matrix to operate on (will be copied) - :param round_ndigits: Number of digits to round intermediary results to, - or ``False`` to disable rounding completely. - Helps to avoid numerical artifacts. - :returns: ``mat`` in rref form.
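A small worked example of this function (assuming numpy is installed); the second row is eliminated against the first, and the trailing column becomes the second pivot:

    import numpy as np
    from amici.conserved_quantities_rref import rref

    mat = np.array([[1.0, 2.0, 3.0],
                    [2.0, 4.0, 7.0]])
    print(rref(mat))
    # [[1. 2. 0.]
    #  [0. 0. 1.]]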
- """ - # Rounding function - if round_ndigits is False: - # no-op - def _round(mat): - return mat - else: - if round_ndigits is None: - # drop the least significant digit (more or less) - round_ndigits = - int(np.ceil(np.log10(np.spacing(1)))) - - def _round(mat): - mat = np.round(mat, round_ndigits) - mat[np.abs(mat) <= 10**(-round_ndigits)] = 0 - return mat - - # create a copy that will be modified - mat = mat.copy() - - lead = 0 - n_rows, n_columns = mat.shape - for r in range(n_rows): - if n_columns <= lead: - return mat - - i = r - while mat[i, lead] == 0: - i += 1 - if n_rows == i: - i = r - lead += 1 - if n_columns == lead: - return mat - - if i != r: - # Swap rows - mat[[i, r]] = mat[[r, i]] - # Divide row - mat[r] /= mat[r, lead] - for i in range(n_rows): - if i != r: - # Subtract multiple - mat[i] -= mat[i, lead] * mat[r] - mat = _round(mat) - lead += 1 - return mat - - -def pivots(mat: np.array) -> List[int]: - """Get indices of pivot columns in ``mat``, assumed to be in reduced row - echelon form""" - pivot_cols = [] - last_pivot_col = -1 - for i in range(mat.shape[0]): - for j in range(last_pivot_col + 1, mat.shape[1]): - if mat[i, j] != 0: - pivot_cols.append(j) - last_pivot_col = j - break - return pivot_cols - - -def nullspace_by_rref(mat: np.array) -> np.array: - """Compute basis of the nullspace of ``mat`` based on the reduced row - echelon form""" - rref_mat = rref(mat) - pivot_cols = pivots(rref_mat) - rows, cols = mat.shape - - basis = [] - for i in range(cols): - if i in pivot_cols: - continue - vec = [1.0 if i == j else 0.0 for j in range(cols)] - for pivot_row, pivot_col in enumerate(pivot_cols): - vec[pivot_col] -= rref_mat[pivot_row][i] - basis.append(vec) - return np.array(basis) diff --git a/python/amici/constants.py b/python/amici/constants.py deleted file mode 100644 index fabd34b3be..0000000000 --- a/python/amici/constants.py +++ /dev/null @@ -1,33 +0,0 @@ -""" -Constants ------------ -This module provides a central place to define native python enums and -constants that are used in multiple other modules -""" - -import enum - - -class SymbolId(str, enum.Enum): - """ - Defines the different fields in the symbol dict to which sbml entities - get parsed to. - - .. note:: This class inherits from str enabling direct comparison to - strings, which means that the species symbols can be accessed as - symbols['species'], which is convenient for debugging and symbols[ - SymbolId.SPECIES], which is how the field should be accessed - programmatically. 
- """ - SPECIES = 'species' - PARAMETER = 'parameter' - FIXED_PARAMETER = 'fixed_parameter' - OBSERVABLE = 'observable' - EXPRESSION = 'expression' - SIGMAY = 'sigmay' - LLHY = 'llhy' - EVENT = 'event' - EVENT_OBSERVABLE = 'event_observable' - SIGMAZ = 'sigmaz' - LLHZ = 'llhz' - LLHRZ = 'llhrz' diff --git a/python/amici/custom_commands.py b/python/amici/custom_commands.py deleted file mode 100644 index d4c25a348a..0000000000 --- a/python/amici/custom_commands.py +++ /dev/null @@ -1,328 +0,0 @@ -"""Custom setuptools commands for AMICI installation""" - -import glob -import os -import subprocess -import sys -from shutil import copyfile -from typing import Dict, List, Tuple - -from amici.swig import fix_typehints -from amici.setuptools import generate_swig_interface_files -from setuptools.command.build_clib import build_clib -from setuptools.command.build_ext import build_ext -from setuptools.command.develop import develop -from setuptools.command.install import install -from setuptools.command.install_lib import install_lib -from setuptools.command.sdist import sdist - -# typehints -Library = Tuple[str, Dict[str, List[str]]] - - -class AmiciInstall(install): - """Custom install to handle extra arguments""" - - print("running AmiciInstall") - - # Passing --no-clibs allows to install the Python-only part of AMICI - user_options = install.user_options + [ - ('no-clibs', None, "Don't build AMICI C++ extension"), - ] - - def initialize_options(self): - install.initialize_options(self) - self.no_clibs = False - - def finalize_options(self): - if self.no_clibs: - self.no_clibs = True - install.finalize_options(self) - - -def compile_parallel(self, sources, output_dir=None, macros=None, - include_dirs=None, debug=0, extra_preargs=None, - extra_postargs=None, depends=None): - """Parallelized version of distutils.ccompiler.compile""" - - macros, objects, extra_postargs, pp_opts, build = \ - self._setup_compile(output_dir, macros, include_dirs, sources, - depends, extra_postargs) - cc_args = self._get_cc_args(pp_opts, debug, extra_preargs) - - # parallel compilation - num_threads = 1 - if 'AMICI_PARALLEL_COMPILE' in os.environ: - max_threads = int(os.environ['AMICI_PARALLEL_COMPILE']) - num_threads = min(len(objects), max_threads) - num_threads = max(1, num_threads) - - def _single_compile(obj): - try: - src, ext = build[obj] - except KeyError: - return - self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts) - - if num_threads > 1: - import multiprocessing.pool - # convert to list, imap is evaluated on-demand - list(multiprocessing.pool.ThreadPool(num_threads).imap( - _single_compile, objects)) - else: - for obj in objects: - _single_compile(obj) - - return objects - - -class AmiciBuildCLib(build_clib): - """Custom build_clib""" - - def run(self): - print("running AmiciBuildCLib") - - # Always force recompilation. The way setuptools/distutils check for - # whether sources require recompilation is not reliable and may lead - # to crashes or wrong results. We rather compile once too often... 
- self.force = True - - build_clib.run(self) - - def build_libraries(self, libraries: List[Library]): - print("running AmiciBuildCLib.build_libraries") - - no_clibs = 'develop' in self.distribution.command_obj \ - and self.get_finalized_command('develop').no_clibs - no_clibs |= 'install' in self.distribution.command_obj \ - and self.get_finalized_command('install').no_clibs - - if no_clibs: - return - - # Override for parallel compilation - import distutils.ccompiler - distutils.ccompiler.CCompiler.compile = compile_parallel - - # Work-around for compiler-specific build options - set_compiler_specific_library_options( - libraries, self.compiler.compiler_type) - - # Monkey-patch setuptools, to force recompilation of library sources - # --force does not work as expected - - # need full import here, not module-level imported build_clib - import setuptools.command.build_clib - # the patched function may return anything but `([], [])` to trigger - # recompilation - setuptools.command.build_clib.newer_pairwise_group = lambda *_: None - - build_clib.build_libraries(self, libraries) - - -class AmiciDevelop(develop): - """Custom develop to build clibs""" - - # Passing --no-clibs allows to install the Python-only part of AMICI - user_options = develop.user_options + [ - ('no-clibs', None, "Don't build AMICI C++ extension"), - ] - - def initialize_options(self): - develop.initialize_options(self) - self.no_clibs = False - - def finalize_options(self): - if self.no_clibs: - self.no_clibs = True - develop.finalize_options(self) - - def run(self): - print("running AmiciDevelop") - - if not self.no_clibs: - self.get_finalized_command('build_clib').run() - - develop.run(self) - - -class AmiciInstallLib(install_lib): - """Custom install to allow preserving of debug symbols""" - - def run(self): - """strip debug symbols - - Returns: - - """ - print("running AmiciInstallLib") - - if 'ENABLE_AMICI_DEBUGGING' in os.environ \ - and os.environ['ENABLE_AMICI_DEBUGGING'] == 'TRUE' \ - and sys.platform == 'darwin': - search_dir = os.path.join(os.getcwd(), self.build_dir, 'amici') - for file in os.listdir(search_dir): - if file.endswith('.so'): - subprocess.run(['dsymutil', os.path.join(search_dir, file), - '-o', - os.path.join(search_dir, file + '.dSYM')]) - - # Continue with the actual installation - install_lib.run(self) - - -class AmiciBuildExt(build_ext): - """Custom build_ext to allow keeping otherwise temporary static libs""" - - def build_extension(self, ext): - # Work-around for compiler-specific build options - set_compiler_specific_extension_options( - ext, self.compiler.compiler_type) - - build_ext.build_extension(self, ext) - - def run(self): - """Copy the generated clibs to the extensions folder to be included in - the wheel - """ - - print("running AmiciBuildExt") - - no_clibs = 'develop' in self.distribution.command_obj \ - and self.get_finalized_command('develop').no_clibs - no_clibs |= 'install' in self.distribution.command_obj \ - and self.get_finalized_command('install').no_clibs - - if no_clibs: - # Nothing to build - return - - if not self.dry_run and self.distribution.has_c_libraries(): - # get the previously built static libraries - build_clib = self.get_finalized_command('build_clib') - libraries = build_clib.get_library_names() or [] - - # Module build directory where we want to copy the generated - # libs to - if self.inplace == 0: - build_dir = self.build_lib - else: - build_dir = os.getcwd() - target_dir = os.path.join(build_dir, 'amici', 'libs') - self.mkpath(target_dir) - - # Copy the 
generated libs - for lib in libraries: - libfilenames = glob.glob( - f"{build_clib.build_clib}{os.sep}*{lib}.*") - assert len(libfilenames) == 1, \ - f"Found unexpected number of files: {libfilenames}" - src = libfilenames[0] - dest = os.path.join(target_dir, os.path.basename(src)) - print(f"copying {src} -> {dest}") - copyfile(src, dest) - - swig_outdir = os.path.join(os.path.abspath(build_dir), "amici") - generate_swig_interface_files(swig_outdir=swig_outdir) - swig_py_module_path = os.path.join(swig_outdir, 'amici.py') - print("updating typehints") - fix_typehints(swig_py_module_path, swig_py_module_path) - - # Always force recompilation. The way setuptools/distutils check for - # whether sources require recompilation is not reliable and may lead - # to crashes or wrong results. We rather compile once too often... - self.force = True - - # Continue with the actual extension building - build_ext.run(self) - - -class AmiciSDist(sdist): - """Customized creation of source distribution""" - - def run(self): - """Setuptools entry-point""" - - print("running AmiciSDist") - - save_git_version() - - sdist.run(self) - - -def save_git_version(): - """Create file with extended version string - - This requires git. We assume that whoever creates the sdist will work - inside a valid git repository. - - Returns: - - """ - with open(os.path.join("amici", "git_version.txt"), "w") as f: - try: - cmd = ['git', 'describe', '--abbrev=4', '--dirty=-dirty', - '--always', '--tags'] - subprocess.run(cmd, stdout=f) - except Exception as e: - print(e) - - -def set_compiler_specific_library_options( - libraries: List[Library], - compiler_type: str) -> None: - """Set compiler-specific library options. - - C/C++-libraries for setuptools/distutils are provided as dict containing - entries for 'sources', 'macros', 'cflags', etc. - As we don't know the compiler type at the stage of calling - ``setuptools.setup`` and as there is no other apparent way to set - compiler-specific options, we elsewhere extend the dict with additional - fields ${original_field}_${compiler_class}, and add the additional - compiler-specific options here, at a stage when the compiler has been - determined by distutils. - - Arguments: - libraries: - List of libraries as passed as ``libraries`` argument to - ``setuptools.setup`` and ``setuptools.build_ext.build_extension``. - This is modified in place. - compiler_type: - Compiler type, as defined in - ``distutils.ccompiler.compiler.compiler_class``, (e.g. 'unix', - 'msvc', 'mingw32'). - """ - - for lib in libraries: - for field in ['cflags', 'sources', 'macros']: - try: - lib[1][field] += lib[1][f'{field}_{compiler_type}'] - print(f"Changed {field} for {lib[0]} with {compiler_type} " - f"to {lib[1][field]}") - except KeyError: - # No compiler-specific options set - pass - - -def set_compiler_specific_extension_options( - ext: 'setuptools.Extension', - compiler_type: str) -> None: - """Set compiler-specific extension build options. - - Same game as in ``set_compiler_specific_library_options``, except that - here we look for compiler-specific class attributes. 
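A sketch of that attribute convention, using hypothetical flags; set_compiler_specific_extension_options would merge whichever variant matches the active compiler into the base attribute:

    from setuptools import Extension

    ext = Extension('amici._amici', sources=['amici_wrap.cxx'])
    # merged into extra_compile_args depending on the detected compiler
    ext.extra_compile_args_unix = ['-std=c++14']   # picked on 'unix'
    ext.extra_compile_args_msvc = ['/std:c++14']   # picked on 'msvc'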
- - Arguments: - ext: setuptools/distutils extension object - compiler_type: Compiler type - """ - for attr in ['extra_compile_args', 'extra_link_args']: - try: - new_value = getattr(ext, attr) + \ - getattr(ext, f'{attr}_{compiler_type}') - setattr(ext, attr, new_value) - print(f"Changed {attr} for {compiler_type} to {new_value}") - except AttributeError: - # No compiler-specific options set - pass - diff --git a/python/amici/cxxcodeprinter.py b/python/amici/cxxcodeprinter.py deleted file mode 100644 index 15f440a0fb..0000000000 --- a/python/amici/cxxcodeprinter.py +++ /dev/null @@ -1,290 +0,0 @@ -"""C++ code generation""" -import itertools -import os -import re -from typing import Dict, List, Optional, Tuple - -import sympy as sp -from sympy.printing.cxx import CXX11CodePrinter -from sympy.utilities.iterables import numbered_symbols -from toposort import toposort - - -class AmiciCxxCodePrinter(CXX11CodePrinter): - """C++ code printer""" - - def __init__(self): - super().__init__() - - # extract common subexpressions in matrix functions? - self.extract_cse = (os.getenv("AMICI_EXTRACT_CSE", "0").lower() - in ('1', 'on', 'true')) - - def doprint(self, expr: sp.Expr, assign_to: Optional[str] = None) -> str: - try: - code = super().doprint(expr, assign_to) - code = re.sub(r'(^|\W)M_PI(\W|$)', r'\1amici::pi\2', code) - - return code - except TypeError as e: - raise ValueError( - f'Encountered unsupported function in expression "{expr}"' - ) from e - - def _print_min_max(self, expr, cpp_fun: str, sympy_fun): - # C++ doesn't like mixing int and double for arguments for min/max, - # therefore, we just always convert to float - arg0 = sp.Float(expr.args[0]) if expr.args[0].is_number \ - else expr.args[0] - if len(expr.args) == 1: - return self._print(arg0) - return "%s%s(%s, %s)" % (self._ns, cpp_fun, self._print(arg0), - self._print(sympy_fun(*expr.args[1:]))) - - def _print_Min(self, expr): - from sympy.functions.elementary.miscellaneous import Min - return self._print_min_max(expr, "min", Min) - - def _print_Max(self, expr): - from sympy.functions.elementary.miscellaneous import Max - return self._print_min_max(expr, "max", Max) - - def _get_sym_lines_array( - self, - equations: sp.Matrix, - variable: str, - indent_level: int - ) -> List[str]: - """ - Generate C++ code for assigning symbolic terms in symbols to C++ array - `variable`. 
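To illustrate the printer behavior defined above (a quick sketch, assuming sympy is available): numeric min/max arguments are coerced to float, and M_PI is rewritten to amici::pi by doprint:

    import sympy as sp
    from amici.cxxcodeprinter import AmiciCxxCodePrinter

    printer = AmiciCxxCodePrinter()
    x, y = sp.symbols('x y')
    print(printer.doprint(sp.Min(x, y)))  # std::min(x, y)
    print(printer.doprint(sp.pi))         # amici::pi (instead of M_PI)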
- - :param equations: - vectors of symbolic expressions - - :param variable: - name of the C++ array to assign to - - :param indent_level: - indentation level (number of leading blanks) - - :return: - C++ code as list of lines - """ - return [ - ' ' * indent_level + f'{variable}[{index}] = ' - f'{self.doprint(math)};' - for index, math in enumerate(equations) - if math not in [0, 0.0] - ] - - def _get_sym_lines_symbols( - self, symbols: sp.Matrix, - equations: sp.Matrix, - variable: str, - indent_level: int - ) -> List[str]: - """ - Generate C++ code for where array elements are directly replaced with - their corresponding macro symbol - - :param symbols: - vectors of symbols that equations are assigned to - - :param equations: - vectors of expressions - - :param variable: - name of the C++ array to assign to, only used in comments - - :param indent_level: - indentation level (number of leading blanks) - - :return: - C++ code as list of lines - """ - indent = " " * indent_level - - def format_regular_line(symbol, math, index): - return ( - f'{indent}{self.doprint(symbol)} = {self.doprint(math)};' - f' // {variable}[{index}]'.replace('\n', '\n' + indent) - ) - - if self.extract_cse: - # Extract common subexpressions - cse_sym_prefix = "__amici_cse_" - symbol_generator = numbered_symbols( - cls=sp.Symbol, prefix=cse_sym_prefix) - replacements, reduced_exprs = sp.cse( - equations, - symbols=symbol_generator, - order='none', - list=False, - ) - if replacements: - # we need toposort to handle the dependencies of extracted - # subexpressions - expr_dict = dict(itertools.chain(zip(symbols, reduced_exprs), - replacements)) - sorted_symbols = toposort({ - identifier: { - s for s in definition.free_symbols - if s in expr_dict - } - for (identifier, definition) in expr_dict.items() - }) - symbol_to_idx = {sym: idx for idx, sym in enumerate(symbols)} - - def format_line(symbol: sp.Symbol): - math = expr_dict[symbol] - if str(symbol).startswith(cse_sym_prefix): - return f'{indent}const realtype ' \ - f'{self.doprint(symbol)} ' \ - f'= {self.doprint(math)};' - elif math not in [0, 0.0]: - return format_regular_line( - symbol, math, symbol_to_idx[symbol]) - return [ - line - for symbol_group in sorted_symbols - for symbol in sorted(symbol_group, key=str) - if (line := format_line(symbol)) - ] - - return [ - format_regular_line(sym, math, index) - for index, (sym, math) in enumerate(zip(symbols, equations)) - if math not in [0, 0.0] - ] - - def csc_matrix( - self, - matrix: sp.Matrix, - rownames: List[sp.Symbol], - colnames: List[sp.Symbol], - identifier: Optional[int] = 0, - pattern_only: Optional[bool] = False - ) -> Tuple[ - List[int], List[int], sp.Matrix, List[str], sp.Matrix - ]: - """ - Generates the sparse symbolic identifiers, symbolic identifiers, - sparse matrix, column pointers and row values for a symbolic - variable - - :param matrix: - dense matrix to be sparsified - - :param rownames: - ids of the variable of which the derivative is computed (assuming - matrix is the jacobian) - - :param colnames: - ids of the variable with respect to which the derivative is computed - (assuming matrix is the jacobian) - - :param identifier: - additional identifier that gets appended to symbol names to - ensure their uniqueness in outer loops - - :param pattern_only: - flag for computing sparsity pattern without whole matrix - - :return: - symbol_col_ptrs, symbol_row_vals, sparse_list, symbol_list, - sparse_matrix - """ - idx = 0 - - nrows, ncols = matrix.shape - - if not pattern_only: - sparse_matrix = 
sp.zeros(nrows, ncols) - symbol_list = [] - sparse_list = [] - symbol_col_ptrs = [] - symbol_row_vals = [] - - for col in range(ncols): - symbol_col_ptrs.append(idx) - for row in range(nrows): - if matrix[row, col] == 0: - continue - - symbol_row_vals.append(row) - idx += 1 - symbol_name = f'd{self.doprint(rownames[row])}' \ - f'_d{self.doprint(colnames[col])}' - if identifier: - symbol_name += f'_{identifier}' - symbol_list.append(symbol_name) - if pattern_only: - continue - - sparse_matrix[row, col] = sp.Symbol(symbol_name, real=True) - sparse_list.append(matrix[row, col]) - - if idx == 0: - symbol_col_ptrs = [] # avoid bad memory access for empty matrices - else: - symbol_col_ptrs.append(idx) - - if pattern_only: - sparse_matrix = None - else: - sparse_list = sp.Matrix(sparse_list) - - return symbol_col_ptrs, symbol_row_vals, sparse_list, symbol_list, \ - sparse_matrix - - @staticmethod - def print_bool(expr) -> str: - """Print the boolean value of the given expression""" - return "true" if bool(expr) else "false" - - -def get_switch_statement(condition: str, cases: Dict[int, List[str]], - indentation_level: Optional[int] = 0, - indentation_step: Optional[str] = ' ' * 4): - """ - Generate code for switch statement - - :param condition: - Condition for switch - - :param cases: - Cases as dict with expressions as keys and statement as - list of strings - - :param indentation_level: - indentation level - - :param indentation_step: - indentation whitespace per level - - :return: - Code for switch expression as list of strings - - """ - lines = [] - - if not cases: - return lines - - indent0 = indentation_level * indentation_step - indent1 = (indentation_level + 1) * indentation_step - indent2 = (indentation_level + 2) * indentation_step - for expression, statements in cases.items(): - if statements: - lines.extend([ - f'{indent1}case {expression}:', - *(f"{indent2}{statement}" for statement in statements), - f'{indent2}break;' - ]) - - if lines: - lines.insert(0, f'{indent0}switch({condition}) {{') - lines.append(indent0 + '}') - - return lines diff --git a/python/amici/gradient_check.py b/python/amici/gradient_check.py deleted file mode 100644 index 76a17817c2..0000000000 --- a/python/amici/gradient_check.py +++ /dev/null @@ -1,307 +0,0 @@ -""" -Finite Difference Check ------------------------ -This module provides functions to automatically check correctness of amici -computed sensitivities using finite difference approximations -""" - -from . import ( - runAmiciSimulation, SensitivityOrder, AMICI_SUCCESS, SensitivityMethod, - Model, Solver, ExpData, ReturnData, ParameterScaling) -import numpy as np -import copy - -from typing import Callable, Optional, List, Sequence - - -def check_finite_difference( - x0: Sequence[float], - model: Model, - solver: Solver, - edata: ExpData, - ip: int, - fields: List[str], - atol: Optional[float] = 1e-4, - rtol: Optional[float] = 1e-4, - epsilon: Optional[float] = 1e-3 -) -> None: - """ - Checks the computed sensitivity based derivatives against a finite - difference approximation. 
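The scheme implemented below is a central finite difference; schematically, with eps corresponding to the epsilon argument:

    # fd ~ (f(p_f) - f(p_b)) / (p_f - p_b), where per parameter p:
    #   p_f = p + eps/2,       p_b = p - eps/2        (scaled or zero p)
    #   p_f = p * (1 + eps/2), p_b = p / (1 + eps/2)  (unscaled nonzero p)
    # and the result is compared entry-wise against rdata['s<field>'].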
- - :param x0: - parameter value at which to check finite difference approximation - - :param model: - amici model - - :param solver: - amici solver - - :param edata: - experimental data - - :param ip: - parameter index - - :param fields: - rdata fields for which to check the gradient - - :param atol: - absolute tolerance for comparison - - :param rtol: - relative tolerance for comparison - - :param epsilon: - finite difference step-size - - """ - og_sensitivity_order = solver.getSensitivityOrder() - og_parameters = model.getParameters() - og_plist = model.getParameterList() - if edata: - og_eplist = edata.plist - - # sensitivity - p = copy.deepcopy(x0) - plist = [ip] - - model.setParameters(p) - model.setParameterList(plist) - if edata: - edata.plist = plist - - # simulation with gradient - if int(og_sensitivity_order) < int(SensitivityOrder.first): - solver.setSensitivityOrder(SensitivityOrder.first) - rdata = runAmiciSimulation(model, solver, edata) - if rdata['status'] != AMICI_SUCCESS: - raise AssertionError(f"Simulation failed (status {rdata['status']})") - - # finite difference - solver.setSensitivityOrder(SensitivityOrder.none) - - pf = copy.deepcopy(x0) - pb = copy.deepcopy(x0) - pscale = model.getParameterScale()[ip] - if x0[ip] == 0 or pscale != int(ParameterScaling.none): - pf[ip] += epsilon / 2 - pb[ip] -= epsilon / 2 - else: - pf[ip] *= 1 + epsilon / 2 - pb[ip] /= 1 + epsilon / 2 - - # forward: - model.setParameters(pf) - rdataf = runAmiciSimulation(model, solver, edata) - if rdataf['status'] != AMICI_SUCCESS: - raise AssertionError(f"Simulation failed (status {rdataf['status']})") - - # backward: - model.setParameters(pb) - rdatab = runAmiciSimulation(model, solver, edata) - if rdatab['status'] != AMICI_SUCCESS: - raise AssertionError(f"Simulation failed (status {rdatab['status']})") - - for field in fields: - sensi_raw = rdata[f's{field}'] - fd = (rdataf[field] - rdatab[field]) / (pf[ip] - pb[ip]) - if len(sensi_raw.shape) == 1: - sensi = sensi_raw[0] - elif len(sensi_raw.shape) == 2: - sensi = sensi_raw[:, 0] - elif len(sensi_raw.shape) == 3: - sensi = sensi_raw[:, 0, :] - else: - raise NotImplementedError() - - _check_close(sensi, fd, atol=atol, rtol=rtol, field=field, ip=ip) - - solver.setSensitivityOrder(og_sensitivity_order) - model.setParameters(og_parameters) - model.setParameterList(og_plist) - if edata: - edata.plist = og_eplist - - -def check_derivatives( - model: Model, - solver: Solver, - edata: Optional[ExpData] = None, - atol: Optional[float] = 1e-4, - rtol: Optional[float] = 1e-4, - epsilon: Optional[float] = 1e-3, - check_least_squares: bool = True, - skip_zero_pars: bool = False -) -> None: - """ - Finite differences check for likelihood gradient. - - :param model: - amici model - - :param solver: - amici solver - - :param edata: - experimental data - - :param atol: - absolute tolerance for comparison - - :param rtol: - relative tolerance for comparison - - :param epsilon: - finite difference step-size - - :param check_least_squares: - whether to check least squares related values.
- - :param skip_zero_pars: - whether to perform FD checks for parameters that are zero - - """ - p = np.array(model.getParameters()) - - og_sens_order = solver.getSensitivityOrder() - - if int(og_sens_order) < int(SensitivityOrder.first): - solver.setSensitivityOrder(SensitivityOrder.first) - rdata = runAmiciSimulation(model, solver, edata) - solver.setSensitivityOrder(og_sens_order) - - if rdata['status'] != AMICI_SUCCESS: - raise AssertionError(f"Simulation failed (status {rdata['status']}") - - fields = [] - - if solver.getSensitivityMethod() == SensitivityMethod.forward and \ - solver.getSensitivityOrder() <= SensitivityOrder.first: - fields.append('x') - - leastsquares_applicable = \ - solver.getSensitivityMethod() == SensitivityMethod.forward \ - and edata is not None - - if 'ssigmay' in rdata.keys() \ - and rdata['ssigmay'] is not None \ - and rdata['ssigmay'].any() and not model.getAddSigmaResiduals(): - leastsquares_applicable = False - - if check_least_squares and leastsquares_applicable: - fields += ['res', 'y'] - - _check_results(rdata, 'FIM', np.dot(rdata['sres'].T, rdata['sres']), - atol=1e-8, rtol=1e-4) - _check_results(rdata, 'sllh', -np.dot(rdata['res'].T, rdata['sres']), - atol=1e-8, rtol=1e-4) - - if edata is not None: - fields.append('llh') - - for ip, pval in enumerate(p): - if pval == 0.0 and skip_zero_pars: - continue - check_finite_difference(p, model, solver, edata, ip, fields, - atol=atol, rtol=rtol, epsilon=epsilon) - - -def _check_close( - result: np.array, - expected: np.array, - atol: float, - rtol: float, - field: str, - ip: Optional[int] = None, - verbose: Optional[bool] = True, -) -> None: - """ - Compares computed values against expected values and provides rich - output information. - - :param result: - computed values - - :param expected: - expected values - - :param field: - rdata field for which the gradient is checked, only for error reporting - - :param atol: - absolute tolerance for comparison - - :param rtol: - relative tolerance for comparison - - :param ip: - parameter index, for more informative output - - :param verbose: - produce a more verbose error message in case of unmatched expectations - """ - close = np.isclose(result, expected, atol=atol, rtol=rtol, equal_nan=True) - if close.all(): - return - - if ip is None: - index_str = '' - check_type = 'Regression check' - else: - index_str = f'at index ip={ip} ' - check_type = 'FD check' - - lines = [f'{check_type} failed for {field} {index_str}for ' - f'{close.size - close.sum()} indices:'] - if verbose: - for idx in np.argwhere(~close): - idx = tuple(idx) - if result.shape: - rr = result[idx] - else: - rr = result - lines.append( - f"\tat {idx}: Expected {expected[idx]}, got {rr}") - adev = np.abs(result - expected) - rdev = np.abs((result - expected) / (expected + atol)) - lines.append(f'max(adev): {adev.max()}, max(rdev): {rdev.max()}') - - raise AssertionError("\n".join(lines)) - - -def _check_results( - rdata: ReturnData, - field: str, - expected: np.array, - atol: float, - rtol: float - ) -> None: - """ - Checks whether rdata[field] agrees with expected according to provided - tolerances. 
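For intuition, the tolerance test that `_check_close` builds on behaves like this (plain numpy sketch with made-up values):

    import numpy as np

    sensi = np.array([1.0, 2.0])   # computed sensitivities
    fd = np.array([1.00005, 2.1])  # finite difference approximation
    np.isclose(sensi, fd, atol=1e-4, rtol=1e-4)  # -> array([ True, False])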
- - :param rdata: - simulation results as returned by - :meth:`amici.amici.runAmiciSimulation` - - :param field: - name of the field to check - - :param expected: - expected values - - :param atol: - absolute tolerance for comparison - - :param rtol: - relative tolerance for comparison - """ - - result = rdata[field] - if type(result) is float: - result = np.array(result) - - _check_close(result=result, expected=expected, - atol=atol, rtol=rtol, field=field) diff --git a/python/amici/import_utils.py b/python/amici/import_utils.py deleted file mode 100644 index 65d7285a65..0000000000 --- a/python/amici/import_utils.py +++ /dev/null @@ -1,682 +0,0 @@ -"""Miscellaneous functions related to model import, independent of any specific - model format""" -import enum -import itertools as itt -import numbers -import sys -from typing import (Any, Callable, Dict, Iterable, Optional, Sequence, - SupportsFloat, Tuple, Union) - -import sympy as sp -from sympy.functions.elementary.piecewise import ExprCondPair -from sympy.logic.boolalg import BooleanAtom -from toposort import toposort - -RESERVED_SYMBOLS = ['x', 'k', 'p', 'y', 'w', 'h', 't', 'AMICI_EMPTY_BOLUS'] - -try: - import pysb -except ImportError: - pysb = None - -SymbolDef = Dict[sp.Symbol, Union[Dict[str, sp.Expr], sp.Expr]] - - -# Monkey-patch toposort CircularDependencyError to handle non-sortable objects, -# such as sympy objects -class CircularDependencyError(ValueError): - def __init__(self, data): - # Sort the data just to make the output consistent, for use in - # error messages. That's convenient for doctests. - s = "Circular dependencies exist among these items: {{{}}}".format( - ", ".join( - "{!r}:{!r}".format(key, value) for key, value in sorted( - {str(k): v for k, v in data.items()}.items()) - ) - ) - super(CircularDependencyError, self).__init__(s) - self.data = data - - -setattr(sys.modules["toposort"], "CircularDependencyError", - CircularDependencyError) - - -class ObservableTransformation(str, enum.Enum): - """ - Different modes of observable transformation. - """ - LOG10 = 'log10' - LOG = 'log' - LIN = 'lin' - - -def noise_distribution_to_observable_transformation( - noise_distribution: Union[str, Callable] -) -> ObservableTransformation: - """ - Parse noise distribution string and extract observable transformation - - :param noise_distribution: - see :func:`noise_distribution_to_cost_function` - - :return: - observable transformation - """ - if isinstance(noise_distribution, str): - if noise_distribution.startswith('log-'): - return ObservableTransformation.LOG - if noise_distribution.startswith('log10-'): - return ObservableTransformation.LOG10 - - return ObservableTransformation.LIN - - -def noise_distribution_to_cost_function( - noise_distribution: Union[str, Callable] -) -> Callable[[str], str]: - """ - Parse noise distribution string to a cost function definition amici can - work with. - - The noise distributions listed in the following are supported. :math:`m` - denotes the measurement, :math:`y` the simulation, and :math:`\\sigma` a - distribution scale parameter - (currently, AMICI only supports a single distribution parameter). - - - `'normal'`, `'lin-normal'`: A normal distribution: - - .. math:: - \\pi(m|y,\\sigma) = \\frac{1}{\\sqrt{2\\pi}\\sigma}\\ - exp\\left(-\\frac{(m-y)^2}{2\\sigma^2}\\right) - - - `'log-normal'`: A log-normal distribution (i.e. log(m) is - normally distributed): - - .. 
math::
-         \\pi(m|y,\\sigma) = \\frac{1}{\\sqrt{2\\pi}\\sigma m}\\
-         exp\\left(-\\frac{(\\log m - \\log y)^2}{2\\sigma^2}\\right)
-
-    - `'log10-normal'`: A log10-normal distribution (i.e. log10(m) is
-      normally distributed):
-
-      .. math::
-         \\pi(m|y,\\sigma) = \\frac{1}{\\sqrt{2\\pi}\\sigma m \\log(10)}\\
-         exp\\left(-\\frac{(\\log_{10} m - \\log_{10} y)^2}{2\\sigma^2}\\right)
-
-    - `'laplace'`, `'lin-laplace'`: A Laplace distribution:
-
-      .. math::
-         \\pi(m|y,\\sigma) = \\frac{1}{2\\sigma}
-         \\exp\\left(-\\frac{|m-y|}{\\sigma}\\right)
-
-    - `'log-laplace'`: A log-Laplace distribution (i.e. log(m) is Laplace
-      distributed):
-
-      .. math::
-         \\pi(m|y,\\sigma) = \\frac{1}{2\\sigma m}
-         \\exp\\left(-\\frac{|\\log m - \\log y|}{\\sigma}\\right)
-
-    - `'log10-laplace'`: A log10-Laplace distribution (i.e. log10(m) is
-      Laplace distributed):
-
-      .. math::
-         \\pi(m|y,\\sigma) = \\frac{1}{2\\sigma m \\log(10)}
-         \\exp\\left(-\\frac{|\\log_{10} m - \\log_{10} y|}{\\sigma}\\right)
-
-    - `'binomial'`, `'lin-binomial'`: A (continuation of a discrete) binomial
-      distribution, parameterized via the success probability
-      :math:`p=\\sigma`:
-
-      .. math::
-         \\pi(m|y,\\sigma) = \\operatorname{Heaviside}(y-m) \\cdot
-         \\frac{\\Gamma(y+1)}{\\Gamma(m+1) \\Gamma(y-m+1)}
-         \\sigma^m (1-\\sigma)^{(y-m)}
-
-    - `'negative-binomial'`, `'lin-negative-binomial'`: A (continuation of a
-      discrete) negative binomial distribution, with `mean = y`,
-      parameterized via success probability `p`:
-
-      .. math::
-
-         \\pi(m|y,\\sigma) = \\frac{\\Gamma(m+r)}{\\Gamma(m+1) \\Gamma(r)}
-         (1-\\sigma)^m \\sigma^r
-
-      where
-
-      .. math::
-         r = \\frac{1-\\sigma}{\\sigma} y
-
-    The distributions above are for a single data point.
-    For a collection :math:`D=\\{m_i\\}_i` of data points and corresponding
-    simulations :math:`Y=\\{y_i\\}_i` and noise parameters
-    :math:`\\Sigma=\\{\\sigma_i\\}_i`, AMICI assumes independence,
-    i.e. the full distribution is
-
-    .. math::
-       \\pi(D|Y,\\Sigma) = \\prod_i \\pi(m_i|y_i,\\sigma_i)
-
-    AMICI uses the logarithm :math:`\\log(\\pi(m|y,\\sigma))`.
-
-    In addition to the above-mentioned distributions, it is also possible to
-    pass a function taking a symbol string and returning a log-distribution
-    string with variables '{str_symbol}', 'm{str_symbol}', 'sigma{str_symbol}'
-    for y, m, sigma, respectively.
-
-    :param noise_distribution: An identifier specifying a noise model.
-        Possible values are
-
-        {`'normal'`, `'lin-normal'`, `'log-normal'`, `'log10-normal'`,
-        `'laplace'`, `'lin-laplace'`, `'log-laplace'`, `'log10-laplace'`,
-        `'binomial'`, `'lin-binomial'`, `'negative-binomial'`,
-        `'lin-negative-binomial'`}
-
-        For the meaning of the values see above.
-
-    :return: A function that takes a symbol string and then creates a cost
-        function string (negative log-likelihood) from it, which can be
-        sympified.
- """ - - if isinstance(noise_distribution, Callable): - return noise_distribution - - if noise_distribution in ['normal', 'lin-normal']: - y_string = '0.5*log(2*pi*{sigma}**2) + 0.5*(({y} - {m}) / {sigma})**2' - elif noise_distribution == 'log-normal': - y_string = '0.5*log(2*pi*{sigma}**2*{m}**2) ' \ - '+ 0.5*((log({y}) - log({m})) / {sigma})**2' - elif noise_distribution == 'log10-normal': - y_string = '0.5*log(2*pi*{sigma}**2*{m}**2*log(10)**2) ' \ - '+ 0.5*((log({y}, 10) - log({m}, 10)) / {sigma})**2' - elif noise_distribution in ['laplace', 'lin-laplace']: - y_string = 'log(2*{sigma}) + Abs({y} - {m}) / {sigma}' - elif noise_distribution == 'log-laplace': - y_string = 'log(2*{sigma}*{m}) + Abs(log({y}) - log({m})) / {sigma}' - elif noise_distribution == 'log10-laplace': - y_string = 'log(2*{sigma}*{m}*log(10)) ' \ - '+ Abs(log({y}, 10) - log({m}, 10)) / {sigma}' - elif noise_distribution in ['binomial', 'lin-binomial']: - # Binomial noise model parameterized via success probability p - y_string = '- log(Heaviside({y} - {m})) - loggamma({y}+1) ' \ - '+ loggamma({m}+1) + loggamma({y}-{m}+1) ' \ - '- {m} * log({sigma}) - ({y} - {m}) * log(1-{sigma})' - elif noise_distribution in ['negative-binomial', 'lin-negative-binomial']: - # Negative binomial noise model of the number of successes m - # (data) before r=(1-sigma)/sigma * y failures occur, - # with mean number of successes y (simulation), - # parameterized via success probability p = sigma. - r = '{y} * (1-{sigma}) / {sigma}' - y_string = f'- loggamma({{m}}+{r}) + loggamma({{m}}+1) ' \ - f'+ loggamma({r}) - {r} * log(1-{{sigma}}) ' \ - f'- {{m}} * log({{sigma}})' - else: - raise ValueError( - f"Cost identifier {noise_distribution} not recognized.") - - def nllh_y_string(str_symbol): - y, m, sigma = _get_str_symbol_identifiers(str_symbol) - return y_string.format(y=y, m=m, sigma=sigma) - - return nllh_y_string - - -def _get_str_symbol_identifiers(str_symbol: str) -> tuple: - """Get identifiers for simulation, measurement, and sigma.""" - y, m, sigma = f"{str_symbol}", f"m{str_symbol}", f"sigma{str_symbol}" - return y, m, sigma - - -def smart_subs_dict(sym: sp.Expr, - subs: SymbolDef, - field: Optional[str] = None, - reverse: bool = True) -> sp.Expr: - """ - Substitutes expressions completely flattening them out. Requires - sorting of expressions with toposort. - - :param sym: - Symbolic expression in which expressions will be substituted - - :param subs: - Substitutions - - :param field: - Field of substitution expressions in subs.values(), if applicable - - :param reverse: - Whether ordering in subs should be reversed. Note that substitution - requires the reverse order of what is required for evaluation. 
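For example, the `'normal'` template above expands as follows for a hypothetical observable id `y1`:

    nllh = noise_distribution_to_cost_function('normal')('y1')
    # '0.5*log(2*pi*sigmay1**2) + 0.5*((y1 - my1) / sigmay1)**2'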
-
-    :return:
-        Substituted symbolic expression
-    """
-    s = [
-        (eid, expr[field] if field is not None else expr)
-        for eid, expr in subs.items()
-    ]
-    if reverse:
-        s.reverse()
-    for substitution in s:
-        # note that substitution may change free symbols, so we have to do
-        # this recursively
-        if sym.has(substitution[0]):
-            sym = sym.subs(*substitution)
-    return sym
-
-
-def smart_subs(element: sp.Expr, old: sp.Symbol, new: sp.Expr) -> sp.Expr:
-    """
-    Optimized substitution that checks whether anything needs to be done first
-
-    :param element:
-        substitution target
-
-    :param old:
-        to be substituted
-
-    :param new:
-        substitution value
-
-    :return:
-        substituted expression
-    """
-    return element.subs(old, new) if element.has(old) else element
-
-
-def toposort_symbols(symbols: SymbolDef,
-                     field: Optional[str] = None) -> SymbolDef:
-    """
-    Topologically sort symbol definitions according to their interdependency
-
-    :param symbols:
-        symbol definitions
-
-    :param field:
-        field of definition.values() that is used to compute interdependency
-
-    :return:
-        ordered symbol definitions
-    """
-    sorted_symbols = toposort({
-        identifier: {
-            s for s in (
-                definition[field] if field is not None else definition
-            ).free_symbols
-            if s in symbols
-        }
-        for identifier, definition
-        in symbols.items()
-    })
-    return {
-        s: symbols[s]
-        for symbol_group in sorted_symbols
-        for s in sorted(symbol_group, key=str)
-    }
-
-
-def _parse_special_functions(sym: sp.Expr, toplevel: bool = True) -> sp.Expr:
-    """
-    Recursively checks the symbolic expression for functions which have to
-    be parsed in a special way, such as piecewise functions
-
-    :param sym:
-        symbolic expressions
-
-    :param toplevel:
-        as this is called recursively, are we in the top level expression?
-    """
-    args = tuple(arg if arg.__class__.__name__ == 'piecewise'
-                 and sym.__class__.__name__ == 'piecewise'
-                 else _parse_special_functions(arg, False)
-                 for arg in sym.args)
-
-    fun_mappings = {
-        'times': sp.Mul,
-        'xor': sp.Xor,
-        'abs': sp.Abs,
-        'min': sp.Min,
-        'max': sp.Max,
-        'ceil': sp.functions.ceiling,
-        'floor': sp.functions.floor,
-        'factorial': sp.functions.factorial,
-        'arcsin': sp.functions.asin,
-        'arccos': sp.functions.acos,
-        'arctan': sp.functions.atan,
-        'arccot': sp.functions.acot,
-        'arcsec': sp.functions.asec,
-        'arccsc': sp.functions.acsc,
-        'arcsinh': sp.functions.asinh,
-        'arccosh': sp.functions.acosh,
-        'arctanh': sp.functions.atanh,
-        'arccoth': sp.functions.acoth,
-        'arcsech': sp.functions.asech,
-        'arccsch': sp.functions.acsch,
-    }
-
-    if sym.__class__.__name__ in fun_mappings:
-        return fun_mappings[sym.__class__.__name__](*args)
-
-    elif sym.__class__.__name__ == 'piecewise' \
-            or isinstance(sym, sp.Piecewise):
-        if isinstance(sym, sp.Piecewise):
-            # this is sympy piecewise, can't be nested
-            denested_args = args
-        else:
-            # this is sbml piecewise, can be nested
-            denested_args = _denest_piecewise(args)
-        return _parse_piecewise_to_heaviside(denested_args)
-
-    if sym.__class__.__name__ == 'plus' and not sym.args:
-        return sp.Float(0.0)
-
-    if isinstance(sym, (sp.Function, sp.Mul, sp.Add, sp.Pow)):
-        sym._args = args
-
-    elif toplevel and isinstance(sym, BooleanAtom):
-        # Replace boolean constants by numbers so they can be differentiated.
-        # We must not replace inside a Piecewise function; therefore, we only
-        # replace if the complete expression consists only of a Boolean value.
- sym = sp.Float(int(bool(sym))) - - return sym - - -def _denest_piecewise( - args: Sequence[Union[sp.Expr, sp.logic.boolalg.Boolean, bool]] -) -> Tuple[Union[sp.Expr, sp.logic.boolalg.Boolean, bool]]: - """ - Denest piecewise functions that contain piecewise as condition - - :param args: - Arguments to the piecewise function - - :return: - Arguments where conditions no longer contain piecewise functions and - the conditional dependency is flattened out - """ - args_out = [] - for coeff, cond in grouper(args, 2, True): - # handling of this case is explicitely disabled in - # _parse_special_functions as keeping track of coeff/cond - # arguments is tricky. Simpler to just parse them out here - if coeff.__class__.__name__ == 'piecewise': - coeff = _parse_special_functions(coeff, False) - - # we can have conditions that are piecewise function - # returning True or False - if cond.__class__.__name__ == 'piecewise': - # this keeps track of conditional that the previous - # piece was picked - previous_was_picked = sp.false - # recursively denest those first - for sub_coeff, sub_cond in grouper( - _denest_piecewise(cond.args), 2, True - ): - # flatten the individual pieces - pick_this = sp.And( - sp.Not(previous_was_picked), sub_cond - ) - if sub_coeff == sp.true: - args_out.extend([coeff, pick_this]) - previous_was_picked = pick_this - - else: - args_out.extend([coeff, cond]) - # cut off last condition as that's the default - return tuple(args_out[:-1]) - - -def _parse_piecewise_to_heaviside(args: Iterable[sp.Expr]) -> sp.Expr: - """ - Piecewise functions cannot be transformed into C++ right away, but AMICI - has a special interface for Heaviside functions, so we transform them. - - :param args: - symbolic expressions for arguments of the piecewise function - """ - # how many condition-expression pairs will we have? - formula = sp.Float(0.0) - not_condition = sp.Float(1.0) - - if all(isinstance(arg, ExprCondPair) for arg in args): - # sympy piecewise - grouped_args = args - else: - # smbl piecewise - grouped_args = grouper(args, 2, True) - - for coeff, trigger in grouped_args: - if isinstance(coeff, BooleanAtom): - coeff = sp.Float(int(bool(coeff))) - - if trigger == sp.true: - return formula + coeff * not_condition - - if trigger == sp.false: - continue - - tmp = _parse_heaviside_trigger(trigger) - formula += coeff * sp.simplify(not_condition * tmp) - not_condition *= (1-tmp) - - return formula - - -def _parse_heaviside_trigger(trigger: sp.Expr) -> sp.Expr: - """ - Recursively translates a boolean trigger function into a real valued - root function - - :param trigger: - :return: real valued root function expression - """ - if trigger.is_Relational: - root = trigger.args[0] - trigger.args[1] - _check_unsupported_functions(root, 'sympy.Expression') - - # normalize such that we always implement <, - # this ensures that we can correctly evaluate the condition if - # simulation starts at H(0). 
This is achieved by translating - # conditionals into Heaviside functions H that is implemented as unit - # step with H(0) = 1 - if isinstance(trigger, sp.core.relational.StrictLessThan): - # x < y => x - y < 0 => r < 0 - return 1 - sp.Heaviside(root) - if isinstance(trigger, sp.core.relational.LessThan): - # x <= y => not(y < x) => not(y - x < 0) => not -r < 0 - return sp.Heaviside(-root) - if isinstance(trigger, sp.core.relational.StrictGreaterThan): - # y > x => y - x < 0 => -r < 0 - return 1 - sp.Heaviside(-root) - if isinstance(trigger, sp.core.relational.GreaterThan): - # y >= x => not(x < y) => not(x - y < 0) => not r < 0 - return sp.Heaviside(root) - - # or(x,y) = not(and(not(x),not(y)) - if isinstance(trigger, sp.Or): - return 1-sp.Mul(*[1-_parse_heaviside_trigger(arg) - for arg in trigger.args]) - - if isinstance(trigger, sp.And): - return sp.Mul(*[_parse_heaviside_trigger(arg) - for arg in trigger.args]) - - raise RuntimeError( - 'AMICI can not parse piecewise/event trigger functions with argument ' - f'{trigger}.' - ) - - -def grouper(iterable: Iterable, n: int, - fillvalue: Any = None) -> Iterable[Tuple[Any]]: - """ - Collect data into fixed-length chunks or blocks - - grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx" - - :param iterable: - any iterable - - :param n: - chunk length - - :param fillvalue: - padding for last chunk if length < n - - :return: itertools.zip_longest of requested chunks - """ - args = [iter(iterable)] * n - return itt.zip_longest(*args, fillvalue=fillvalue) - - -def _check_unsupported_functions(sym: sp.Expr, - expression_type: str, - full_sym: Optional[sp.Expr] = None): - """ - Recursively checks the symbolic expression for unsupported symbolic - functions - - :param sym: - symbolic expressions - - :param expression_type: - type of expression, only used when throwing errors - - :param full sym: - outermost symbolic expression in recursive checks, only used for errors - """ - if full_sym is None: - full_sym = sym - - # note that sp.functions.factorial, sp.functions.ceiling, - # sp.functions.floor applied to numbers should be simplified out and - # thus pass this test - unsupported_functions = ( - sp.functions.factorial, sp.functions.ceiling, sp.functions.floor, - sp.functions.sec, sp.functions.csc, sp.functions.cot, - sp.functions.asec, sp.functions.acsc, sp.functions.acot, - sp.functions.acsch, sp.functions.acoth, - sp.Mod, sp.core.function.UndefinedFunction - ) - - if isinstance(sym.func, unsupported_functions) \ - or isinstance(sym, unsupported_functions): - raise RuntimeError(f'Encountered unsupported expression ' - f'"{sym.func}" of type ' - f'"{type(sym.func)}" as part of a ' - f'{expression_type}: "{full_sym}"!') - for arg in list(sym.args): - _check_unsupported_functions(arg, expression_type) - - -def cast_to_sym(value: Union[SupportsFloat, sp.Expr, BooleanAtom], - input_name: str) -> sp.Expr: - """ - Typecasts the value to :py:class:`sympy.Float` if possible, and ensures the - value is a symbolic expression. 
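The effect of this transformation can be sketched in plain sympy; the expression below mirrors what `_parse_piecewise_to_heaviside` produces for `Piecewise((1, t < 5), (2, True))`:

    import sympy as sp

    t = sp.Symbol('t', real=True)
    # coefficient 1 is active while t < 5, coefficient 2 otherwise:
    expr = 1 * (1 - sp.Heaviside(t - 5)) + 2 * sp.Heaviside(t - 5)
    assert expr.subs(t, 0) == 1 and expr.subs(t, 10) == 2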
- - :param value: - value to be cast - - :param input_name: - name of input variable - - :return: - typecast value - """ - if isinstance(value, (sp.RealNumber, numbers.Number)): - value = sp.Float(float(value)) - elif isinstance(value, BooleanAtom): - value = sp.Float(float(bool(value))) - - if not isinstance(value, sp.Expr): - raise TypeError(f"Couldn't cast {input_name} to sympy.Expr, was " - f"{type(value)}") - - return value - - -def generate_measurement_symbol(observable_id: Union[str, sp.Symbol]): - """ - Generates the appropriate measurement symbol for the provided observable - - :param observable_id: - symbol (or string representation) of the observable - - :return: - symbol for the corresponding measurement - """ - if not isinstance(observable_id, str): - observable_id = strip_pysb(observable_id) - return symbol_with_assumptions(f'm{observable_id}') - - -def generate_regularization_symbol(observable_id: Union[str, sp.Symbol]): - """ - Generates the appropriate regularization symbol for the provided observable - - :param observable_id: - symbol (or string representation) of the observable - - :return: - symbol for the corresponding regularization - """ - if not isinstance(observable_id, str): - observable_id = strip_pysb(observable_id) - return symbol_with_assumptions(f'r{observable_id}') - - -def generate_flux_symbol( - reaction_index: int, - name: Optional[str] = None -) -> sp.Symbol: - """ - Generate identifier symbol for a reaction flux. - This function will always return the same unique python object for a - given entity. - - :param reaction_index: - index of the reaction to which the flux corresponds - :param name: - an optional identifier of the reaction to which the flux corresponds - :return: - identifier symbol - """ - if name is not None: - return symbol_with_assumptions(name) - - return symbol_with_assumptions(f'flux_r{reaction_index}') - - -def symbol_with_assumptions(name: str): - """ - Central function to create symbols with consistent, canonical assumptions - - :param name: - name of the symbol - - :return: - symbol with canonical assumptions - """ - return sp.Symbol(name, real=True) - - -def strip_pysb(symbol: sp.Basic) -> sp.Basic: - """ - Strips pysb info from a :class:`pysb.Component` object - - :param symbol: - symbolic expression - - :return: - stripped expression - """ - # strip pysb type and transform into a flat sympy.Symbol. 
- # this ensures that the pysb type specific __repr__ is used when converting - # to string - if pysb and isinstance(symbol, pysb.Component): - return sp.Symbol(symbol.name, real=True) - else: - # in this case we will use sympy specific transform anyways - return symbol diff --git a/python/amici/logging.py b/python/amici/logging.py deleted file mode 100644 index eae753b29e..0000000000 --- a/python/amici/logging.py +++ /dev/null @@ -1,202 +0,0 @@ -""" -Logging -------- -This module provides custom logging functionality for other amici modules -""" - -import logging -import platform -import socket -import amici -import os -import warnings -import time -import functools - -from inspect import getouterframes, currentframe - -LOG_LEVEL_ENV_VAR = 'AMICI_LOG' -BASE_LOGGER_NAME = 'amici' -# Supported values for LOG_LEVEL_ENV_VAR -NAMED_LOG_LEVELS = {'NOTSET': logging.NOTSET, - 'DEBUG': logging.DEBUG, - 'INFO': logging.INFO, - 'WARNING': logging.WARNING, - 'ERROR': logging.ERROR, - 'CRITICAL': logging.CRITICAL} - -from typing import Optional, Callable, Union - - -def _setup_logger(level: Optional[int] = logging.WARNING, - console_output: Optional[bool] = True, - file_output: Optional[bool] = False, - capture_warnings: Optional[bool] = True) -> logging.Logger: - """ - Set up a new logging.Logger for AMICI logging - - :param level: - Logging level, typically using a constant like logging.INFO or - logging.DEBUG - - :param console_output: - Set up a default console log handler if True (default) - - :param file_output: - Supply a filename to copy all log output to that file, or - set to False to disable (default) - - :param capture_warnings: - Capture warnings from Python's warnings module if True (default) - - :return: - A :class:`logging.Logger` object for AMICI logging. Note that other - AMICI modules - should use a logger specific to their namespace instead by calling - :func:`get_logger`. - """ - log = logging.getLogger(BASE_LOGGER_NAME) - - # Logging level can be overridden with environment variable - if LOG_LEVEL_ENV_VAR in os.environ: - try: - level = int(os.environ[LOG_LEVEL_ENV_VAR]) - except ValueError: - # Try parsing as a name - level_name = os.environ[LOG_LEVEL_ENV_VAR] - if level_name in NAMED_LOG_LEVELS.keys(): - level = NAMED_LOG_LEVELS[level_name] - else: - raise ValueError(f'Environment variable {LOG_LEVEL_ENV_VAR} ' - f'contains an invalid value "{level_name}".' 
- f' If set, its value must be one of ' - f'{", ".join(NAMED_LOG_LEVELS.keys())}' - f' (case-sensitive) or an integer log level.') - - log.setLevel(level) - - # Remove default logging handler - log.handlers = [] - - log_fmt = logging.Formatter('%(asctime)s.%(msecs).3d - %(name)s - ' - '%(levelname)s - %(message)s', - datefmt='%Y-%m-%d %H:%M:%S') - - if console_output: - stream_handler = logging.StreamHandler() - stream_handler.setFormatter(log_fmt) - log.addHandler(stream_handler) - - if file_output: - file_handler = logging.FileHandler(file_output) - file_handler.setFormatter(log_fmt) - log.addHandler(file_handler) - - log.info('Logging started on AMICI version %s', amici.__version__) - - log.debug('OS Platform: %s', platform.platform()) - log.debug('Python version: %s', platform.python_version()) - log.debug('Hostname: %s', socket.getfqdn()) - - logging.captureWarnings(capture_warnings) - - return log - - -def set_log_level(logger: logging.Logger, log_level: Union[int, bool]) -> None: - if log_level is not None and log_level is not False: - if isinstance(log_level, bool): - log_level = logging.DEBUG - elif not isinstance(log_level, int): - raise ValueError('log_level must be a boolean, integer or None') - - if logger.getEffectiveLevel() != log_level: - logger.debug('Changing log_level from %d to %d' % ( - logger.getEffectiveLevel(), log_level)) - logger.setLevel(log_level) - - -def get_logger(logger_name: Optional[str] = BASE_LOGGER_NAME, - log_level: Optional[int] = None, - **kwargs) -> logging.Logger: - """ - Returns (if extistant) or creates an AMICI logger - - If the AMICI base logger has already been set up, this method will - return it or any of its descendant loggers without overriding the - settings - i.e. any values supplied as kwargs will be ignored. - - :param logger_name: - Get a logger for a specific namespace, typically __name__ - for code outside of classes or self.__module__ inside a class - - :param log_level: - Override the default or preset log level for the requested logger. - None or False uses the default or preset value. True evaluates to - logging.DEBUG. Any integer is used directly. - - :param console_output: - Set up a default console log handler if True (default). Only used when - the AMICI logger hasn't been set up yet. - - :param file_output: - Supply a filename to copy all log output to that file, or set to - False to disable (default). Only used when the AMICI logger hasn't - been set up yet. - - :param capture_warnings: - Capture warnings from Python's warnings module if True (default). - Only used when the AMICI logger hasn't been set up yet.. 
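Typical usage, as a minimal sketch (`AMICI_LOG` is the environment variable parsed above):

    import logging
    from amici.logging import get_logger

    logger = get_logger(__name__, logging.DEBUG)
    logger.debug('model import started')
    # alternatively, export e.g. AMICI_LOG=DEBUG before the first
    # get_logger() call to override the default level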
- - :return: - A logging.Logger object with the requested name - """ - if BASE_LOGGER_NAME not in logging.Logger.manager.loggerDict.keys(): - _setup_logger(**kwargs) - elif kwargs: - warnings.warn('AMICI logger already exists, ignoring keyword ' - 'arguments to setup_logger') - - logger = logging.getLogger(logger_name) - - set_log_level(logger, log_level) - - return logger - - -def log_execution_time(description: str, logger: logging.Logger) -> Callable: - """ - Parameterized function decorator that enables automatic execution time - tracking - - :param description: - Description of what the decorated function does - - :param logger: - Logger to which execution timing will be printed - """ - def decorator_timer(func): - @functools.wraps(func) - def wrapper_timer(*args, **kwargs): - - # append pluses to indicate recursion level - recursion_level = sum( - frame.function == 'wrapper_timer' - and frame.filename == __file__ - for frame in getouterframes(currentframe(), context=0) - ) - - recursion = '' - if recursion_level > 1: - recursion = '+' * (recursion_level - 1) - - tstart = time.perf_counter() - rval = func(*args, **kwargs) - tend = time.perf_counter() - spacers = ' ' * max(54 - len(description) - len(logger.name) - - len(recursion), 0) - logger.info(f'Finished {description}{spacers}' - f'{recursion} ({(tend - tstart):.2E}s)') - return rval - return wrapper_timer - return decorator_timer diff --git a/python/amici/numpy.py b/python/amici/numpy.py deleted file mode 100644 index 52ea05e6fe..0000000000 --- a/python/amici/numpy.py +++ /dev/null @@ -1,309 +0,0 @@ -""" -C++ object views ----------------- -This module provides views on C++ objects for efficient access. -""" - -import numpy as np -import copy -import collections - -from . import ExpDataPtr, ReturnDataPtr, ExpData, ReturnData -from typing import Union, List, Dict, Iterator - - -class SwigPtrView(collections.abc.Mapping): - """ - Interface class to expose std::vector and scalar members of - swig wrapped C++ objects as numpy array attributes and fields. This - class is memory efficient as copies of the underlying C++ objects is - only created when respective fields are accessed for the first time. - Cached copies are used for all subsequent calls. - - :ivar _swigptr: pointer to the c++ object - :ivar _field_names: names of members that will be exposed as numpy arrays - :ivar _field_dimensions: dimensions of numpy arrays - :ivar _cache: dictionary with cached values - """ - - _swigptr = None - _field_names: List[str] = [] - _field_dimensions: Dict[str, List[int]] = dict() - - def __getitem__(self, item: str) -> Union[np.ndarray, float]: - """ - Access to field names, copies data from C++ object into numpy - array, reshapes according to field dimensions and stores values in - cache. 
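A usage sketch for the decorator above; the function and description are made up:

    from amici.logging import get_logger, log_execution_time

    logger = get_logger(__name__)

    @log_execution_time('processing model', logger)
    def process_model():
        ...  # expensive work

    process_model()
    # logs something like: 'Finished processing model            (1.23E-01s)'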
- - :param item: field name - :return: value - """ - if self._swigptr is None: - raise NotImplementedError('Cannot get items from abstract class.') - - if item == 'ptr': - return self._swigptr - - if item in self._cache: - return self._cache[item] - - if item == 'id': - return getattr(self._swigptr, item) - - if item not in self._field_names: - self.__missing__(item) - - value = field_as_numpy( - self._field_dimensions, item, self._swigptr - ) - self._cache[item] = value - return value - - def __missing__(self, key: str) -> None: - """ - Default behaviour for missing keys - - :param key: field name - """ - raise KeyError(f'Unknown field name {key}.') - - def __getattr__(self, item) -> Union[np.ndarray, float]: - """ - Attribute accessor for field names - - :param item: field name - - :returns: value - """ - return self.__getitem__(item) - - def __init__(self, swigptr): - """ - Constructor - - :param swigptr: pointer to the C++ object - """ - self._swigptr = swigptr - self._cache = dict() - super(SwigPtrView, self).__init__() - - def __len__(self) -> int: - """ - Returns the number of available keys/fields - - :returns: length of _field_names - """ - return len(self._field_names) - - def __iter__(self) -> Iterator: - """ - Create an iterator of the keys/fields - - :returns: iterator over _field_names - """ - return iter(self._field_names) - - def __copy__(self): - """ - Create a shallow copy - - :return: SwigPtrView shallow copy - """ - other = SwigPtrView(self._swigptr) - other._field_names = self._field_names - other._field_dimensions = self._field_dimensions - other._cache = self._cache - return other - - def __contains__(self, item) -> bool: - """ - Faster implementation of __contains__ that avoids copy of the field - - :param item: item to check for - - :returns: whether item is available as key - """ - return item in self._field_names - - def __deepcopy__(self, memo): - """ - Create a deep copy - - :param memo: dict with id-to-object mapping - - :returns: SwigPtrView deep copy - """ - other = SwigPtrView(self._swigptr) - other._field_names = copy.deepcopy(self._field_names) - other._field_dimensions = copy.deepcopy(self._field_dimensions) - other._cache = copy.deepcopy(self._cache) - return other - - -class ReturnDataView(SwigPtrView): - """ - Interface class for C++ Return Data objects that avoids possibly costly - copies of member data. 
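In practice, the lazy copying works as in this sketch (`rdata` stands for an existing `amici.ReturnData`/`ReturnDataPtr`; `ReturnDataView` is the subclass defined below):

    view = ReturnDataView(rdata)  # no member data copied yet
    x = view['x']                 # first access: copy from C++, reshape, cache
    x2 = view.x                   # attribute access is served from the cache
    assert x is x2                # same cached numpy array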
- """ - - _field_names = [ - 'ts', 'x', 'x0', 'x_ss', 'sx', 'sx0', 'sx_ss', 'y', 'sigmay', - 'sy', 'ssigmay', 'z', 'rz', 'sigmaz', 'sz', 'srz', - 'ssigmaz', 'sllh', 's2llh', 'J', 'xdot', 'status', 'llh', - 'chi2', 'res', 'sres', 'FIM', 'w', 'preeq_wrms', 'preeq_t', - 'preeq_numsteps', 'preeq_numstepsB', 'preeq_status', 'preeq_cpu_time', - 'preeq_cpu_timeB', 'posteq_wrms', 'posteq_t', 'posteq_numsteps', - 'posteq_numstepsB', 'posteq_status', 'posteq_cpu_time', - 'posteq_cpu_timeB', 'numsteps', 'numrhsevals', - 'numerrtestfails', 'numnonlinsolvconvfails', 'order', 'cpu_time', - 'numstepsB', 'numrhsevalsB', 'numerrtestfailsB', - 'numnonlinsolvconvfailsB', 'cpu_timeB', 'cpu_time_total' - ] - - def __init__(self, rdata: Union[ReturnDataPtr, ReturnData]): - """ - Constructor - - :param rdata: pointer to the ReturnData instance - """ - if not isinstance(rdata, (ReturnDataPtr, ReturnData)): - raise TypeError(f'Unsupported pointer {type(rdata)}, must be' - f'amici.ExpDataPtr!') - self._field_dimensions = { - 'ts': [rdata.nt], - 'x': [rdata.nt, rdata.nx], - 'x0': [rdata.nx], - 'x_ss': [rdata.nx], - 'sx': [rdata.nt, rdata.nplist, rdata.nx], - 'sx0': [rdata.nplist, rdata.nx], - 'sx_ss': [rdata.nplist, rdata.nx], - - # observables - 'y': [rdata.nt, rdata.ny], - 'sigmay': [rdata.nt, rdata.ny], - 'sy': [rdata.nt, rdata.nplist, rdata.ny], - 'ssigmay': [rdata.nt, rdata.nplist, rdata.ny], - - # event observables - 'z': [rdata.nmaxevent, rdata.nz], - 'rz': [rdata.nmaxevent, rdata.nz], - 'sigmaz': [rdata.nmaxevent, rdata.nz], - 'sz': [rdata.nmaxevent, rdata.nplist, rdata.nz], - 'srz': [rdata.nmaxevent, rdata.nplist, rdata.nz], - 'ssigmaz': [rdata.nmaxevent, rdata.nplist, rdata.nz], - - # objective function - 'sllh': [rdata.nplist], - 's2llh': [rdata.np, rdata.nplist], - - 'res': [rdata.nt * rdata.nytrue * - (2 if rdata.sigma_res else 1)], - 'sres': [rdata.nt * rdata.nytrue * - (2 if rdata.sigma_res else 1), rdata.nplist], - 'FIM': [rdata.nplist, rdata.nplist], - - # diagnosis - 'J': [rdata.nx_solver, rdata.nx_solver], - 'w': [rdata.nt, rdata.nw], - 'xdot': [rdata.nx_solver], - 'preeq_numlinsteps': [rdata.newton_maxsteps, 2], - 'preeq_numsteps': [1, 3], - 'preeq_status': [1, 3], - 'posteq_numlinsteps': [rdata.newton_maxsteps, 2], - 'posteq_numsteps': [1, 3], - 'posteq_status': [1, 3], - 'numsteps': [rdata.nt], - 'numrhsevals': [rdata.nt], - 'numerrtestfails': [rdata.nt], - 'numnonlinsolvconvfails': [rdata.nt], - 'order': [rdata.nt], - 'numstepsB': [rdata.nt], - 'numrhsevalsB': [rdata.nt], - 'numerrtestfailsB': [rdata.nt], - 'numnonlinsolvconvfailsB': [rdata.nt], - } - super(ReturnDataView, self).__init__(rdata) - - def __getitem__(self, item: str) -> Union[np.ndarray, ReturnDataPtr, - ReturnData, float]: - """ - Custom getitem implementation shim to map `t` to `ts` - - :param item: field/attribute key - - :returns: self[item] - """ - if item == 't': - item = 'ts' - return super(ReturnDataView, self).__getitem__(item) - - -class ExpDataView(SwigPtrView): - """ - Interface class for C++ Exp Data objects that avoids possibly costly - copies of member data. 
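For example (sketch; shapes follow the `_field_dimensions` table above):

    rdata_view = ReturnDataView(rdata)
    rdata_view['x'].shape                 # (rdata.nt, rdata.nx)
    rdata_view['t'] is rdata_view['ts']   # True; 't' is an alias for 'ts'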
-    """
-
-    _field_names = [
-        'observedData', 'observedDataStdDev', 'observedEvents',
-        'observedEventsStdDev', 'fixedParameters',
-        'fixedParametersPreequilibration',
-        'fixedParametersPresimulation'
-    ]
-
-    def __init__(self, edata: Union[ExpDataPtr, ExpData]):
-        """
-        Constructor
-
-        :param edata: pointer to the ExpData instance
-        """
-        if not isinstance(edata, (ExpDataPtr, ExpData)):
-            raise TypeError(f'Unsupported pointer {type(edata)}, must be '
-                            f'amici.ExpDataPtr!')
-        self._field_dimensions = {  # observables
-            'observedData': [edata.nt(), edata.nytrue()],
-            'observedDataStdDev': [edata.nt(), edata.nytrue()],
-
-            # event observables
-            'observedEvents': [edata.nmaxevent(), edata.nztrue()],
-            'observedEventsStdDev': [edata.nmaxevent(), edata.nztrue()],
-
-            # fixed parameters
-            'fixedParameters': [len(edata.fixedParameters)],
-            'fixedParametersPreequilibration': [
-                len(edata.fixedParametersPreequilibration)],
-            'fixedParametersPresimulation': [
-                len(edata.fixedParametersPresimulation)],
-        }
-        edata.observedData = edata.getObservedData()
-        edata.observedDataStdDev = edata.getObservedDataStdDev()
-        edata.observedEvents = edata.getObservedEvents()
-        edata.observedEventsStdDev = edata.getObservedEventsStdDev()
-        super(ExpDataView, self).__init__(edata)
-
-
-def field_as_numpy(field_dimensions: Dict[str, List[int]],
-                   field: str, data: SwigPtrView) -> Union[np.ndarray,
-                                                           float,
-                                                           None]:
-    """
-    Convert data object field to numpy array with dimensions according to
-    specified field dimensions
-
-    :param field_dimensions: dimension specifications
-        dict({field: list([dim1, dim2, ...])})
-    :param field: Name of field
-    :param data: object with fields
-
-    :returns: Field data as numpy array with dimensions according to
-        specified field dimensions
-    """
-    attr = getattr(data, field)
-    if field in field_dimensions:
-        if len(attr) == 0:
-            return None
-        else:
-            return np.array(attr).reshape(field_dimensions[field])
-    else:
-        return float(attr)
diff --git a/python/amici/ode_export.py b/python/amici/ode_export.py
deleted file mode 100644
index ee537efa69..0000000000
--- a/python/amici/ode_export.py
+++ /dev/null
@@ -1,3590 +0,0 @@
-"""
-C++ Export
-----------
-This module provides all necessary functionality to specify an ODE model and
-generate executable C++ simulation code. The user generally won't have to
-directly call any function from this module as this will be done by
-:py:func:`amici.pysb_import.pysb2amici`,
-:py:func:`amici.sbml_import.SbmlImporter.sbml2amici` and
-:py:func:`amici.petab_import.import_model`.
-"""
-import contextlib
-import copy
-import itertools
-import logging
-import os
-import re
-import shutil
-import subprocess
-import sys
-from dataclasses import dataclass
-from itertools import chain, starmap
-from pathlib import Path
-from string import Template
-from typing import (Any, Callable, Dict, List, Optional, Sequence, Set, Tuple,
-                    Union)
-
-import numpy as np
-import sympy as sp
-from sympy.matrices.dense import MutableDenseMatrix
-from sympy.matrices.immutable import ImmutableDenseMatrix
-
-from . import (__commit__, __version__, amiciModulePath, amiciSrcPath,
-               amiciSwigPath, sbml_import)
-from .constants import SymbolId
-from .cxxcodeprinter import AmiciCxxCodePrinter, get_switch_statement
-from .import_utils import (ObservableTransformation, generate_flux_symbol,
-                           smart_subs_dict, strip_pysb,
-                           symbol_with_assumptions, toposort_symbols)
-from .logging import get_logger, log_execution_time, set_log_level
-from .ode_model import *
-
-
-# Template for model simulation main.cpp file
-CXX_MAIN_TEMPLATE_FILE = os.path.join(amiciSrcPath, 'main.template.cpp')
-# Template for model/swig/CMakeLists.txt
-SWIG_CMAKE_TEMPLATE_FILE = os.path.join(amiciSwigPath,
-                                        'CMakeLists_model.cmake')
-# Template for model/CMakeLists.txt
-MODEL_CMAKE_TEMPLATE_FILE = os.path.join(amiciSrcPath,
-                                         'CMakeLists.template.cmake')
-
-IDENTIFIER_PATTERN = re.compile(r'^[a-zA-Z_]\w*$')
-DERIVATIVE_PATTERN = re.compile(r'^d(x_rdata|xdot|\w+?)d(\w+?)(?:_explicit)?$')
-
-
-@dataclass
-class _FunctionInfo:
-    """Information on a model-specific generated C++ function
-
-    :ivar arguments: argument list of the function. input variables should be
-        ``const``.
-    :ivar return_type: the return type of the function
-    :ivar assume_pow_positivity:
-        identifies the functions on which ``assume_pow_positivity`` will have
-        an effect when specified during model generation. generally these are
-        functions that are used for solving the ODE, where negative values may
-        negatively affect convergence of the integration algorithm
-    :ivar sparse:
-        specifies whether the result of this function will be stored in sparse
-        format. sparse format means that the function will only return an
-        array of nonzero values and not a full matrix.
-    :ivar generate_body:
-        indicates whether a model-specific implementation is to be generated
-    :ivar body:
-        the actual function body.
will be filled later - """ - arguments: str = '' - return_type: str = 'void' - assume_pow_positivity: bool = False - sparse: bool = False - generate_body: bool = True - body: str = '' - - -# Information on a model-specific generated C++ function -# prototype for generated C++ functions, keys are the names of functions -functions = { - 'Jy': - _FunctionInfo( - 'realtype *Jy, const int iy, const realtype *p, ' - 'const realtype *k, const realtype *y, const realtype *sigmay, ' - 'const realtype *my' - ), - 'dJydsigma': - _FunctionInfo( - 'realtype *dJydsigma, const int iy, const realtype *p, ' - 'const realtype *k, const realtype *y, const realtype *sigmay, ' - 'const realtype *my' - ), - 'dJydy': - _FunctionInfo( - 'realtype *dJydy, const int iy, const realtype *p, ' - 'const realtype *k, const realtype *y, ' - 'const realtype *sigmay, const realtype *my', - sparse=True - ), - 'Jz': - _FunctionInfo( - 'realtype *Jz, const int iz, const realtype *p, const realtype *k, ' - 'const realtype *z, const realtype *sigmaz, const realtype *mz' - ), - 'dJzdsigma': - _FunctionInfo( - 'realtype *dJzdsigma, const int iz, const realtype *p, ' - 'const realtype *k, const realtype *z, const realtype *sigmaz, ' - 'const realtype *mz' - ), - 'dJzdz': - _FunctionInfo( - 'realtype *dJzdz, const int iz, const realtype *p, ' - 'const realtype *k, const realtype *z, const realtype *sigmaz, ' - 'const double *mz', - ), - 'Jrz': - _FunctionInfo( - 'realtype *Jrz, const int iz, const realtype *p, ' - 'const realtype *k, const realtype *rz, const realtype *sigmaz' - ), - 'dJrzdsigma': - _FunctionInfo( - 'realtype *dJrzdsigma, const int iz, const realtype *p, ' - 'const realtype *k, const realtype *rz, const realtype *sigmaz' - ), - 'dJrzdz': - _FunctionInfo( - 'realtype *dJrzdz, const int iz, const realtype *p, ' - 'const realtype *k, const realtype *rz, const realtype *sigmaz', - ), - 'root': - _FunctionInfo( - 'realtype *root, const realtype t, const realtype *x, ' - 'const realtype *p, const realtype *k, const realtype *h, ' - 'const realtype *tcl' - ), - 'dwdp': - _FunctionInfo( - 'realtype *dwdp, const realtype t, const realtype *x, ' - 'const realtype *p, const realtype *k, const realtype *h, ' - 'const realtype *w, const realtype *tcl, const realtype *dtcldp', - assume_pow_positivity=True, sparse=True - ), - 'dwdx': - _FunctionInfo( - 'realtype *dwdx, const realtype t, const realtype *x, ' - 'const realtype *p, const realtype *k, const realtype *h, ' - 'const realtype *w, const realtype *tcl', - assume_pow_positivity=True, sparse=True - ), - 'dwdw': - _FunctionInfo( - 'realtype *dwdw, const realtype t, const realtype *x, ' - 'const realtype *p, const realtype *k, const realtype *h, ' - 'const realtype *w, const realtype *tcl', - assume_pow_positivity=True, sparse=True - ), - 'dxdotdw': - _FunctionInfo( - 'realtype *dxdotdw, const realtype t, const realtype *x, ' - 'const realtype *p, const realtype *k, const realtype *h, ' - 'const realtype *w', - assume_pow_positivity=True, sparse=True - ), - 'dxdotdx_explicit': - _FunctionInfo( - 'realtype *dxdotdx_explicit, const realtype t, ' - 'const realtype *x, const realtype *p, const realtype *k, ' - 'const realtype *h, const realtype *w', - assume_pow_positivity=True, sparse=True - ), - 'dxdotdp_explicit': - _FunctionInfo( - 'realtype *dxdotdp_explicit, const realtype t, ' - 'const realtype *x, const realtype *p, const realtype *k, ' - 'const realtype *h, const realtype *w', - assume_pow_positivity=True, sparse=True - ), - 'dydx': - _FunctionInfo( - 'realtype *dydx, 
const realtype t, const realtype *x, ' - 'const realtype *p, const realtype *k, const realtype *h, ' - 'const realtype *w, const realtype *dwdx', - ), - 'dydp': - _FunctionInfo( - 'realtype *dydp, const realtype t, const realtype *x, ' - 'const realtype *p, const realtype *k, const realtype *h, ' - 'const int ip, const realtype *w, const realtype *tcl, ' - 'const realtype *dtcldp', - ), - 'dzdx': - _FunctionInfo( - 'realtype *dzdx, const int ie, const realtype t, ' - 'const realtype *x, const realtype *p, const realtype *k, ' - 'const realtype *h', - ), - 'dzdp': - _FunctionInfo( - 'realtype *dzdp, const int ie, const realtype t, ' - 'const realtype *x, const realtype *p, const realtype *k, ' - 'const realtype *h, const int ip', - ), - 'drzdx': - _FunctionInfo( - 'realtype *drzdx, const int ie, const realtype t, ' - 'const realtype *x, const realtype *p, const realtype *k, ' - 'const realtype *h', - ), - 'drzdp': - _FunctionInfo( - 'realtype *drzdp, const int ie, const realtype t, ' - 'const realtype *x, const realtype *p, const realtype *k, ' - 'const realtype *h, const int ip', - ), - 'dsigmaydy': - _FunctionInfo( - 'realtype *dsigmaydy, const realtype t, const realtype *p, ' - 'const realtype *k, const realtype *y' - ), - 'dsigmaydp': - _FunctionInfo( - 'realtype *dsigmaydp, const realtype t, const realtype *p, ' - 'const realtype *k, const realtype *y, const int ip', - ), - 'sigmay': - _FunctionInfo( - 'realtype *sigmay, const realtype t, const realtype *p, ' - 'const realtype *k, const realtype *y', - ), - 'dsigmazdp': - _FunctionInfo( - 'realtype *dsigmazdp, const realtype t, const realtype *p,' - ' const realtype *k, const int ip', - ), - 'sigmaz': - _FunctionInfo( - 'realtype *sigmaz, const realtype t, const realtype *p, ' - 'const realtype *k', - ), - 'sroot': - _FunctionInfo( - 'realtype *stau, const realtype t, const realtype *x, ' - 'const realtype *p, const realtype *k, const realtype *h, ' - 'const realtype *sx, const int ip, const int ie, ' - 'const realtype *tcl', - generate_body=False - ), - 'drootdt': - _FunctionInfo(generate_body=False), - 'drootdt_total': - _FunctionInfo(generate_body=False), - 'drootdp': - _FunctionInfo(generate_body=False), - 'drootdx': - _FunctionInfo(generate_body=False), - 'stau': - _FunctionInfo( - 'realtype *stau, const realtype t, const realtype *x, ' - 'const realtype *p, const realtype *k, const realtype *h, ' - 'const realtype *tcl, const realtype *sx, const int ip, ' - 'const int ie' - ), - 'deltax': - _FunctionInfo( - 'double *deltax, const realtype t, const realtype *x, ' - 'const realtype *p, const realtype *k, const realtype *h, ' - 'const int ie, const realtype *xdot, const realtype *xdot_old' - ), - 'ddeltaxdx': - _FunctionInfo(generate_body=False), - 'ddeltaxdt': - _FunctionInfo(generate_body=False), - 'ddeltaxdp': - _FunctionInfo(generate_body=False), - 'deltasx': - _FunctionInfo( - 'realtype *deltasx, const realtype t, const realtype *x, ' - 'const realtype *p, const realtype *k, const realtype *h, ' - 'const realtype *w, const int ip, const int ie, ' - 'const realtype *xdot, const realtype *xdot_old, ' - 'const realtype *sx, const realtype *stau, const realtype *tcl' - ), - 'w': - _FunctionInfo( - 'realtype *w, const realtype t, const realtype *x, ' - 'const realtype *p, const realtype *k, ' - 'const realtype *h, const realtype *tcl', - assume_pow_positivity=True - ), - 'x0': - _FunctionInfo( - 'realtype *x0, const realtype t, const realtype *p, ' - 'const realtype *k' - ), - 'x0_fixedParameters': - _FunctionInfo( - 'realtype 
*x0_fixedParameters, const realtype t, ' - 'const realtype *p, const realtype *k, ' - 'gsl::span reinitialization_state_idxs', - ), - 'sx0': - _FunctionInfo( - 'realtype *sx0, const realtype t, const realtype *x, ' - 'const realtype *p, const realtype *k, const int ip', - ), - 'sx0_fixedParameters': - _FunctionInfo( - 'realtype *sx0_fixedParameters, const realtype t, ' - 'const realtype *x0, const realtype *p, const realtype *k, ' - 'const int ip, gsl::span reinitialization_state_idxs', - ), - 'xdot': - _FunctionInfo( - 'realtype *xdot, const realtype t, const realtype *x, ' - 'const realtype *p, const realtype *k, const realtype *h, ' - 'const realtype *w', - assume_pow_positivity=True - ), - 'xdot_old': - _FunctionInfo(generate_body=False), - 'y': - _FunctionInfo( - 'realtype *y, const realtype t, const realtype *x, ' - 'const realtype *p, const realtype *k, ' - 'const realtype *h, const realtype *w', - ), - 'x_rdata': - _FunctionInfo( - 'realtype *x_rdata, const realtype *x, const realtype *tcl, ' - 'const realtype *p, const realtype *k' - ), - 'total_cl': - _FunctionInfo( - 'realtype *total_cl, const realtype *x_rdata, ' - 'const realtype *p, const realtype *k' - ), - 'dtotal_cldp': - _FunctionInfo( - 'realtype *dtotal_cldp, const realtype *x_rdata, ' - 'const realtype *p, const realtype *k, const int ip' - ), - 'dtotal_cldx_rdata': - _FunctionInfo( - 'realtype *dtotal_cldx_rdata, const realtype *x_rdata, ' - 'const realtype *p, const realtype *k, const realtype *tcl', - sparse=True - ), - 'x_solver': - _FunctionInfo('realtype *x_solver, const realtype *x_rdata'), - 'dx_rdatadx_solver': - _FunctionInfo( - 'realtype *dx_rdatadx_solver, const realtype *x, ' - 'const realtype *tcl, const realtype *p, const realtype *k', - sparse=True - ), - 'dx_rdatadp': - _FunctionInfo( - 'realtype *dx_rdatadp, const realtype *x, ' - 'const realtype *tcl, const realtype *p, const realtype *k, ' - 'const int ip' - ), - 'dx_rdatadtcl': - _FunctionInfo( - 'realtype *dx_rdatadtcl, const realtype *x, ' - 'const realtype *tcl, const realtype *p, const realtype *k', - sparse=True - ), - 'z': - _FunctionInfo( - 'realtype *z, const int ie, const realtype t, const realtype *x, ' - 'const realtype *p, const realtype *k, const realtype *h' - ), - 'rz': - _FunctionInfo( - 'realtype *rz, const int ie, const realtype t, const realtype *x, ' - 'const realtype *p, const realtype *k, const realtype *h' - ), -} - -# list of sparse functions -sparse_functions = [ - func_name for func_name, func_info in functions.items() - if func_info.sparse -] -# list of nobody functions -nobody_functions = [ - func_name for func_name, func_info in functions.items() - if not func_info.generate_body -] -# list of sensitivity functions -sensi_functions = [ - func_name for func_name, func_info in functions.items() - if 'const int ip' in func_info.arguments -] -# list of sensitivity functions -sparse_sensi_functions = [ - func_name for func_name, func_info in functions.items() - if 'const int ip' not in func_info.arguments - and func_name.endswith('dp') or func_name.endswith('dp_explicit') -] -# list of event functions -event_functions = [ - func_name for func_name, func_info in functions.items() - if 'const int ie' in func_info.arguments and - 'const int ip' not in func_info.arguments -] -event_sensi_functions = [ - func_name for func_name, func_info in functions.items() - if 'const int ie' in func_info.arguments and - 'const int ip' in func_info.arguments -] -# list of multiobs functions -multiobs_functions = [ - func_name for func_name, 
func_info in functions.items()
-    if 'const int iy' in func_info.arguments
-    or 'const int iz' in func_info.arguments
-]
-# list of equations that have ids which may not be unique
-non_unique_id_symbols = [
-    'x_rdata', 'y'
-]
-
-# custom c++ function replacements
-CUSTOM_FUNCTIONS = [
-    {'sympy': 'polygamma',
-     'c++': 'boost::math::polygamma',
-     'include': '#include <boost/math/special_functions/polygamma.hpp>',
-     'build_hint': 'Using polygamma requires libboost-math header files.'
-     },
-    {'sympy': 'Heaviside',
-     'c++': 'amici::heaviside'},
-    {'sympy': 'DiracDelta',
-     'c++': 'amici::dirac'}
-]
-
-# python log manager
-logger = get_logger(__name__, logging.ERROR)
-
-
-def var_in_function_signature(name: str, varname: str) -> bool:
-    """
-    Checks whether the values for a symbolic variable are passed in the
-    signature of a function
-
-    :param name:
-        name of the function
-    :param varname:
-        name of the symbolic variable
-
-    :return:
-        boolean indicating whether the variable occurs in the function
-        signature
-    """
-    return name in functions \
-        and re.search(
-            rf'const (realtype|double) \*{varname}[0]*(,|$)+',
-            functions[name].arguments
-        ) is not None
-
-
-# defines the type of some attributes in ODEModel
-symbol_to_type = {
-    SymbolId.SPECIES: State,
-    SymbolId.PARAMETER: Parameter,
-    SymbolId.FIXED_PARAMETER: Constant,
-    SymbolId.OBSERVABLE: Observable,
-    SymbolId.EVENT_OBSERVABLE: EventObservable,
-    SymbolId.SIGMAY: SigmaY,
-    SymbolId.SIGMAZ: SigmaZ,
-    SymbolId.LLHY: LogLikelihoodY,
-    SymbolId.LLHZ: LogLikelihoodZ,
-    SymbolId.LLHRZ: LogLikelihoodRZ,
-    SymbolId.EXPRESSION: Expression,
-    SymbolId.EVENT: Event
-}
-
-
-@log_execution_time('running smart_jacobian', logger)
-def smart_jacobian(
-        eq: sp.MutableDenseMatrix,
-        sym_var: sp.MutableDenseMatrix
-) -> sp.MutableSparseMatrix:
-    """
-    Wrapper around symbolic jacobian with some additional checks that reduce
-    computation time for large matrices
-
-    :param eq:
-        equation
-    :param sym_var:
-        differentiation variable
-    :return:
-        jacobian of eq wrt sym_var
-    """
-    nrow = eq.shape[0]
-    ncol = sym_var.shape[0]
-    if (
-        not min(eq.shape)
-        or not min(sym_var.shape)
-        or smart_is_zero_matrix(eq)
-        or smart_is_zero_matrix(sym_var)
-    ):
-        return sp.MutableSparseMatrix(nrow, ncol, dict())
-
-    # preprocess sparsity pattern
-    elements = (
-        (i, j, a, b)
-        for i, a in enumerate(eq)
-        for j, b in enumerate(sym_var)
-        if a.has(b)
-    )
-
-    if (n_procs := int(os.environ.get("AMICI_IMPORT_NPROCS", 1))) == 1:
-        # serial
-        return sp.MutableSparseMatrix(
-            nrow, ncol, dict(starmap(_jacobian_element, elements))
-        )
-
-    # parallel
-    from multiprocessing import get_context
-    # "spawn" should avoid potential deadlocks occurring with fork
-    # see e.g.
https://stackoverflow.com/a/66113051 - ctx = get_context('spawn') - with ctx.Pool(n_procs) as p: - mapped = p.starmap(_jacobian_element, elements) - return sp.MutableSparseMatrix(nrow, ncol, dict(mapped)) - - -@log_execution_time('running smart_multiply', logger) -def smart_multiply( - x: Union[sp.MutableDenseMatrix, sp.MutableSparseMatrix], - y: sp.MutableDenseMatrix -) -> Union[sp.MutableDenseMatrix, sp.MutableSparseMatrix]: - """ - Wrapper around symbolic multiplication with some additional checks that - reduce computation time for large matrices - - :param x: - educt 1 - :param y: - educt 2 - :return: - product - """ - if not x.shape[0] or not y.shape[1] or smart_is_zero_matrix(x) or \ - smart_is_zero_matrix(y): - return sp.zeros(x.shape[0], y.shape[1]) - return x.multiply(y) - - -def smart_is_zero_matrix(x: Union[sp.MutableDenseMatrix, - sp.MutableSparseMatrix]) -> bool: - """A faster implementation of sympy's is_zero_matrix - - Avoids repeated indexer type checks and double iteration to distinguish - False/None. Found to be about 100x faster for large matrices. - - :param x: Matrix to check - """ - - if isinstance(x, sp.MutableDenseMatrix): - return all(xx.is_zero is True for xx in x.flat()) - - if isinstance(x, list): - return all(smart_is_zero_matrix(xx) for xx in x) - - return x.nnz() == 0 - - -def _default_simplify(x): - """Default simplification applied in ODEModel""" - # We need this as a free function instead of a lambda to have it picklable - # for parallel simplification - return sp.powsimp(x, deep=True) - - -class ODEModel: - """ - Defines an Ordinary Differential Equation as set of ModelQuantities. - This class provides general purpose interfaces to compute arbitrary - symbolic derivatives that are necessary for model simulation or - sensitivity computation. - - :ivar _states: - list of state variables - - :ivar _observables: - list of observables - - :ivar _event_observables: - list of event observables - - :ivar _sigmays: - list of sigmas for observables - - :ivar _sigmazs: - list of sigmas for event observables - - :ivar _parameters: - list of parameters - - :ivar _loglikelihoodys: - list of loglikelihoods for observables - - :ivar _loglikelihoodzs: - list of loglikelihoods for event observables - - :ivar _loglikelihoodrzs: - list of loglikelihoods for event observable regularizations - - :ivar _expressions: - list of expressions instances - - :ivar _conservationlaws: - list of conservation laws - - :ivar _symboldim_funs: - define functions that compute model dimensions, these - are functions as the underlying symbolic expressions have not been - populated at compile time - - :ivar _eqs: - carries symbolic formulas of the symbolic variables of the model - - :ivar _sparseeqs: - carries linear list of all symbolic formulas for sparsified - variables - - :ivar _vals: - carries numeric values of symbolic identifiers of the symbolic - variables of the model - - :ivar _names: - carries names of symbolic identifiers of the symbolic variables - of the model - - :ivar _syms: - carries symbolic identifiers of the symbolic variables of the - model - - :ivar _sparsesyms: - carries linear list of all symbolic identifiers for sparsified - variables - - :ivar _colptrs: - carries column pointers for sparsified variables. See - SUNMatrixContent_Sparse definition in ``sunmatrix/sunmatrix_sparse.h`` - - :ivar _rowvals: - carries row values for sparsified variables. 
-
-
-class ODEModel:
-    """
-    Defines an Ordinary Differential Equation as a set of ModelQuantities.
-    This class provides general purpose interfaces to compute arbitrary
-    symbolic derivatives that are necessary for model simulation or
-    sensitivity computation.
-
-    :ivar _states:
-        list of state variables
-
-    :ivar _observables:
-        list of observables
-
-    :ivar _event_observables:
-        list of event observables
-
-    :ivar _sigmays:
-        list of sigmas for observables
-
-    :ivar _sigmazs:
-        list of sigmas for event observables
-
-    :ivar _parameters:
-        list of parameters
-
-    :ivar _loglikelihoodys:
-        list of loglikelihoods for observables
-
-    :ivar _loglikelihoodzs:
-        list of loglikelihoods for event observables
-
-    :ivar _loglikelihoodrzs:
-        list of loglikelihoods for event observable regularizations
-
-    :ivar _expressions:
-        list of expression instances
-
-    :ivar _conservationlaws:
-        list of conservation laws
-
-    :ivar _symboldim_funs:
-        define functions that compute model dimensions, these
-        are functions as the underlying symbolic expressions have not been
-        populated at compile time
-
-    :ivar _eqs:
-        carries symbolic formulas of the symbolic variables of the model
-
-    :ivar _sparseeqs:
-        carries linear list of all symbolic formulas for sparsified
-        variables
-
-    :ivar _vals:
-        carries numeric values of symbolic identifiers of the symbolic
-        variables of the model
-
-    :ivar _names:
-        carries names of symbolic identifiers of the symbolic variables
-        of the model
-
-    :ivar _syms:
-        carries symbolic identifiers of the symbolic variables of the
-        model
-
-    :ivar _sparsesyms:
-        carries linear list of all symbolic identifiers for sparsified
-        variables
-
-    :ivar _colptrs:
-        carries column pointers for sparsified variables. See
-        SUNMatrixContent_Sparse definition in ``sunmatrix/sunmatrix_sparse.h``
-
-    :ivar _rowvals:
-        carries row values for sparsified variables. See
-        SUNMatrixContent_Sparse definition in ``sunmatrix/sunmatrix_sparse.h``
-
-    :ivar _equation_prototype:
-        defines the attribute from which an equation should be generated via
-        list comprehension (see :meth:`ODEModel._generate_equation`)
-
-    :ivar _variable_prototype:
-        defines the attribute from which a variable should be generated via
-        list comprehension (see :meth:`ODEModel._generate_symbol`)
-
-    :ivar _value_prototype:
-        defines the attribute from which a value should be generated via
-        list comprehension (see :meth:`ODEModel._generate_value`)
-
-    :ivar _total_derivative_prototypes:
-        defines how a total derivative equation is computed for an equation,
-        key defines the name and values should be arguments for
-        :meth:`ODEModel._total_derivative`
-
-    :ivar _lock_total_derivative:
-        add chainvariables to this set when computing total derivative from
-        a partial derivative call to enforce a partial derivative in the
-        next recursion. Prevents infinite recursion.
-
-    :ivar _simplify:
-        If not None, this function will be used to simplify symbolic
-        derivative expressions. Receives sympy expressions as only argument.
-        To apply multiple simplifications, wrap them in a lambda expression.
-
-    :ivar _x0_fixedParameters_idx:
-        Index list of subset of states for which x0_fixedParameters was
-        computed
-
-    :ivar _w_recursion_depth:
-        recursion depth in w, quantified as nilpotency of dwdw
-
-    :ivar _has_quadratic_nllh:
-        whether all observables have a gaussian noise model, i.e. whether
-        res and FIM make sense.
-
-    :ivar _code_printer:
-        Code printer to generate C++ code
-
-    :ivar _z2event:
-        list of event indices for each event observable
-    """
-
-    def __init__(self, verbose: Optional[Union[bool, int]] = False,
-                 simplify: Optional[Callable] = _default_simplify,
-                 cache_simplify: bool = False):
-        """
-        Create a new ODEModel instance.
-
-        :param verbose:
-            verbosity level for logging, True/False default to
-            ``logging.DEBUG``/``logging.ERROR``
-
-        :param simplify:
-            see :meth:`ODEModel._simplify`
-
-        :param cache_simplify:
-            Whether to cache calls to the simplify method. Can e.g. decrease
-            import times for models with events.
- """ - self._states: List[State] = [] - self._observables: List[Observable] = [] - self._eventobservables: List[EventObservable] = [] - self._sigmays: List[SigmaY] = [] - self._sigmazs: List[SigmaZ] = [] - self._parameters: List[Parameter] = [] - self._constants: List[Constant] = [] - self._loglikelihoodys: List[LogLikelihoodY] = [] - self._loglikelihoodzs: List[LogLikelihoodZ] = [] - self._loglikelihoodrzs: List[LogLikelihoodRZ] = [] - self._expressions: List[Expression] = [] - self._conservationlaws: List[ConservationLaw] = [] - self._events: List[Event] = [] - self._symboldim_funs: Dict[str, Callable[[], int]] = { - 'sx': self.num_states_solver, - 'v': self.num_states_solver, - 'vB': self.num_states_solver, - 'xB': self.num_states_solver, - 'sigmay': self.num_obs, - 'sigmaz': self.num_eventobs, - } - self._eqs: Dict[str, Union[sp.Matrix, sp.SparseMatrix, - List[Union[sp.Matrix, sp.SparseMatrix]]]] = dict() - self._sparseeqs: Dict[str, Union[sp.Matrix, List[sp.Matrix]]] = dict() - self._vals: Dict[str, List[float]] = dict() - self._names: Dict[str, List[str]] = dict() - self._syms: Dict[str, Union[sp.Matrix, List[sp.Matrix]]] = dict() - self._sparsesyms: Dict[str, Union[List[str], List[List[str]]]] = dict() - self._colptrs: Dict[str, Union[List[int], List[List[int]]]] = dict() - self._rowvals: Dict[str, Union[List[int], List[List[int]]]] = dict() - - self._equation_prototype: Dict[str, str] = { - 'total_cl': '_conservationlaws', - 'x0': '_states', - 'y': '_observables', - 'Jy': '_loglikelihoodys', - 'Jz': '_loglikelihoodzs', - 'Jrz': '_loglikelihoodrzs', - 'w': '_expressions', - 'root': '_events', - 'sigmay': '_sigmays', - 'sigmaz': '_sigmazs' - } - self._variable_prototype: Dict[str, str] = { - 'tcl': '_conservationlaws', - 'x_rdata': '_states', - 'y': '_observables', - 'z': '_eventobservables', - 'p': '_parameters', - 'k': '_constants', - 'w': '_expressions', - 'sigmay': '_sigmays', - 'sigmaz': '_sigmazs', - 'h': '_events' - } - self._value_prototype: Dict[str, str] = { - 'p': '_parameters', - 'k': '_constants', - } - self._total_derivative_prototypes: \ - Dict[str, Dict[str, Union[str, List[str]]]] = { - 'sroot': { - 'eq': 'root', - 'chainvars': ['x'], - 'var': 'p', - 'dxdz_name': 'sx', - }, - } - - self._lock_total_derivative: List[str] = list() - self._simplify: Callable = simplify - if cache_simplify and simplify is not None: - def cached_simplify( - expr: sp.Expr, - _simplified: Dict[str, sp.Expr] = {}, - _simplify: Callable = simplify, - ) -> sp.Expr: - """Speed up expression simplification with caching. - - NB: This can decrease model import times for models that have - many repeated expressions during C++ file generation. - For example, this can be useful for models with events. - However, for other models, this may increase model import - times. - - :param expr: - The SymPy expression. - :param _simplified: - The cache. - :param _simplify: - The simplification method. - - :return: - The simplified expression. 
- """ - expr_str = repr(expr) - if expr_str not in _simplified: - _simplified[expr_str] = _simplify(expr) - return _simplified[expr_str] - self._simplify = cached_simplify - self._x0_fixedParameters_idx: Union[None, Sequence[int]] - self._w_recursion_depth: int = 0 - self._has_quadratic_nllh: bool = True - set_log_level(logger, verbose) - - self._code_printer = AmiciCxxCodePrinter() - for fun in CUSTOM_FUNCTIONS: - self._code_printer.known_functions[fun['sympy']] = fun['c++'] - - @log_execution_time('importing SbmlImporter', logger) - def import_from_sbml_importer( - self, - si: 'sbml_import.SbmlImporter', - compute_cls: Optional[bool] = True - ) -> None: - """ - Imports a model specification from a - :class:`amici.sbml_import.SbmlImporter` instance. - - :param si: - imported SBML model - :param compute_cls: - whether to compute conservation laws - """ - - # get symbolic expression from SBML importers - symbols = copy.copy(si.symbols) - - # assemble fluxes and add them as expressions to the model - assert len(si.flux_ids) == len(si.flux_vector) - fluxes = [generate_flux_symbol(ir, name=flux_id) - for ir, flux_id in enumerate(si.flux_ids)] - - # correct time derivatives for compartment changes - def transform_dxdt_to_concentration(species_id, dxdt): - """ - Produces the appropriate expression for the first derivative of a - species with respect to time, for species that reside in - compartments with a constant volume, or a volume that is defined by - an assignment or rate rule. - - :param species_id: - The identifier of the species (generated in "sbml_import.py"). - - :param dxdt: - The element-wise product of the row in the stoichiometric - matrix that corresponds to the species (row x_index) and the - flux (kinetic laws) vector. Ignored in the case of rate rules. - """ - # The derivation of the below return expressions can be found in - # the documentation. They are found by rearranging - # $\frac{d}{dt} (vx) = Sw$ for $\frac{dx}{dt}$, where $v$ is the - # vector of species compartment volumes, $x$ is the vector of - # species concentrations, $S$ is the stoichiometric matrix, and $w$ - # is the flux vector. The conditional below handles the cases of - # species in (i) compartments with a rate rule, (ii) compartments - # with an assignment rule, and (iii) compartments with a constant - # volume, respectively. 
-            species = si.symbols[SymbolId.SPECIES][species_id]
-
-            comp = species['compartment']
-            if comp in si.symbols[SymbolId.SPECIES]:
-                dv_dt = si.symbols[SymbolId.SPECIES][comp]['dt']
-                xdot = (dxdt - dv_dt * species_id) / comp
-                return xdot
-            elif comp in si.compartment_assignment_rules:
-                v = si.compartment_assignment_rules[comp]
-
-                # we need to flatten out assignments in the compartment in
-                # order to ensure that we catch all species dependencies
-                v = smart_subs_dict(v, si.symbols[SymbolId.EXPRESSION],
-                                    'value')
-                dv_dt = v.diff(si.amici_time_symbol)
-                # we may end up with a time derivative of the compartment
-                # volume due to parameter rate rules
-                comp_rate_vars = [p for p in v.free_symbols
-                                  if p in si.symbols[SymbolId.SPECIES]]
-                for var in comp_rate_vars:
-                    dv_dt += \
-                        v.diff(var) * si.symbols[SymbolId.SPECIES][var]['dt']
-                dv_dx = v.diff(species_id)
-                xdot = (dxdt - dv_dt * species_id) / (dv_dx * species_id + v)
-                return xdot
-            else:
-                v = si.compartments[comp]
-
-                if v == 1.0:
-                    return dxdt
-
-                return dxdt / v
-
-        # create dynamics without respecting conservation laws first
-        dxdt = smart_multiply(si.stoichiometric_matrix,
-                              MutableDenseMatrix(fluxes))
-        for ix, ((species_id, species), formula) in enumerate(zip(
-                symbols[SymbolId.SPECIES].items(),
-                dxdt
-        )):
-            assert ix == species['index']  # check that no reordering occurred
-            # rate rules and amount species don't need to be updated
-            if 'dt' in species:
-                continue
-            if species['amount']:
-                species['dt'] = formula
-            else:
-                species['dt'] = transform_dxdt_to_concentration(species_id,
-                                                                formula)
-
-        # create all basic components of the ODE model and add them.
-        for symbol_name in symbols:
-            # transform dict of lists into a list of dicts
-            args = ['name', 'identifier']
-
-            if symbol_name == SymbolId.SPECIES:
-                args += ['dt', 'init']
-            else:
-                args += ['value']
-
-            if symbol_name == SymbolId.EVENT:
-                args += ['state_update', 'initial_value']
-            elif symbol_name == SymbolId.OBSERVABLE:
-                args += ['transformation']
-            elif symbol_name == SymbolId.EVENT_OBSERVABLE:
-                args += ['event']
-
-            protos = [
-                {
-                    'identifier': var_id,
-                    **{k: v for k, v in var.items() if k in args}
-                }
-                for var_id, var in symbols[symbol_name].items()
-            ]
-
-            for proto in protos:
-                self.add_component(symbol_to_type[symbol_name](**proto))
-
-        # add fluxes as expressions, this needs to happen after base
-        # expressions from symbols have been parsed
-        for flux_id, flux in zip(fluxes, si.flux_vector):
-            self.add_component(Expression(
-                identifier=flux_id,
-                name=str(flux_id),
-                value=flux
-            ))
-
-        # process conservation laws
-        if compute_cls:
-            si.process_conservation_laws(self)
-
-        # fill in 'self._sym' based on prototypes and components in ode_model
-        self.generate_basic_variables()
-        self._has_quadratic_nllh = all(
-            llh['dist'] in ['normal', 'lin-normal', 'log-normal',
-                            'log10-normal']
-            for llh in si.symbols[SymbolId.LLHY].values()
-        )
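The rate-rule case of the derivation above can be verified symbolically. A minimal sketch (assuming SymPy's `solve` can solve for a `Derivative`, which it can in recent versions):

import sympy as sp

t = sp.Symbol('t')
x = sp.Function('x')(t)   # species concentration
v = sp.Function('v')(t)   # compartment volume
Sw = sp.Symbol('Sw')      # row of S times flux vector w (amount rate)

# rearrange d(v*x)/dt = Sw for dx/dt, as in the derivation above
dxdt = sp.solve(sp.Eq(sp.diff(v * x, t), Sw), sp.diff(x, t))[0]
print(dxdt)  # (Sw - x(t)*Derivative(v(t), t))/v(t), up to ordering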
- """ - if type(component) not in { - Observable, Expression, Parameter, Constant, State, - LogLikelihoodY, LogLikelihoodZ, LogLikelihoodRZ, - SigmaY, SigmaZ, ConservationLaw, Event, EventObservable - }: - raise ValueError(f'Invalid component type {type(component)}') - - component_list = getattr( - self, f'_{type(component).__name__.lower()}s' - ) - if insert_first: - component_list.insert(0, component) - else: - component_list.append(component) - - def add_conservation_law(self, - state: sp.Symbol, - total_abundance: sp.Symbol, - coefficients: Dict[sp.Symbol, sp.Expr]) -> None: - r""" - Adds a new conservation law to the model. A conservation law is defined - by the conserved quantity :math:`T = \sum_i(a_i * x_i)`, where - :math:`a_i` are coefficients and :math:`x_i` are different state - variables. - - :param state: - symbolic identifier of the state that should be replaced by - the conservation law (:math:`x_j`) - - :param total_abundance: - symbolic identifier of the total abundance (:math:`T/a_j`) - - :param coefficients: - Dictionary of coefficients {x_i: a_i} - """ - try: - ix = next(filter(lambda is_s: is_s[1].get_id() == state, - enumerate(self._states)))[0] - except StopIteration: - raise ValueError(f'Specified state {state} was not found in the ' - f'model states.') - - state_id = self._states[ix].get_id() - - # \sum_{i≠j}(a_i * x_i)/a_j - target_expression = sp.Add(*( - c_i*x_i for x_i, c_i in coefficients.items() if x_i != state - )) / coefficients[state] - - # x_j = T/a_j - \sum_{i≠j}(a_i * x_i)/a_j - state_expr = total_abundance - target_expression - - # T/a_j = \sum_{i≠j}(a_i * x_i)/a_j + x_j - abundance_expr = target_expression + state_id - - self.add_component( - Expression(state_id, str(state_id), state_expr), - insert_first=True - ) - - cl = ConservationLaw( - total_abundance, f'total_{state_id}', abundance_expr, - coefficients, state_id - ) - - self.add_component(cl) - self._states[ix].set_conservation_law(cl) - - def get_observable_transformations(self) -> List[ObservableTransformation]: - """ - List of observable transformations - - :return: - list of transformations - """ - return [obs.trafo for obs in self._observables] - - def num_states_rdata(self) -> int: - """ - Number of states. - - :return: - number of state variable symbols - """ - return len(self.sym('x_rdata')) - - def num_states_solver(self) -> int: - """ - Number of states after applying conservation laws. - - :return: - number of state variable symbols - """ - return len(self.sym('x')) - - def num_cons_law(self) -> int: - """ - Number of conservation laws. - - :return: - number of conservation laws - """ - return self.num_states_rdata() - self.num_states_solver() - - def num_state_reinits(self) -> int: - """ - Number of solver states which would be reinitialized after - preequilibration - - :return: - number of state variable symbols with reinitialization - """ - reinit_states = self.eq('x0_fixedParameters') - solver_states = self.eq('x_solver') - return sum(ix in solver_states for ix in reinit_states) - - def num_obs(self) -> int: - """ - Number of Observables. - - :return: - number of observable symbols - """ - return len(self.sym('y')) - - def num_eventobs(self) -> int: - """ - Number of Event Observables. - - :return: - number of event observable symbols - """ - return len(self.sym('z')) - - def num_const(self) -> int: - """ - Number of Constants. - - :return: - number of constant symbols - """ - return len(self.sym('k')) - - def num_par(self) -> int: - """ - Number of Parameters. 
-    def get_observable_transformations(self) -> List[ObservableTransformation]:
-        """
-        List of observable transformations
-
-        :return:
-            list of transformations
-        """
-        return [obs.trafo for obs in self._observables]
-
-    def num_states_rdata(self) -> int:
-        """
-        Number of states.
-
-        :return:
-            number of state variable symbols
-        """
-        return len(self.sym('x_rdata'))
-
-    def num_states_solver(self) -> int:
-        """
-        Number of states after applying conservation laws.
-
-        :return:
-            number of state variable symbols
-        """
-        return len(self.sym('x'))
-
-    def num_cons_law(self) -> int:
-        """
-        Number of conservation laws.
-
-        :return:
-            number of conservation laws
-        """
-        return self.num_states_rdata() - self.num_states_solver()
-
-    def num_state_reinits(self) -> int:
-        """
-        Number of solver states which would be reinitialized after
-        preequilibration
-
-        :return:
-            number of state variable symbols with reinitialization
-        """
-        reinit_states = self.eq('x0_fixedParameters')
-        solver_states = self.eq('x_solver')
-        return sum(ix in solver_states for ix in reinit_states)
-
-    def num_obs(self) -> int:
-        """
-        Number of Observables.
-
-        :return:
-            number of observable symbols
-        """
-        return len(self.sym('y'))
-
-    def num_eventobs(self) -> int:
-        """
-        Number of Event Observables.
-
-        :return:
-            number of event observable symbols
-        """
-        return len(self.sym('z'))
-
-    def num_const(self) -> int:
-        """
-        Number of Constants.
-
-        :return:
-            number of constant symbols
-        """
-        return len(self.sym('k'))
-
-    def num_par(self) -> int:
-        """
-        Number of Parameters.
-
-        :return:
-            number of parameter symbols
-        """
-        return len(self.sym('p'))
-
-    def num_expr(self) -> int:
-        """
-        Number of Expressions.
-
-        :return:
-            number of expression symbols
-        """
-        return len(self.sym('w'))
-
-    def num_events(self) -> int:
-        """
-        Number of Events.
-
-        :return:
-            number of event symbols (length of the root vector in AMICI)
-        """
-        return len(self.sym('h'))
-
-    def sym(self, name: str) -> sp.Matrix:
-        """
-        Returns (and constructs if necessary) the identifiers for a symbolic
-        entity.
-
-        :param name:
-            name of the symbolic variable
-
-        :return:
-            matrix of symbolic identifiers
-        """
-        if name not in self._syms:
-            self._generate_symbol(name)
-
-        return self._syms[name]
-
-    def sparsesym(self, name: str, force_generate: bool = True) -> List[str]:
-        """
-        Returns (and constructs if necessary) the sparsified identifiers for
-        a sparsified symbolic variable.
-
-        :param name:
-            name of the symbolic variable
-
-        :param force_generate:
-            whether the symbols should be generated if not available
-
-        :return:
-            linearized Matrix containing the symbolic identifiers
-        """
-        if name not in sparse_functions:
-            raise ValueError(f'{name} is not marked as sparse')
-        if name not in self._sparsesyms and force_generate:
-            self._generate_sparse_symbol(name)
-        return self._sparsesyms.get(name, [])
-
-    def eq(self, name: str) -> sp.Matrix:
-        """
-        Returns (and constructs if necessary) the formulas for a symbolic
-        entity.
-
-        :param name:
-            name of the symbolic variable
-
-        :return:
-            matrix of symbolic formulas
-        """
-
-        if name not in self._eqs:
-            dec = log_execution_time(f'computing {name}', logger)
-            dec(self._compute_equation)(name)
-        return self._eqs[name]
-
-    def sparseeq(self, name) -> sp.Matrix:
-        """
-        Returns (and constructs if necessary) the sparsified formulas for a
-        sparsified symbolic variable.
-
-        :param name:
-            name of the symbolic variable
-
-        :return:
-            linearized matrix containing the symbolic formulas
-        """
-        if name not in sparse_functions:
-            raise ValueError(f'{name} is not marked as sparse')
-        if name not in self._sparseeqs:
-            self._generate_sparse_symbol(name)
-        return self._sparseeqs[name]
-
-    def colptrs(self, name: str) -> Union[List[sp.Number],
-                                          List[List[sp.Number]]]:
-        """
-        Returns (and constructs if necessary) the column pointers for
-        a sparsified symbolic variable.
-
-        :param name:
-            name of the symbolic variable
-
-        :return:
-            list containing the column pointers
-        """
-        if name not in sparse_functions:
-            raise ValueError(f'{name} is not marked as sparse')
-        if name not in self._sparseeqs:
-            self._generate_sparse_symbol(name)
-        return self._colptrs[name]
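For reference, the compressed-sparse-column (CSC) convention behind `colptrs` and `rowvals` (the accessor below), shown on a small dense example with illustrative values:

# CSC layout of the 3x2 matrix [[5, 0], [0, 7], [8, 0]]:
values = [5, 8, 7]    # nonzeros, stored column by column
rowvals = [0, 2, 1]   # row index of each stored value
colptrs = [0, 2, 3]   # column j occupies values[colptrs[j]:colptrs[j+1]]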
-    def rowvals(self, name: str) -> Union[List[sp.Number],
-                                          List[List[sp.Number]]]:
-        """
-        Returns (and constructs if necessary) the row values for a
-        sparsified symbolic variable.
-
-        :param name:
-            name of the symbolic variable
-
-        :return:
-            list containing the row values
-        """
-        if name not in sparse_functions:
-            raise ValueError(f'{name} is not marked as sparse')
-        if name not in self._sparseeqs:
-            self._generate_sparse_symbol(name)
-        return self._rowvals[name]
-
-    def val(self, name: str) -> List[float]:
-        """
-        Returns (and constructs if necessary) the numeric values of a
-        symbolic entity
-
-        :param name:
-            name of the symbolic variable
-
-        :return:
-            list containing the numeric values
-        """
-        if name not in self._vals:
-            self._generate_value(name)
-        return self._vals[name]
-
-    def name(self, name: str) -> List[str]:
-        """
-        Returns (and constructs if necessary) the names of a symbolic
-        variable
-
-        :param name:
-            name of the symbolic variable
-
-        :return:
-            list of names
-        """
-        if name not in self._names:
-            self._generate_name(name)
-        return self._names[name]
-
-    def free_symbols(self) -> Set[sp.Basic]:
-        """
-        Returns list of free symbols that appear in ODE RHS and initial
-        conditions.
-        """
-        return set(chain.from_iterable(
-            state.get_free_symbols()
-            for state in self._states
-        ))
-
-    def _generate_symbol(self, name: str) -> None:
-        """
-        Generates the symbolic identifiers for a symbolic variable
-
-        :param name:
-            name of the symbolic variable
-        """
-        if name in self._variable_prototype:
-            component = self._variable_prototype[name]
-            self._syms[name] = sp.Matrix([
-                comp.get_id()
-                for comp in getattr(self, component)
-            ])
-            if name == 'y':
-                self._syms['my'] = sp.Matrix([
-                    comp.get_measurement_symbol()
-                    for comp in getattr(self, component)
-                ])
-            if name == 'z':
-                self._syms['mz'] = sp.Matrix([
-                    comp.get_measurement_symbol()
-                    for comp in getattr(self, component)
-                ])
-                self._syms['rz'] = sp.Matrix([
-                    comp.get_regularization_symbol()
-                    for comp in getattr(self, component)
-                ])
-            return
-        elif name == 'x':
-            self._syms[name] = sp.Matrix([
-                state.get_id()
-                for state in self._states
-                if not state.has_conservation_law()
-            ])
-            return
-        elif name == 'sx0':
-            self._syms[name] = sp.Matrix([
-                f's{state.get_id()}_0'
-                for state in self._states
-                if not state.has_conservation_law()
-            ])
-            return
-        elif name == 'sx_rdata':
-            self._syms[name] = sp.Matrix([
-                f'sx_rdata_{i}'
-                for i in range(len(self._states))
-            ])
-            return
-        elif name == 'dtcldp':
-            # check whether the CL consists of only one state; then
-            # sensitivities drop out, otherwise generate symbols
-            self._syms[name] = sp.Matrix([
-                [sp.Symbol(f's{strip_pysb(tcl.get_id())}__'
-                           f'{strip_pysb(par.get_id())}', real=True)
-                 for par in self._parameters]
-                if self.conservation_law_has_multispecies(tcl)
-                else [0] * self.num_par()
-                for tcl in self._conservationlaws
-            ])
-            return
-        elif name == 'xdot_old':
-            length = len(self.eq('xdot'))
-        elif name in sparse_functions:
-            self._generate_sparse_symbol(name)
-            return
-        elif name in self._symboldim_funs:
-            length = self._symboldim_funs[name]()
-        elif name == 'stau':
-            length = self.eq(name)[0].shape[1]
-        elif name in sensi_functions:
-            length = self.eq(name).shape[0]
-        else:
-            length = len(self.eq(name))
-        self._syms[name] = sp.Matrix([
-            sp.Symbol(f'{name}{0 if name == "stau" else i}', real=True)
-            for i in range(length)
-        ])
-
-    def generate_basic_variables(self) -> None:
-        """
-        Generates the symbolic identifiers for all variables in
-        ``ODEModel._variable_prototype``
-        """
-        # We need to process events and Heaviside functions in the ODE Model,
-        # before adding it to ODEExporter
-        self.parse_events()
-
-        for var in self._variable_prototype:
-            if var not in self._syms:
-                self._generate_symbol(var)
-
-        self._generate_symbol('x')
-
-    def parse_events(self) -> None:
-        """
-        This function checks the right-hand side for roots of Heaviside
-        functions or events, collects the roots, removes redundant roots,
-        and replaces the formulae of the found roots by identifiers of
-        AMICI's Heaviside function implementation in the right-hand side
-        """
-        # Track all roots functions in the right-hand side
-        roots = copy.deepcopy(self._events)
-        for state in self._states:
-            state.set_dt(self._process_heavisides(state.get_dt(), roots))
-
-        for expr in self._expressions:
-            expr.set_val(self._process_heavisides(expr.get_val(), roots))
-
-        # remove all possible Heavisides from roots, which may arise from
-        # the substitution of `'w'` in `_collect_heaviside_roots`
-        for root in roots:
-            root.set_val(self._process_heavisides(root.get_val(), roots))
-
-        # Now add the found roots to the model components
-        for root in roots:
-            # skip roots of SBML events, as these have already been added
-            if root in self._events:
-                continue
-            # add roots of heaviside functions
-            self.add_component(root)
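The effect of this root replacement can be shown on a one-line toy right-hand side; `Heaviside_0` below mirrors the helper-variable naming used by `_get_unique_root`, and is illustrative only:

import sympy as sp

t, k = sp.symbols('t k')
x = sp.Function('x')(t)
dxdt = k * sp.Heaviside(t - 5) - x  # a switch in the RHS ...
# ... is replaced by a helper that SUNDIALS updates by tracking the
# root of t - 5
h0 = sp.Symbol('Heaviside_0')
print(dxdt.subs(sp.Heaviside(t - 5), h0))  # Heaviside_0*k - x(t)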
-    def get_appearance_counts(self, idxs: List[int]) -> List[int]:
-        """
-        Counts how often a state appears in the time derivative of
-        another state and expressions for a subset of states
-
-        :param idxs:
-            list of state indices for which counts are to be computed
-
-        :return:
-            list of counts for the states ordered according to the provided
-            indices
-        """
-        free_symbols_dt = list(itertools.chain.from_iterable(
-            [
-                str(symbol)
-                for symbol in state.get_dt().free_symbols
-            ]
-            for state in self._states
-        ))
-
-        free_symbols_expr = list(itertools.chain.from_iterable(
-            [
-                str(symbol)
-                for symbol in expr.get_val().free_symbols
-            ]
-            for expr in self._expressions
-        ))
-
-        return [
-            free_symbols_dt.count(str(self._states[idx].get_id()))
-            +
-            free_symbols_expr.count(str(self._states[idx].get_id()))
-            for idx in idxs
-        ]
-
-    def _generate_sparse_symbol(self, name: str) -> None:
-        """
-        Generates the sparse symbolic identifiers, symbolic identifiers,
-        sparse equations, column pointers and row values for a symbolic
-        variable
-
-        :param name:
-            name of the symbolic variable
-        """
-        matrix = self.eq(name)
-
-        if match_deriv := DERIVATIVE_PATTERN.match(name):
-            eq = match_deriv[1]
-            var = match_deriv[2]
-
-            rownames = self.sym(eq)
-            colnames = self.sym(var)
-
-        if name == 'dJydy':
-            # One entry per y-slice
-            self._colptrs[name] = []
-            self._rowvals[name] = []
-            self._sparseeqs[name] = []
-            self._sparsesyms[name] = []
-            self._syms[name] = []
-
-            for iy in range(self.num_obs()):
-                symbol_col_ptrs, symbol_row_vals, sparse_list, symbol_list, \
-                    sparse_matrix = self._code_printer.csc_matrix(
-                        matrix[iy, :], rownames=rownames, colnames=colnames,
-                        identifier=iy)
-                self._colptrs[name].append(symbol_col_ptrs)
-                self._rowvals[name].append(symbol_row_vals)
-                self._sparseeqs[name].append(sparse_list)
-                self._sparsesyms[name].append(symbol_list)
-                self._syms[name].append(sparse_matrix)
-        else:
-            symbol_col_ptrs, symbol_row_vals, sparse_list, symbol_list, \
-                sparse_matrix = self._code_printer.csc_matrix(
-                    matrix, rownames=rownames, colnames=colnames,
-                    pattern_only=name in nobody_functions
-                )
-
-            self._colptrs[name] = symbol_col_ptrs
-            self._rowvals[name] = symbol_row_vals
-            self._sparseeqs[name] = sparse_list
-            self._sparsesyms[name] = symbol_list
-            self._syms[name] = sparse_matrix
-
-    def _compute_equation(self, name: str) -> None:
-        """
-        Computes the symbolic formula for a symbolic variable
-
-        :param name:
-            name of the symbolic variable
-        """
-        # replacement ensures that we don't have to adapt name in abstract
-        # model and keep backwards compatibility with matlab
-        match_deriv = DERIVATIVE_PATTERN.match(
-            re.sub(r'dJ(y|z|rz)dsigma', r'dJ\1dsigma\1', name)
-            .replace('sigmarz', 'sigmaz')
-            .replace('dJrzdz', 'dJrzdrz')
-        )
-        time_symbol = sp.Matrix([symbol_with_assumptions('t')])
-
-        if name in self._equation_prototype:
-            self._equation_from_component(name, self._equation_prototype[name])
-
-        elif name in self._total_derivative_prototypes:
-            args = self._total_derivative_prototypes[name]
-            args['name'] = name
-            self._lock_total_derivative += args['chainvars']
-            self._total_derivative(**args)
-            for cv in args['chainvars']:
-                self._lock_total_derivative.remove(cv)
-
-        elif name == 'xdot':
-            self._eqs[name] = sp.Matrix([
-                state.get_dt() for state in self._states
-                if not state.has_conservation_law()
-            ])
-
-        elif name == 'x_rdata':
-            self._eqs[name] = sp.Matrix([
-                state.get_x_rdata()
-                for state in self._states
-            ])
-
-        elif name == 'x_solver':
-            self._eqs[name] = sp.Matrix([
-                state.get_id()
-                for state in self._states
-                if not state.has_conservation_law()
-            ])
-
-        elif name == 'sx_solver':
-            self._eqs[name] = sp.Matrix([
-                self.sym('sx_rdata')[ix]
-                for ix, state in enumerate(self._states)
-                if not state.has_conservation_law()
-            ])
-
-        elif name == 'sx0':
-            self._derivative(name[1:], 'p', name=name)
-
-        elif name == 'sx0_fixedParameters':
-            # deltax = -x+x0_fixedParameters if x0_fixedParameters>0 else 0
-            # deltasx = -sx+dx0_fixed_parametersdx*sx+dx0_fixedParametersdp
-            # if x0_fixedParameters>0 else 0
-            # sx0_fixedParameters = sx+deltasx =
-            # dx0_fixed_parametersdx*sx+dx0_fixedParametersdp
-            self._eqs[name] = smart_jacobian(
-                self.eq('x0_fixedParameters'), self.sym('p')
-            )
-
-            dx0_fixed_parametersdx = smart_jacobian(
-                self.eq('x0_fixedParameters'), self.sym('x')
-            )
-
-            if not smart_is_zero_matrix(dx0_fixed_parametersdx):
-                if isinstance(self._eqs[name], ImmutableDenseMatrix):
-                    self._eqs[name] = MutableDenseMatrix(self._eqs[name])
-                tmp = smart_multiply(dx0_fixed_parametersdx, self.sym('sx0'))
-                for ip in range(self._eqs[name].shape[1]):
-                    self._eqs[name][:, ip] += tmp
-
-        elif name == 'x0_fixedParameters':
-            k = self.sym('k')
-            self._x0_fixedParameters_idx = [
-                ix
-                for ix, eq in enumerate(self.eq('x0'))
-                if any(sym in eq.free_symbols for sym in k)
-            ]
-            eq = self.eq('x0')
-            self._eqs[name] = sp.Matrix([eq[ix] for ix in
-                                         self._x0_fixedParameters_idx])
-
-        elif name == 'dtotal_cldx_rdata':
-            x_rdata = self.sym('x_rdata')
-            self._eqs[name] = sp.Matrix(
-                [
-                    [cl.get_ncoeff(xr) for xr in x_rdata]
-                    for cl in self._conservationlaws
-                ]
-            )
-
-        elif name == 'dtcldx':
-            # this is always zero
-            self._eqs[name] = \
-                sp.zeros(self.num_cons_law(), self.num_states_solver())
-
-        elif name == 'dtcldp':
-            # force symbols
-            self._eqs[name] = self.sym(name)
-
-        elif name == 'dx_rdatadx_solver':
-            if self.num_cons_law():
-                x_solver = self.sym('x')
-                self._eqs[name] = sp.Matrix(
-                    [
-                        [state.get_dx_rdata_dx_solver(xs) for xs in x_solver]
-                        for state in self._states
-                    ]
-                )
-            else:
-                # so far, dx_rdatadx_solver is only required for sx_rdata;
-                # in the absence of conservation laws, the C++ code directly
-                # uses sx, so we don't need this
-                self._eqs[name] = \
-                    sp.zeros(self.num_states_rdata(),
-                             self.num_states_solver())
-
-        elif name == 'dx_rdatadp':
-            if self.num_cons_law():
-                self._eqs[name] = smart_jacobian(self.eq('x_rdata'),
-                                                 self.sym('p'))
-            else:
-                # so far, dx_rdatadp is only required for sx_rdata;
-                # in the absence of conservation laws, the C++ code directly
-                # uses sx, so we don't need this
-                self._eqs[name] = \
-                    sp.zeros(self.num_states_rdata(),
-                             self.num_par())
-
-        elif name == 'dx_rdatadtcl':
-            self._eqs[name] = smart_jacobian(self.eq('x_rdata'),
-                                             self.sym('tcl'))
-
-        elif name == 'dxdotdx_explicit':
-            # force symbols
-            self._derivative('xdot', 'x', name=name)
-
-        elif name == 'dxdotdp_explicit':
-            # force symbols
-            self._derivative('xdot', 'p', name=name)
-
-        elif name == 'drootdt':
-            self._eqs[name] = smart_jacobian(self.eq('root'), time_symbol)
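The `drootdt_total` branch that follows implements the total time derivative of an event trigger along trajectories; a minimal scalar sketch of the same chain rule (illustrative symbols only):

import sympy as sp

t, x = sp.symbols('t x')
xdot = -x                # right-hand side dx/dt = -x
root = x - sp.exp(-t)    # event trigger: fires when root == 0

# total time derivative along trajectories:
# droot/dt|_total = droot/dt + droot/dx * xdot
drootdt_total = sp.diff(root, t) + sp.diff(root, x) * xdot
print(drootdt_total)  # -x + exp(-t)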
-        elif name == 'drootdt_total':
-            # backsubstitution of optimized right-hand side terms into RHS
-            # calling subs() is costly. Due to looping over events though,
-            # the following lines are only evaluated if a model has events
-            w_sorted = \
-                toposort_symbols(dict(zip(self.sym('w'), self.eq('w'))))
-            tmp_xdot = smart_subs_dict(self.eq('xdot'), w_sorted)
-            self._eqs[name] = self.eq('drootdt')
-            if self.num_states_solver():
-                self._eqs[name] += smart_multiply(self.eq('drootdx'),
-                                                  tmp_xdot)
-
-        elif name == 'deltax':
-            # fill boluses for Heaviside functions, as empty state updates
-            # would cause problems when writing the function file later
-            event_eqs = []
-            for event in self._events:
-                if event._state_update is None:
-                    event_eqs.append(sp.zeros(self.num_states_solver(), 1))
-                else:
-                    event_eqs.append(event._state_update)
-
-            self._eqs[name] = event_eqs
-
-        elif name == 'z':
-            event_observables = [
-                sp.zeros(self.num_eventobs(), 1)
-                for _ in self._events
-            ]
-            event_ids = [
-                e.get_id() for e in self._events
-            ]
-            # TODO: get rid of this 1-based indexing as soon as we can
-            #  drop the matlab interface
-            z2event = [
-                event_ids.index(event_obs.get_event()) + 1
-                for event_obs in self._eventobservables
-            ]
-            for (iz, ie), event_obs in zip(enumerate(z2event),
-                                           self._eventobservables):
-                event_observables[ie - 1][iz] = event_obs.get_val()
-
-            self._eqs[name] = event_observables
-            self._z2event = z2event
-
-        elif name in ['ddeltaxdx', 'ddeltaxdp', 'ddeltaxdt', 'dzdp', 'dzdx']:
-            if match_deriv[2] == 't':
-                var = time_symbol
-            else:
-                var = self.sym(match_deriv[2])
-
-            self._eqs[name] = [
-                smart_jacobian(self.eq(match_deriv[1])[ie], var)
-                for ie in range(self.num_events())
-            ]
-            if name == 'dzdx':
-                for ie in range(self.num_events()):
-                    dtaudx = -self.eq('drootdx')[ie, :] / \
-                        self.eq('drootdt_total')[ie]
-                    for iz in range(self.num_eventobs()):
-                        if ie != self._z2event[iz] - 1:
-                            continue
-                        dzdt = sp.diff(self.eq('z')[ie][iz], time_symbol)
-                        self._eqs[name][ie][iz, :] += dzdt * dtaudx
-
-        elif name in ['rz', 'drzdx', 'drzdp']:
-            eq_events = []
-            for ie in range(self.num_events()):
-                val = sp.zeros(
-                    self.num_eventobs(),
-                    1 if name == 'rz' else len(self.sym(match_deriv[2]))
-                )
-                # match event observables to root function
-                for iz in range(self.num_eventobs()):
-                    if ie == self._z2event[iz] - 1:
-                        val[iz, :] = self.eq(name.replace('rz', 'root'))[ie, :]
-                eq_events.append(val)
-
-            self._eqs[name] = eq_events
-
-        elif name == 'stau':
-            self._eqs[name] = [
-                -self.eq('sroot')[ie, :] / self.eq('drootdt_total')[ie]
-                if not self.eq('drootdt_total')[ie].is_zero else
-                sp.zeros(*self.eq('sroot')[ie, :].shape)
-                for ie in range(self.num_events())
-            ]
-
-        elif name == 'deltasx':
-            event_eqs = []
-            for ie, event in enumerate(self._events):
-
-                tmp_eq = sp.zeros(self.num_states_solver(), self.num_par())
-
-                # need to check if equations are zero since we are using
-                # symbols
-                if not smart_is_zero_matrix(self.eq('stau')[ie]):
-                    tmp_eq += smart_multiply(
-                        (self.sym('xdot_old') - self.sym('xdot')),
-                        self.sym('stau').T)
-
-                # only add deltax part if there is a state update
-                if event._state_update is not None:
-                    # partial derivative for the parameters
-                    tmp_eq += self.eq('ddeltaxdp')[ie]
-
-                    # initial part of chain rule state variables
-                    tmp_dxdp = self.sym('sx') * sp.ones(1, self.num_par())
-
-                    # need to check if equations are zero since we are using
-                    # symbols
-                    if not smart_is_zero_matrix(self.eq('stau')[ie]):
-                        # chain rule for the time point
-                        tmp_eq += smart_multiply(self.eq('ddeltaxdt')[ie],
-                                                 self.sym('stau').T)
-
-                        # additional part of chain rule state variables
-                        # This part only works if we use self.eq('xdot')
-                        # instead of self.sym('xdot'). Not immediately clear
-                        # why that is.
-                        tmp_dxdp += smart_multiply(self.eq('xdot'),
-                                                   self.sym('stau').T)
-
-                    # finish chain rule for the state variables
-                    tmp_eq += smart_multiply(self.eq('ddeltaxdx')[ie],
-                                             tmp_dxdp)
-
-                event_eqs.append(tmp_eq)
-
-            self._eqs[name] = event_eqs
-
-        elif name == 'xdot_old':
-            # force symbols
-            self._eqs[name] = self.sym(name)
-
-        elif name == 'dwdx':
-            x = self.sym('x')
-            self._eqs[name] = sp.Matrix([
-                [-cl.get_ncoeff(xs) for xs in x]
-                # the insert first in ode_model.add_conservation_law() means
-                # that we need to reverse the order here
-                for cl in reversed(self._conservationlaws)
-            ]).col_join(smart_jacobian(self.eq('w')[self.num_cons_law():, :],
-                                       x))
-
-        elif match_deriv:
-            self._derivative(match_deriv[1], match_deriv[2], name)
-
-        else:
-            raise ValueError(f'Unknown equation {name}')
-
-        if name == 'root':
-            # Events are processed after the ODE model has been set up.
-            # Equations are there, but symbols for roots must be added
-            self.sym('h')
-
-        if name in {'Jy', 'dydx'}:
-            # do not transpose if we compute the partial derivative as part
-            # of a total derivative
-            if not len(self._lock_total_derivative):
-                self._eqs[name] = self._eqs[name].transpose()
-
-        if name in {'dzdx', 'drzdx'}:
-            self._eqs[name] = [
-                e.T for e in self._eqs[name]
-            ]
-
-        if self._simplify:
-            dec = log_execution_time(f'simplifying {name}', logger)
-            if isinstance(self._eqs[name], list):
-                self._eqs[name] = [
-                    dec(_parallel_applyfunc)(sub_eq, self._simplify)
-                    for sub_eq in self._eqs[name]
-                ]
-            else:
-                self._eqs[name] = dec(_parallel_applyfunc)(self._eqs[name],
-                                                           self._simplify)
-
-    def sym_names(self) -> List[str]:
-        """
-        Returns a list of names of generated symbolic variables
-
-        :return:
-            list of names
-        """
-        return list(self._syms.keys())
-
-    def _derivative(self, eq: str, var: str, name: str = None) -> None:
-        """
-        Creates a new symbolic variable according to a derivative
-
-        :param eq:
-            name of the symbolic variable that defines the formula
-
-        :param var:
-            name of the symbolic variable that defines the identifiers
-            with respect to which the derivatives are to be computed
-
-        :param name:
-            name of resulting symbolic variable, default is ``d{eq}d{var}``
-        """
-        if not name:
-            name = f'd{eq}d{var}'
-
-        ignore_chainrule = {
-            ('xdot', 'p'): 'w',  # has generic implementation in c++ code
-            ('xdot', 'x'): 'w',  # has generic implementation in c++ code
-            ('w', 'w'): 'tcl',   # dtcldw = 0
-            ('w', 'x'): 'tcl',   # dtcldx = 0
-        }
-        # automatically detect chainrule
-        chainvars = [
-            cv for cv in ['w', 'tcl']
-            if var_in_function_signature(eq, cv)
-            and cv not in self._lock_total_derivative
-            and var is not cv
-            and min(self.sym(cv).shape)
-            and (
-                (eq, var) not in ignore_chainrule
-                or ignore_chainrule[(eq, var)] != cv
-            )
-        ]
-        if len(chainvars):
-            self._lock_total_derivative += chainvars
-            self._total_derivative(name, eq, chainvars, var)
-            for cv in chainvars:
-                self._lock_total_derivative.remove(cv)
-            return
-
-        # partial derivative
-        sym_eq = self.eq(eq).transpose() if eq == 'Jy' else self.eq(eq)
-
-        sym_var = self.sym(var)
-
-        derivative = smart_jacobian(sym_eq, sym_var)
-
-        self._eqs[name] = derivative
-
-        # compute recursion depth based on nilpotency of jacobian; computing
-        # nilpotency can be done more efficiently on the numerical sparsity
-        # pattern
-        if name == 'dwdw':
-            nonzeros = np.asarray(
-                derivative.applyfunc(lambda x: int(not x.is_zero))
-            ).astype(np.int64)
-            recursion = nonzeros.copy()
-            if max(recursion.shape):
-                while recursion.max():
-                    recursion = recursion.dot(nonzeros)
-                    self._w_recursion_depth += 1
-                    if self._w_recursion_depth > len(sym_eq):
-                        raise RuntimeError(
-                            'dwdw is not nilpotent. Something, somewhere went '
-                            'terribly wrong. Please file a bug report at '
-                            'https://github.com/AMICI-dev/AMICI/issues and '
-                            'attach this model.'
-                        )
-
-        if name == 'dydw' and not smart_is_zero_matrix(derivative):
-            dwdw = self.eq('dwdw')
-            # h(k) = d{eq}dw*dwdw^k (k=1)
-            h = smart_multiply(derivative, dwdw)
-            while not smart_is_zero_matrix(h):
-                self._eqs[name] += h
-                # h(k+1) = d{eq}dw*dwdw^(k+1) = h(k)*dwdw
-                h = smart_multiply(h, dwdw)
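The nilpotency check above powers up the 0/1 dependency pattern of dwdw until it vanishes; the number of multiplications is the recursion depth. A small numeric sketch (hypothetical 3-expression dependency chain):

import numpy as np

# w1 depends on w0, w2 on w1 -> strictly lower-triangular pattern
nonzeros = np.array([[0, 0, 0],
                     [1, 0, 0],
                     [0, 1, 0]], dtype=np.int64)
depth, power = 0, nonzeros.copy()
while power.max():
    power = power.dot(nonzeros)
    depth += 1
print(depth)  # 2: after two substitutions of w into itself, nothing is left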
-    def _total_derivative(self, name: str, eq: str, chainvars: List[str],
-                          var: str, dydx_name: str = None,
-                          dxdz_name: str = None) -> None:
-        """
-        Creates a new symbolic variable according to a total derivative
-        using the chain rule
-
-        :param name:
-            name of resulting symbolic variable
-
-        :param eq:
-            name of the symbolic variable that defines the formula
-
-        :param chainvars:
-            names of the symbolic variable that define the
-            identifiers with respect to which the chain rules are applied
-
-        :param var:
-            name of the symbolic variable that defines the identifiers
-            with respect to which the derivatives are to be computed
-
-        :param dydx_name:
-            defines the name of the symbolic variable that
-            defines the derivative of the ``eq`` with respect to ``chainvar``,
-            default is ``d{eq}d{chainvar}``
-
-        :param dxdz_name:
-            defines the name of the symbolic variable that
-            defines the derivative of the ``chainvar`` with respect to
-            ``var``, default is ``d{chainvar}d{var}``
-        """
-        # compute total derivative according to chainrule
-        # Dydz = dydx*dxdz + dydz
-
-        # initialize with partial derivative dydz without chain rule
-        self._eqs[name] = self.sym_or_eq(name, f'd{eq}d{var}')
-        if not isinstance(self._eqs[name], sp.Symbol):
-            # if not a Symbol, create a copy using sympy API
-            # NB deepcopy does not work safely, see sympy issue #7672
-            self._eqs[name] = self._eqs[name].copy()
-
-        for chainvar in chainvars:
-            if dydx_name is None:
-                dydx_name = f'd{eq}d{chainvar}'
-            if dxdz_name is None:
-                dxdz_name = f'd{chainvar}d{var}'
-
-            dydx = self.sym_or_eq(name, dydx_name)
-            dxdz = self.sym_or_eq(name, dxdz_name)
-            # Save time for large models if one multiplicand is zero,
-            # which is not checked for by sympy
-            if not smart_is_zero_matrix(dydx) and not \
-                    smart_is_zero_matrix(dxdz):
-                dydx_times_dxdz = smart_multiply(dydx, dxdz)
-                if dxdz.shape[1] == 1 and \
-                        self._eqs[name].shape[1] != dxdz.shape[1]:
-                    for iz in range(self._eqs[name].shape[1]):
-                        self._eqs[name][:, iz] += dydx_times_dxdz
-                else:
-                    self._eqs[name] += dydx_times_dxdz
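The chain rule Dydz = dydx*dxdz + dydz used above, reduced to scalars (illustrative symbols only):

import sympy as sp

p, w = sp.symbols('p w')
w_expr = p ** 2   # w(p), an intermediate expression
y = w + p         # y(w, p)

# total derivative: Dy/Dp = dy/dw * dw/dp + dy/dp
total = sp.diff(y, w) * sp.diff(w_expr, p) + sp.diff(y, p)
assert total == 2 * p + 1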
- """ - # dwdx and dwdp will be dynamically computed and their ordering - # within a column may differ from the initialization of symbols here, - # so those are not safe to use. Not removing them from signature as - # this would break backwards compatibility. - if var_in_function_signature(name, varname) \ - and varname not in ['dwdx', 'dwdp']: - return self.sym(varname) - else: - return self.eq(varname) - - def _multiplication(self, name: str, x: str, y: str, - transpose_x: Optional[bool] = False, - sign: Optional[int] = 1): - """ - Creates a new symbolic variable according to a multiplication - - :param name: - name of resulting symbolic variable, default is ``d{eq}d{var}`` - - :param x: - name of the symbolic variable that defines the first factor - - :param y: - name of the symbolic variable that defines the second factor - - :param transpose_x: - indicates whether the first factor should be - transposed before multiplication - - :param sign: - defines the sign of the product, should be +1 or -1 - """ - if sign not in [-1, 1]: - raise TypeError(f'sign must be +1 or -1, was {sign}') - - variables = { - varname: self.sym(varname) - if var_in_function_signature(name, varname) - else self.eq(varname) - for varname in [x, y] - } - - xx = variables[x].transpose() if transpose_x else variables[x] - yy = variables[y] - - self._eqs[name] = sign * smart_multiply(xx, yy) - - def _equation_from_component(self, name: str, component: str) -> None: - """ - Generates the formulas of a symbolic variable from the attributes - - :param name: - name of resulting symbolic variable - - :param component: - name of the attribute - """ - self._eqs[name] = sp.Matrix( - [comp.get_val() for comp in getattr(self, component)] - ) - - def get_conservation_laws(self) -> List[Tuple[sp.Symbol, sp.Expr]]: - """Returns a list of states with conservation law set - - :return: - list of state identifiers - """ - return [ - (state.get_id(), state.get_x_rdata()) - for state in self._states - if state.has_conservation_law() - ] - - def _generate_value(self, name: str) -> None: - """ - Generates the numeric values of a symbolic variable from value - prototypes - - :param name: - name of resulting symbolic variable - """ - if name in self._value_prototype: - component = self._value_prototype[name] - else: - raise ValueError(f'No values for {name}') - - self._vals[name] = [comp.get_val() - for comp in getattr(self, component)] - - def _generate_name(self, name: str) -> None: - """ - Generates the names of a symbolic variable from variable prototypes or - equation prototypes - - :param name: - name of resulting symbolic variable - """ - if name in self._variable_prototype: - component = self._variable_prototype[name] - elif name in self._equation_prototype: - component = self._equation_prototype[name] - else: - raise ValueError(f'No names for {name}') - - self._names[name] = [comp.get_name() - for comp in getattr(self, component)] - - def state_has_fixed_parameter_initial_condition(self, ix: int) -> bool: - """ - Checks whether the state at specified index has a fixed parameter - initial condition - - :param ix: - state index - - :return: - boolean indicating if any of the initial condition free - variables is contained in the model constants - """ - ic = self._states[ix].get_val() - if not isinstance(ic, sp.Basic): - return False - return any( - fp in (c.get_id() for c in self._constants) - for fp in ic.free_symbols - ) - - def state_has_conservation_law(self, ix: int) -> bool: - """ - Checks whether the state at specified index 
-    def state_has_conservation_law(self, ix: int) -> bool:
-        """
-        Checks whether the state at specified index has a conservation
-        law set
-
-        :param ix:
-            state index
-
-        :return:
-            boolean indicating if conservation_law is not None
-        """
-        return self._states[ix].has_conservation_law()
-
-    def get_solver_indices(self) -> Dict[int, int]:
-        """
-        Returns a mapping that maps rdata species indices to solver indices
-
-        :return:
-            dictionary mapping rdata species indices to solver indices
-        """
-        solver_index = {}
-        ix_solver = 0
-        for ix in range(len(self._states)):
-            if self.state_has_conservation_law(ix):
-                continue
-            solver_index[ix] = ix_solver
-            ix_solver += 1
-        return solver_index
-
-    def state_is_constant(self, ix: int) -> bool:
-        """
-        Checks whether the temporal derivative of the state is zero
-
-        :param ix:
-            state index
-
-        :return:
-            boolean indicating if constant over time
-        """
-        return self._states[ix].get_dt() == 0.0
-
-    def conservation_law_has_multispecies(self,
-                                          tcl: ConservationLaw) -> bool:
-        """
-        Checks whether a conservation law has multiple species or it just
-        defines one constant species
-
-        :param tcl:
-            conservation law
-
-        :return:
-            boolean indicating whether the conservation law covers more
-            than one species
-        """
-        state_set = set(self.sym('x_rdata'))
-        n_species = len(state_set.intersection(tcl.get_val().free_symbols))
-        return n_species > 1
-
-    def _expr_is_time_dependent(self, expr: sp.Expr) -> bool:
-        """Determine whether an expression is time-dependent.
-
-        :param expr:
-            The expression.
-
-        :returns:
-            Whether the expression is time-dependent.
-        """
-        # `expr.free_symbols` will be different to `self._states.keys()`, so
-        # it's easier to compare as `str`.
-        expr_syms = {str(sym) for sym in expr.free_symbols}
-
-        # Check if the time variable is in the expression.
-        if 't' in expr_syms:
-            return True
-
-        # Check if any time-dependent states are in the expression.
-        state_syms = [str(sym) for sym in self._states]
-        return any(
-            not self.state_is_constant(state_syms.index(state))
-            for state in expr_syms.intersection(state_syms)
-        )
-
-    def _get_unique_root(
-            self,
-            root_found: sp.Expr,
-            roots: List[Event],
-    ) -> Union[sp.Symbol, None]:
-        """
-        Collects roots of Heaviside functions and events and stores them in
-        the roots list. It checks for redundancy to not store symbolically
-        equivalent root functions more than once.
-
-        :param root_found:
-            equation of the root function
-        :param roots:
-            list of already known root functions with identifier
-
-        :returns:
-            unique identifier for root, or ``None`` if the root is not
-            time-dependent
-        """
-        if not self._expr_is_time_dependent(root_found):
-            return None
-
-        for root in roots:
-            if sp.simplify(root_found - root.get_val()) == 0:
-                return root.get_id()
-
-        # create an event for a new root function
-        root_symstr = f'Heaviside_{len(roots)}'
-        roots.append(Event(
-            identifier=sp.Symbol(root_symstr),
-            name=root_symstr,
-            value=root_found,
-            state_update=None,
-        ))
-        return roots[-1].get_id()
-
-    def _collect_heaviside_roots(
-            self,
-            args: Sequence[sp.Expr],
-    ) -> List[sp.Expr]:
-        """
-        Recursively checks an expression for the occurrence of Heaviside
-        functions and returns all roots found
-
-        :param args:
-            args attribute of the expanded expression
-
-        :returns:
-            root functions that were extracted from Heaviside function
-            arguments
-        """
-        root_funs = []
-        for arg in args:
-            if arg.func == sp.Heaviside:
-                root_funs.append(arg.args[0])
-            elif arg.has(sp.Heaviside):
-                root_funs.extend(self._collect_heaviside_roots(arg.args))
-
-        # substitute 'w' expressions into root expressions now, to avoid
-        # rewriting '{model_name}_root.cpp' and '{model_name}_stau.cpp'
-        # headers to include 'w.h'
-        w_sorted = toposort_symbols(dict(zip(
-            [expr.get_id() for expr in self._expressions],
-            [expr.get_val() for expr in self._expressions],
-        )))
-        root_funs = [
-            r.subs(w_sorted)
-            for r in root_funs
-        ]
-
-        return root_funs
-
-    def _process_heavisides(
-            self,
-            dxdt: sp.Expr,
-            roots: List[Event],
-    ) -> sp.Expr:
-        """
-        Parses the RHS of a state variable, checks for Heaviside functions,
-        collects unique root functions that can be tracked by SUNDIALS and
-        replaces Heaviside functions by AMICI helper variables that will be
-        updated based on SUNDIALS root tracking.
-
-        :param dxdt:
-            right-hand side of state variable
-        :param roots:
-            list of known root functions with identifier
-
-        :returns:
-            dxdt with Heaviside functions replaced by AMICI helper variables
-        """
-
-        # expanding the rhs will in general help to collect the same
-        # heaviside function
-        dt_expanded = dxdt.expand()
-        # track all the old Heaviside expressions in tmp_roots_old
-        # replace them later by the new expressions
-        heavisides = []
-        # run through the expression tree and get the roots
-        tmp_roots_old = self._collect_heaviside_roots(dt_expanded.args)
-        for tmp_old in tmp_roots_old:
-            # we want unique identifiers for the roots
-            tmp_new = self._get_unique_root(tmp_old, roots)
-            # `tmp_new` is None if the root is not time-dependent.
-            if tmp_new is None:
-                continue
-            # For Heavisides, we need to add the negative function as well
-            self._get_unique_root(sp.sympify(-tmp_old), roots)
-            heavisides.append((sp.Heaviside(tmp_old), tmp_new))
-
-        if heavisides:
-            # only apply subs if necessary
-            for heaviside_sympy, heaviside_amici in heavisides:
-                dxdt = dxdt.subs(heaviside_sympy, heaviside_amici)
-
-        return dxdt
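The `sp.simplify(root_found - root.get_val()) == 0` test in `_get_unique_root` above deduplicates triggers that are structurally different but symbolically equal:

import sympy as sp

t = sp.Symbol('t')
a = 2 * (t - 1)
b = 2 * t - 2
# different expression trees, same root function -> treated as one root
print(sp.simplify(a - b) == 0)  # True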
-
-
-class ODEExporter:
-    """
-    The ODEExporter class generates AMICI C++ files for an ODE model
-    defined in symbolic expressions.
-
-    :ivar model:
-        ODE definition
-
-    :ivar verbose:
-        more verbose output if True
-
-    :ivar assume_pow_positivity:
-        if set to true, a special pow function is
-        used to avoid problems with state variables that may become negative
-        due to numerical errors
-
-    :ivar compiler:
-        distutils/setuptools compiler selection to build the Python extension
-
-    :ivar functions:
-        carries C++ function signatures and other specifications
-
-    :ivar model_name:
-        name of the model that will be used for compilation
-
-    :ivar model_path:
-        path to the generated model specific files
-
-    :ivar model_swig_path:
-        path to the generated swig files
-
-    :ivar allow_reinit_fixpar_initcond:
-        indicates whether reinitialization of
-        initial states depending on fixedParameters is allowed for this
-        model
-
-    :ivar _build_hints:
-        If the given model uses special functions, this set contains hints
-        for model building.
-
-    :ivar generate_sensitivity_code:
-        Specifies whether code for sensitivity computation is to be
-        generated
-
-    .. note::
-        When importing large models (several hundreds of species or
-        parameters), import time can potentially be reduced by using
-        multiple CPU cores. This is controlled by setting the
-        ``AMICI_IMPORT_NPROCS`` environment variable to the number of
-        parallel processes that are to be used (default: 1). Note that
-        for small models this may (slightly) increase import times.
-    """
-
-    def __init__(
-            self,
-            ode_model: ODEModel,
-            outdir: Optional[Union[Path, str]] = None,
-            verbose: Optional[Union[bool, int]] = False,
-            assume_pow_positivity: Optional[bool] = False,
-            compiler: Optional[str] = None,
-            allow_reinit_fixpar_initcond: Optional[bool] = True,
-            generate_sensitivity_code: Optional[bool] = True,
-            model_name: Optional[str] = 'model'
-    ):
-        """
-        Generate AMICI C++ files for the ODE provided to the constructor.
-
-        :param ode_model:
-            ODE definition
-
-        :param outdir:
-            see :meth:`amici.ode_export.ODEExporter.set_paths`
-
-        :param verbose:
-            verbosity level for logging, ``True``/``False`` default to
-            ``logging.DEBUG``/``logging.ERROR``
-
-        :param assume_pow_positivity:
-            if set to true, a special pow function is
-            used to avoid problems with state variables that may become
-            negative due to numerical errors
-
-        :param compiler: distutils/setuptools compiler selection to build the
-            python extension
-
-        :param allow_reinit_fixpar_initcond:
-            see :class:`amici.ode_export.ODEExporter`
-
-        :param generate_sensitivity_code:
-            specifies whether code required for sensitivity computation
-            will be generated
-
-        :param model_name:
-            name of the model to be used during code generation
-        """
-        set_log_level(logger, verbose)
-
-        self.verbose: bool = logger.getEffectiveLevel() <= logging.DEBUG
-        self.assume_pow_positivity: bool = assume_pow_positivity
-        self.compiler: str = compiler
-
-        self.model_path: str = ''
-        self.model_swig_path: str = ''
-
-        self.set_name(model_name)
-        self.set_paths(outdir)
-
-        # Signatures and properties of generated model functions (see
-        # include/amici/model.h for details)
-        self.model: ODEModel = ode_model
-
-        # To only generate a subset of functions, apply subselection here
-        self.functions: Dict[str, _FunctionInfo] = copy.deepcopy(functions)
-
-        self.allow_reinit_fixpar_initcond: bool = allow_reinit_fixpar_initcond
-        self._build_hints = set()
-        self.generate_sensitivity_code: bool = generate_sensitivity_code
-
-    @log_execution_time('generating cpp code', logger)
-    def generate_model_code(self) -> None:
-        """
-        Generates the native C++ code for the loaded model and a Matlab
-        script that can be run to compile a mex file from the C++ code
-        """
-        with _monkeypatched(sp.Pow, '_eval_derivative',
-                            _custom_pow_eval_derivative):
-            self._prepare_model_folder()
-            self._generate_c_code()
-            self._generate_m_code()
-
-    @log_execution_time('compiling cpp code', logger)
-    def compile_model(self) -> None:
-        """
-        Compiles the generated code into a simulatable module
-        """
-        self._compile_c_code(compiler=self.compiler,
-                             verbose=self.verbose)
-
-    def _prepare_model_folder(self) -> None:
-        """
-        Create model directory or remove all files if the output directory
-        already exists.
-        """
-        os.makedirs(self.model_path, exist_ok=True)
-
-        for file in os.listdir(self.model_path):
-            file_path = os.path.join(self.model_path, file)
-            if os.path.isfile(file_path):
-                os.remove(file_path)
-
-    def _generate_c_code(self) -> None:
-        """
-        Create C++ code files for the model based on
-        :attr:`ODEExporter.model`.
- """ - for func_name, func_info in self.functions.items(): - if func_name in sensi_functions + sparse_sensi_functions and \ - not self.generate_sensitivity_code: - continue - - if func_info.generate_body: - dec = log_execution_time(f'writing {func_name}.cpp', logger) - dec(self._write_function_file)(func_name) - if func_name in sparse_functions and func_info.body: - self._write_function_index(func_name, 'colptrs') - self._write_function_index(func_name, 'rowvals') - - for name in self.model.sym_names(): - # only generate for those that have nontrivial implementation, - # check for both basic variables (not in functions) and function - # computed values - if (name in self.functions - and not self.functions[name].body - and name not in nobody_functions) \ - or (name not in self.functions and - len(self.model.sym(name)) == 0): - continue - self._write_index_files(name) - - self._write_wrapfunctions_cpp() - self._write_wrapfunctions_header() - self._write_model_header_cpp() - self._write_c_make_file() - self._write_swig_files() - self._write_module_setup() - - shutil.copy(CXX_MAIN_TEMPLATE_FILE, - os.path.join(self.model_path, 'main.cpp')) - - def _compile_c_code(self, - verbose: Optional[Union[bool, int]] = False, - compiler: Optional[str] = None) -> None: - """ - Compile the generated model code - - :param verbose: - Make model compilation verbose - - :param compiler: - distutils/setuptools compiler selection to build the python - extension - """ - # setup.py assumes it is run from within the model directory - module_dir = self.model_path - script_args = [sys.executable, os.path.join(module_dir, 'setup.py')] - - if verbose: - script_args.append('--verbose') - else: - script_args.append('--quiet') - - script_args.extend(['build_ext', f'--build-lib={module_dir}']) - - if compiler is not None: - script_args.extend([f'--compiler={compiler}']) - - # distutils.core.run_setup looks nicer, but does not let us check the - # result easily - try: - result = subprocess.run(script_args, - cwd=module_dir, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - check=True) - except subprocess.CalledProcessError as e: - print(e.output.decode('utf-8')) - print("Failed building the model extension.") - if self._build_hints: - print("Note:") - print('\n'.join(self._build_hints)) - raise - - if verbose: - print(result.stdout.decode('utf-8')) - - def _generate_m_code(self) -> None: - """ - Create a Matlab script for compiling code files to a mex file - """ - - # Second order code is not yet implemented. 
-    def _generate_m_code(self) -> None:
-        """
-        Create a Matlab script for compiling code files to a mex file
-        """
-        # Second order code is not yet implemented. Once this is done,
-        # those variables will have to be replaced by
-        # "self.model.true()", or the corresponding "model.self.o2flag"
-        nxtrue_rdata = self.model.num_states_rdata()
-        nytrue = self.model.num_obs()
-        nztrue = self.model.num_eventobs()
-        o2flag = 0
-
-        lines = [
-            '% This compile script was automatically created from'
-            ' Python SBML import.',
-            '% If mex compiler is set up within MATLAB, it can be run'
-            ' from MATLAB ',
-            '% in order to compile a mex-file from the Python'
-            ' generated C++ files.',
-            '',
-            f"modelName = '{self.model_name}';",
-            "amimodel.compileAndLinkModel(modelName, '', [], [], [], []);",
-            f"amimodel.generateMatlabWrapper({nxtrue_rdata}, "
-            f"{nytrue}, {self.model.num_par()}, "
-            f"{self.model.num_const()}, {nztrue}, {o2flag}, ...",
-            "    [], ['simulate_' modelName '.m'], modelName, ...",
-            "    'lin', 1, 1);"
-        ]
-
-        # write compile script (for mex)
-        compile_script = os.path.join(self.model_path, 'compileMexFile.m')
-        with open(compile_script, 'w') as fileout:
-            fileout.write('\n'.join(lines))
-
-    def _write_index_files(self, name: str) -> None:
-        """
-        Write index file for a symbolic array.
-
-        :param name:
-            key in ``self.model._syms`` for which the respective file should
-            be written
-        """
-        if name not in self.model.sym_names():
-            raise ValueError(f'Unknown symbolic array: {name}')
-
-        symbols = self.model.sparsesym(name) if name in sparse_functions \
-            else self.model.sym(name).T
-
-        # flatten multiobs
-        if isinstance(next(iter(symbols), None), list):
-            symbols = [symbol for obs in symbols for symbol in obs]
-
-        lines = []
-        for index, symbol in enumerate(symbols):
-            symbol_name = strip_pysb(symbol)
-            if str(symbol) == '0':
-                continue
-            if str(symbol_name) == '':
-                raise ValueError(f'{name} contains a symbol called ""')
-            lines.append(f'#define {symbol_name} {name}[{index}]')
-            if name == 'stau':
-                # we only need a single macro, as all entries have the same
-                # symbol
-                break
-
-        filename = os.path.join(self.model_path,
-                                f'{self.model_name}_{name}.h')
-        with open(filename, 'w') as fileout:
-            fileout.write('\n'.join(lines))
-
-    def _write_function_file(self, function: str) -> None:
-        """
-        Generate equations and write the C++ code for the function
-        ``function``.
-
-        :param function:
-            name of the function to be written (see ``self.functions``)
-        """
-
-        # first generate the equations to make sure we have everything we
-        # need in subsequent steps
-        if function in sparse_functions:
-            equations = self.model.sparseeq(function)
-        elif not self.allow_reinit_fixpar_initcond \
-                and function == 'sx0_fixedParameters':
-            # Not required. Will create empty function body.
-            equations = sp.Matrix()
-        else:
-            equations = self.model.eq(function)
-
-        # function header
-        lines = [
-            '#include "amici/symbolic_functions.h"',
-            '#include "amici/defines.h"',
-            '#include "sundials/sundials_types.h"',
-            '',
-            '#include <gsl/gsl-lite.hpp>',
-            '#include <algorithm>',
-            ''
-        ]
-
-        func_info = self.functions[function]
-
-        # extract symbols that need definitions from signature
-        # don't add includes for files that won't be generated.
-        # Unfortunately we cannot check for `self.functions[sym].body`
-        # here since it may not have been generated yet.
- for sym in re.findall( - r'const (?:realtype|double) \*([\w]+)[0]*(?:,|$)', - func_info.arguments - ): - if sym not in self.model.sym_names(): - continue - - if sym in sparse_functions: - iszero = smart_is_zero_matrix(self.model.sparseeq(sym)) - elif sym in self.functions: - iszero = smart_is_zero_matrix(self.model.eq(sym)) - else: - iszero = len(self.model.sym(sym)) == 0 - - if iszero: - continue - - lines.append(f'#include "{self.model_name}_{sym}.h"') - - # include return symbols - if function in self.model.sym_names() and \ - function not in non_unique_id_symbols: - lines.append(f'#include "{self.model_name}_{function}.h"') - - lines.extend([ - '', - 'namespace amici {', - f'namespace model_{self.model_name} {{', - '', - f'{func_info.return_type} {function}_{self.model_name}' - f'({func_info.arguments}){{' - ]) - - # function body - body = self._get_function_body(function, equations) - if not body: - return - - if self.assume_pow_positivity and func_info.assume_pow_positivity: - pow_rx = re.compile(r'(^|\W)std::pow\(') - body = [ - # execute this twice to catch cases where the ending '(' would - # be the starting (^|\W) for the following match - pow_rx.sub(r'\1amici::pos_pow(', - pow_rx.sub(r'\1amici::pos_pow(', line)) - for line in body - ] - - self.functions[function].body = body - - lines += body - lines.extend([ - '}', - '', - f'}} // namespace model_{self.model_name}', - '} // namespace amici\n', - ]) - - # check custom functions - for fun in CUSTOM_FUNCTIONS: - if 'include' in fun and any(fun['c++'] in line for line in lines): - if 'build_hint' in fun: - self._build_hints.add(fun['build_hint']) - lines.insert(0, fun['include']) - - # if not body is None: - filename = os.path.join(self.model_path, - f'{self.model_name}_{function}.cpp') - with open(filename, 'w') as fileout: - fileout.write('\n'.join(lines)) - - def _write_function_index(self, function: str, indextype: str) -> None: - """ - Generate equations and write the C++ code for the function - ``function``. 
-
-        :param function:
-            name of the function to be written (see ``self.functions``)
-
-        :param indextype:
-            type of index {'colptrs', 'rowvals'}
-        """
-        if indextype == 'colptrs':
-            values = self.model.colptrs(function)
-            setter = 'indexptrs'
-        elif indextype == 'rowvals':
-            values = self.model.rowvals(function)
-            setter = 'indexvals'
-        else:
-            raise ValueError('Invalid value for indextype, must be colptrs or '
-                             f'rowvals: {indextype}')
-
-        # function signature
-        if function in multiobs_functions:
-            signature = f'(SUNMatrixWrapper &{function}, int index)'
-        else:
-            signature = f'(SUNMatrixWrapper &{function})'
-
-        lines = [
-            '#include "amici/sundials_matrix_wrapper.h"',
-            '#include "sundials/sundials_types.h"',
-            '',
-            '#include <array>',
-            '#include <algorithm>',
-            '',
-            'namespace amici {',
-            f'namespace model_{self.model_name} {{',
-            '',
-        ]
-
-        # Generate static array with indices
-        if len(values):
-            static_array_name = f"{function}_{indextype}_{self.model_name}_"
-            if function in multiobs_functions:
-                # list of index vectors
-                lines.append(
-                    "static constexpr std::array<std::array<sunindextype, "
-                    f"{len(values[0])}>, {len(values)}> "
-                    f"{static_array_name} = {{{{"
-                )
-                lines.extend(['    {'
-                              + ', '.join(map(str, index_vector)) + '}, '
-                              for index_vector in values])
-                lines.append("}};")
-            else:
-                # single index vector
-                lines.extend([
-                    "static constexpr std::array<sunindextype, "
-                    f"{len(values)}> {static_array_name} = {{",
-                    '    ' + ', '.join(map(str, values)),
-                    "};"
-                ])
-
-        lines.extend([
-            '',
-            f'void {function}_{indextype}_{self.model_name}{signature}{{',
-        ])
-
-        if len(values):
-            if function in multiobs_functions:
-                lines.append(
-                    f"    {function}.set_{setter}"
-                    f"(gsl::make_span({static_array_name}[index]));"
-                )
-            else:
-                lines.append(
-                    f"    {function}.set_{setter}"
-                    f"(gsl::make_span({static_array_name}));"
-                )
-
-        lines.extend([
-            '}',
-            '',
-            f'}} // namespace model_{self.model_name}',
-            '} // namespace amici\n',
-        ])
-
-        filename = f'{self.model_name}_{function}_{indextype}.cpp'
-        filename = os.path.join(self.model_path, filename)
-
-        with open(filename, 'w') as fileout:
-            fileout.write('\n'.join(lines))
-
-    def _get_function_body(
-            self,
-            function: str,
-            equations: sp.Matrix
-    ) -> List[str]:
-        """
-        Generate C++ code for body of function ``function``.
-
-        :param function:
-            name of the function to be written (see ``self.functions``)
-
-        :param equations:
-            symbolic definition of the function body
-
-        :return:
-            generated C++ code
-        """
-        lines = []
-
-        if (
-            len(equations) == 0
-            or (
-                isinstance(equations, (sp.Matrix, sp.ImmutableDenseMatrix))
-                and min(equations.shape) == 0
-            )
-        ):
-            # dJydy is a list
-            return lines
-
-        if not self.allow_reinit_fixpar_initcond and function in {
-            'sx0_fixedParameters',
-            'x0_fixedParameters',
-        }:
-            return lines
-
-        if function == 'sx0_fixedParameters':
-            # here we only want to overwrite values where x0_fixedParameters
-            # was applied
-
-            lines.extend([
-                # Keep list of indices of fixed parameters occurring in x0
-                "    static const std::array<int, "
-                f"{len(self.model._x0_fixedParameters_idx)}> "
-                "_x0_fixedParameters_idxs = {",
-                "    "
-                + ', '.join(str(x)
-                            for x in self.model._x0_fixedParameters_idx),
-                "    };",
-                "",
-                # Set all parameters that are to be reset to 0, so that the
-                # switch statement below only needs to handle non-zero entries
-                # (which usually reduces file size and speeds up
-                # compilation significantly).
-                
- " for(auto idx: reinitialization_state_idxs) {", - " if(std::find(_x0_fixedParameters_idxs.cbegin(), " - "_x0_fixedParameters_idxs.cend(), idx) != " - "_x0_fixedParameters_idxs.cend())\n" - " sx0_fixedParameters[idx] = 0.0;", - " }" - ]) - - cases = {} - for ipar in range(self.model.num_par()): - expressions = [] - for index, formula in zip( - self.model._x0_fixedParameters_idx, - equations[:, ipar] - ): - if not formula.is_zero: - expressions.extend([ - f'if(std::find(' - 'reinitialization_state_idxs.cbegin(), ' - f'reinitialization_state_idxs.cend(), {index}) != ' - 'reinitialization_state_idxs.cend())', - f' {function}[{index}] = ' - f'{self.model._code_printer.doprint(formula)};' - ]) - cases[ipar] = expressions - lines.extend(get_switch_statement('ip', cases, 1)) - - elif function == 'x0_fixedParameters': - for index, formula in zip( - self.model._x0_fixedParameters_idx, - equations - ): - lines.append( - f' if(std::find(reinitialization_state_idxs.cbegin(), ' - f'reinitialization_state_idxs.cend(), {index}) != ' - 'reinitialization_state_idxs.cend())\n ' - f'{function}[{index}] = ' - f'{self.model._code_printer.doprint(formula)};' - ) - - elif function in event_functions: - cases = { - ie: self.model._code_printer._get_sym_lines_array( - equations[ie], function, 0) - for ie in range(self.model.num_events()) - if not smart_is_zero_matrix(equations[ie]) - } - lines.extend(get_switch_statement('ie', cases, 1)) - - elif function in event_sensi_functions: - outer_cases = {} - for ie, inner_equations in enumerate(equations): - inner_lines = [] - inner_cases = { - ipar: self.model._code_printer._get_sym_lines_array( - inner_equations[:, ipar], function, 0) - for ipar in range(self.model.num_par()) - if not smart_is_zero_matrix(inner_equations[:, ipar]) - } - inner_lines.extend(get_switch_statement( - 'ip', inner_cases, 0)) - outer_cases[ie] = copy.copy(inner_lines) - lines.extend(get_switch_statement('ie', outer_cases, 1)) - - elif function in sensi_functions \ - and equations.shape[1] == self.model.num_par(): - cases = { - ipar: self.model._code_printer._get_sym_lines_array( - equations[:, ipar], function, 0) - for ipar in range(self.model.num_par()) - if not smart_is_zero_matrix(equations[:, ipar]) - } - lines.extend(get_switch_statement('ip', cases, 1)) - elif function in multiobs_functions: - if function == 'dJydy': - cases = { - iobs: self.model._code_printer._get_sym_lines_array( - equations[iobs], function, 0) - for iobs in range(self.model.num_obs()) - if not smart_is_zero_matrix(equations[iobs]) - } - else: - cases = { - iobs: self.model._code_printer._get_sym_lines_array( - equations[:, iobs], function, 0) - for iobs in range(equations.shape[1]) - if not smart_is_zero_matrix(equations[:, iobs]) - } - if function.startswith(('Jz', 'dJz', 'Jrz', 'dJrz')): - iterator = 'iz' - else: - iterator = 'iy' - lines.extend(get_switch_statement(iterator, cases, 1)) - - elif function in self.model.sym_names() \ - and function not in non_unique_id_symbols: - if function in sparse_functions: - symbols = self.model.sparsesym(function) - else: - symbols = self.model.sym(function) - lines += self.model._code_printer._get_sym_lines_symbols( - symbols, equations, function, 4) - - else: - lines += self.model._code_printer._get_sym_lines_array( - equations, function, 4) - - return [line for line in lines if line] - - def _write_wrapfunctions_cpp(self) -> None: - """ - Write model-specific 'wrapper' file (``wrapfunctions.cpp``). 
-        """
-        template_data = {'MODELNAME': self.model_name}
-        apply_template(
-            os.path.join(amiciSrcPath, 'wrapfunctions.template.cpp'),
-            os.path.join(self.model_path, 'wrapfunctions.cpp'),
-            template_data
-        )
-
-    def _write_wrapfunctions_header(self) -> None:
-        """
-        Write model-specific header file (``wrapfunctions.h``).
-        """
-        template_data = {'MODELNAME': str(self.model_name)}
-        apply_template(
-            os.path.join(amiciSrcPath, 'wrapfunctions.ODE_template.h'),
-            os.path.join(self.model_path, 'wrapfunctions.h'),
-            template_data
-        )
-
-    def _write_model_header_cpp(self) -> None:
-        """
-        Write model-specific header and cpp file (MODELNAME.{h,cpp}).
-        """
-
-        tpl_data = {
-            'MODELNAME': self.model_name,
-            'NX_RDATA': self.model.num_states_rdata(),
-            'NXTRUE_RDATA': self.model.num_states_rdata(),
-            'NX_SOLVER': self.model.num_states_solver(),
-            'NXTRUE_SOLVER': self.model.num_states_solver(),
-            'NX_SOLVER_REINIT': self.model.num_state_reinits(),
-            'NY': self.model.num_obs(),
-            'NYTRUE': self.model.num_obs(),
-            'NZ': self.model.num_eventobs(),
-            'NZTRUE': self.model.num_eventobs(),
-            'NEVENT': self.model.num_events(),
-            'NOBJECTIVE': '1',
-            'NW': len(self.model.sym('w')),
-            'NDWDP': len(self.model.sparsesym(
-                'dwdp', force_generate=self.generate_sensitivity_code
-            )),
-            'NDWDX': len(self.model.sparsesym('dwdx')),
-            'NDWDW': len(self.model.sparsesym('dwdw')),
-            'NDXDOTDW': len(self.model.sparsesym('dxdotdw')),
-            'NDXDOTDP_EXPLICIT': len(self.model.sparsesym(
-                'dxdotdp_explicit',
-                force_generate=self.generate_sensitivity_code
-            )),
-            'NDXDOTDX_EXPLICIT': len(self.model.sparsesym(
-                'dxdotdx_explicit')),
-            'NDJYDY': 'std::vector<int>{%s}'
-                      % ','.join(str(len(x))
-                                 for x in self.model.sparsesym('dJydy')),
-            'NDXRDATADXSOLVER': len(self.model.sparsesym('dx_rdatadx_solver')),
-            'NDXRDATADTCL': len(self.model.sparsesym('dx_rdatadtcl')),
-            'NDTOTALCLDXRDATA': len(self.model.sparsesym('dtotal_cldx_rdata')),
-            'UBW': self.model.num_states_solver(),
-            'LBW': self.model.num_states_solver(),
-            'NP': self.model.num_par(),
-            'NK': self.model.num_const(),
-            'O2MODE': 'amici::SecondOrderMode::none',
-            # using code printer ensures proper handling of nan/inf
-            'PARAMETERS': self.model._code_printer.doprint(
-                self.model.val('p'))[1:-1],
-            'FIXED_PARAMETERS': self.model._code_printer.doprint(
-                self.model.val('k'))[1:-1],
-            'PARAMETER_NAMES_INITIALIZER_LIST':
-                self._get_symbol_name_initializer_list('p'),
-            'STATE_NAMES_INITIALIZER_LIST':
-                self._get_symbol_name_initializer_list('x_rdata'),
-            'FIXED_PARAMETER_NAMES_INITIALIZER_LIST':
-                self._get_symbol_name_initializer_list('k'),
-            'OBSERVABLE_NAMES_INITIALIZER_LIST':
-                self._get_symbol_name_initializer_list('y'),
-            'OBSERVABLE_TRAFO_INITIALIZER_LIST':
-                '\n'.join(
-                    f'ObservableScaling::{trafo}, // y[{idx}]'
-                    for idx, trafo in enumerate(
-                        self.model.get_observable_transformations()
-                    )
-                ),
-            'EXPRESSION_NAMES_INITIALIZER_LIST':
-                self._get_symbol_name_initializer_list('w'),
-            'PARAMETER_IDS_INITIALIZER_LIST':
-                self._get_symbol_id_initializer_list('p'),
-            'STATE_IDS_INITIALIZER_LIST':
-                self._get_symbol_id_initializer_list('x_rdata'),
-            'FIXED_PARAMETER_IDS_INITIALIZER_LIST':
-                self._get_symbol_id_initializer_list('k'),
-            'OBSERVABLE_IDS_INITIALIZER_LIST':
-                self._get_symbol_id_initializer_list('y'),
-            'EXPRESSION_IDS_INITIALIZER_LIST':
-                self._get_symbol_id_initializer_list('w'),
-            'STATE_IDXS_SOLVER_INITIALIZER_LIST':
-                ', '.join(
-                    str(idx)
-                    for idx, state in enumerate(self.model._states)
-                    if not state.has_conservation_law()
-                ),
-            'REINIT_FIXPAR_INITCOND':
-                
AmiciCxxCodePrinter.print_bool( - self.allow_reinit_fixpar_initcond), - 'AMICI_VERSION_STRING': __version__, - 'AMICI_COMMIT_STRING': __commit__, - 'W_RECURSION_DEPTH': self.model._w_recursion_depth, - 'QUADRATIC_LLH': AmiciCxxCodePrinter.print_bool( - self.model._has_quadratic_nllh), - 'ROOT_INITIAL_VALUES': - ', '.join(map( - lambda event: AmiciCxxCodePrinter.print_bool( - event.get_initial_value()), - self.model._events)), - 'Z2EVENT': - ', '.join(map(str, self.model._z2event)) - } - - for func_name, func_info in self.functions.items(): - if func_name in nobody_functions: - continue - - if not func_info.body: - tpl_data[f'{func_name.upper()}_DEF'] = '' - - if func_name in sensi_functions + sparse_sensi_functions and \ - not self.generate_sensitivity_code: - impl = '' - else: - impl = get_model_override_implementation( - func_name, self.model_name, nobody=True - ) - - tpl_data[f'{func_name.upper()}_IMPL'] = impl - - if func_name in sparse_functions: - for indexfield in ['colptrs', 'rowvals']: - if func_name in sparse_sensi_functions and \ - not self.generate_sensitivity_code: - impl = '' - else: - impl = get_sunindex_override_implementation( - func_name, self.model_name, indexfield, - nobody=True - ) - tpl_data[f'{func_name.upper()}_{indexfield.upper()}_DEF'] \ - = '' - tpl_data[f'{func_name.upper()}_{indexfield.upper()}_IMPL'] \ - = impl - continue - - tpl_data[f'{func_name.upper()}_DEF'] = \ - get_function_extern_declaration(func_name, self.model_name) - tpl_data[f'{func_name.upper()}_IMPL'] = \ - get_model_override_implementation(func_name, self.model_name) - if func_name in sparse_functions: - tpl_data[f'{func_name.upper()}_COLPTRS_DEF'] = \ - get_sunindex_extern_declaration( - func_name, self.model_name, 'colptrs') - tpl_data[f'{func_name.upper()}_COLPTRS_IMPL'] = \ - get_sunindex_override_implementation( - func_name, self.model_name, 'colptrs') - tpl_data[f'{func_name.upper()}_ROWVALS_DEF'] = \ - get_sunindex_extern_declaration( - func_name, self.model_name, 'rowvals') - tpl_data[f'{func_name.upper()}_ROWVALS_IMPL'] = \ - get_sunindex_override_implementation( - func_name, self.model_name, 'rowvals') - - if self.model.num_states_solver() == self.model.num_states_rdata(): - tpl_data['X_RDATA_DEF'] = '' - tpl_data['X_RDATA_IMPL'] = '' - - tpl_data = {k: str(v) for k, v in tpl_data.items()} - - apply_template( - os.path.join(amiciSrcPath, 'model_header.ODE_template.h'), - os.path.join(self.model_path, f'{self.model_name}.h'), - tpl_data - ) - - apply_template( - os.path.join(amiciSrcPath, 'model.ODE_template.cpp'), - os.path.join(self.model_path, f'{self.model_name}.cpp'), - tpl_data - ) - - def _get_symbol_name_initializer_list(self, name: str) -> str: - """ - Get SBML name initializer list for vector of names for the given - model entity - - :param name: - any key present in ``self.model._syms`` - - :return: - Template initializer list of names - """ - return '\n'.join( - f'"{symbol}", // {name}[{idx}]' - for idx, symbol in enumerate(self.model.name(name)) - ) - - def _get_symbol_id_initializer_list(self, name: str) -> str: - """ - Get C++ initializer list for vector of names for the given model - entity - - :param name: - any key present in ``self.model._syms`` - - :return: - Template initializer list of ids - """ - return '\n'.join( - f'"{self.model._code_printer.doprint(symbol)}", // {name}[{idx}]' - for idx, symbol in enumerate(self.model.sym(name)) - ) - - def _write_c_make_file(self): - """Write CMake ``CMakeLists.txt`` file for this model.""" - sources = '\n'.join( - f + ' ' 
for f in os.listdir(self.model_path) - if f.endswith('.cpp') and f != 'main.cpp' - ) - - template_data = {'MODELNAME': self.model_name, - 'SOURCES': sources, - 'AMICI_VERSION': __version__} - apply_template( - MODEL_CMAKE_TEMPLATE_FILE, - Path(self.model_path, 'CMakeLists.txt'), - template_data - ) - - def _write_swig_files(self) -> None: - """Write SWIG interface files for this model.""" - Path(self.model_swig_path).mkdir(exist_ok=True) - template_data = {'MODELNAME': self.model_name} - apply_template( - Path(amiciSwigPath, 'modelname.template.i'), - Path(self.model_swig_path, self.model_name + '.i'), - template_data - ) - shutil.copy(SWIG_CMAKE_TEMPLATE_FILE, - Path(self.model_swig_path, 'CMakeLists.txt')) - - def _write_module_setup(self) -> None: - """ - Create a setuptools ``setup.py`` file for compile the model module. - """ - - template_data = {'MODELNAME': self.model_name, - 'AMICI_VERSION': __version__, - 'PACKAGE_VERSION': '0.1.0'} - apply_template(Path(amiciModulePath, 'setup.template.py'), - Path(self.model_path, 'setup.py'), - template_data) - apply_template(Path(amiciModulePath, 'MANIFEST.template.in'), - Path(self.model_path, 'MANIFEST.in'), {}) - # write __init__.py for the model module - Path(self.model_path, self.model_name).mkdir(exist_ok=True) - - apply_template( - Path(amiciModulePath, '__init__.template.py'), - Path(self.model_path, self.model_name, '__init__.py'), - template_data - ) - - def set_paths(self, output_dir: Optional[Union[str, Path]] = None) -> None: - """ - Set output paths for the model and create if necessary - - :param output_dir: - relative or absolute path where the generated model - code is to be placed. If ``None``, this will default to - ``amici-{self.model_name}`` in the current working directory. - will be created if it does not exist. - - """ - if output_dir is None: - output_dir = os.path.join(os.getcwd(), - f'amici-{self.model_name}') - - self.model_path = os.path.abspath(output_dir) - self.model_swig_path = os.path.join(self.model_path, 'swig') - - def set_name(self, model_name: str) -> None: - """ - Sets the model name - - :param model_name: - name of the model (may only contain upper and lower case letters, - digits and underscores, and must not start with a digit) - """ - if not is_valid_identifier(model_name): - raise ValueError( - f"'{model_name}' is not a valid model name. " - "Model name may only contain upper and lower case letters, " - "digits and underscores, and must not start with a digit.") - - self.model_name = model_name - - -class TemplateAmici(Template): - """ - Template format used in AMICI (see :class:`string.Template` for more - details). - - :cvar delimiter: - delimiter that identifies template variables - """ - delimiter = 'TPL_' - - -def apply_template(source_file: Union[str, Path], - target_file: Union[str, Path], - template_data: Dict[str, str]) -> None: - """ - Load source file, apply template substitution as provided in - templateData and save as targetFile. 
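-
-    For example (hypothetical file names)::
-
-        apply_template('model.template.cpp', 'model.cpp',
-                       {'MODELNAME': 'my_model'})
-
-    replaces every occurrence of ``TPL_MODELNAME`` in the template with
-    ``my_model``.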
- - :param source_file: - relative or absolute path to template file - - :param target_file: - relative or absolute path to output file - - :param template_data: - template keywords to substitute (key is template - variable without :attr:`TemplateAmici.delimiter`) - """ - with open(source_file) as filein: - src = TemplateAmici(filein.read()) - result = src.safe_substitute(template_data) - with open(target_file, 'w') as fileout: - fileout.write(result) - - -def get_function_extern_declaration(fun: str, name: str) -> str: - """ - Constructs the extern function declaration for a given function - - :param fun: - function name - :param name: - model name - - :return: - C++ function definition string - """ - f = functions[fun] - return f'extern {f.return_type} {fun}_{name}({f.arguments});' - - -def get_sunindex_extern_declaration(fun: str, name: str, - indextype: str) -> str: - """ - Constructs the function declaration for an index function of a given - function - - :param fun: - function name - - :param name: - model name - - :param indextype: - index function {'colptrs', 'rowvals'} - - :return: - C++ function declaration string - """ - index_arg = ', int index' if fun in multiobs_functions else '' - return \ - f'extern void {fun}_{indextype}_{name}' \ - f'(SUNMatrixWrapper &{indextype}{index_arg});' - - -def get_model_override_implementation(fun: str, name: str, - nobody: bool = False) -> str: - """ - Constructs ``amici::Model::*`` override implementation for a given function - - :param fun: - function name - - :param name: - model name - - :param nobody: - whether the function has a nontrivial implementation - - :return: - C++ function implementation string - """ - impl = '{return_type} f{fun}({signature}) override {{' - - if nobody: - impl += '}}\n' - else: - impl += '\n{ind8}{fun}_{name}({eval_signature});\n{ind4}}}\n' - - func_info = functions[fun] - - return impl.format( - ind4=' ' * 4, - ind8=' ' * 8, - fun=fun, - name=name, - signature=func_info.arguments, - eval_signature=remove_argument_types(func_info.arguments), - return_type=func_info.return_type - ) - - -def get_sunindex_override_implementation(fun: str, name: str, - indextype: str, - nobody: bool = False) -> str: - """ - Constructs the ``amici::Model`` function implementation for an index - function of a given function - - :param fun: - function name - - :param name: - model name - - :param indextype: - index function {'colptrs', 'rowvals'} - - :param nobody: - whether the corresponding function has a nontrivial implementation - - :return: - C++ function implementation string - """ - index_arg = ', int index' if fun in multiobs_functions else '' - index_arg_eval = ', index' if fun in multiobs_functions else '' - - impl = 'void f{fun}_{indextype}({signature}) override {{' - - if nobody: - impl += '}}\n' - else: - impl += '{ind8}{fun}_{indextype}_{name}({eval_signature});\n{ind4}}}\n' - - return impl.format( - ind4=' ' * 4, - ind8=' ' * 8, - fun=fun, - indextype=indextype, - name=name, - signature=f'SUNMatrixWrapper &{indextype}{index_arg}', - eval_signature=f'{indextype}{index_arg_eval}', - ) - - -def remove_argument_types(signature: str) -> str: - """ - Strips argument types from a function signature - - :param signature: - function signature - - :return: - string that can be used to construct function calls with the same - variable names and ordering as in the function signature - """ - # remove * prefix for pointers (pointer must always be removed before - # values otherwise we will inadvertently dereference values, - # same 
applies for const specifications)
-    #
-    # always add whitespace after type definition for cosmetic reasons
-    known_types = [
-        'const realtype *',
-        'const double *',
-        'const realtype ',
-        'double *',
-        'realtype *',
-        'const int ',
-        'int ',
-        'SUNMatrixContent_Sparse ',
-        'gsl::span<const int> '
-    ]
-
-    for type_str in known_types:
-        signature = signature.replace(type_str, '')
-
-    return signature
-
-
-def is_valid_identifier(x: str) -> bool:
-    """
-    Check whether `x` is a valid identifier for conditions, parameters,
-    observables, etc. Identifiers may only contain upper and lower case
-    letters, digits and underscores, and must not start with a digit.
-
-    :param x:
-        string to check
-
-    :return:
-        ``True`` if valid, ``False`` otherwise
-    """
-
-    return IDENTIFIER_PATTERN.match(x) is not None
-
-
-@contextlib.contextmanager
-def _monkeypatched(obj: object, name: str, patch: Any):
-    """
-    Temporarily monkeypatches an object.
-
-    :param obj:
-        object to be patched
-
-    :param name:
-        name of the attribute to be patched
-
-    :param patch:
-        patched value
-    """
-    pre_patched_value = getattr(obj, name)
-    setattr(obj, name, patch)
-    try:
-        yield obj
-    finally:
-        setattr(obj, name, pre_patched_value)
-
-
-def _custom_pow_eval_derivative(self, s):
-    """
-    Custom Pow derivative that removes a removable singularity for
-    ``self.base == 0`` and ``self.base.diff(s) == 0``. This function is
-    intended to be monkeypatched into :py:meth:`sympy.Pow._eval_derivative`.
-
-    :param self:
-        sp.Pow class
-
-    :param s:
-        variable with respect to which the derivative will be computed
-    """
-    dbase = self.base.diff(s)
-    dexp = self.exp.diff(s)
-    part1 = sp.Pow(self.base, self.exp - 1) * self.exp * dbase
-    part2 = self * dexp * sp.log(self.base)
-    if self.base.is_nonzero or dbase.is_nonzero or part2.is_zero:
-        # first piece never applies or is zero anyway
-        return part1 + part2
-
-    return part1 + sp.Piecewise(
-        (self.base, sp.And(sp.Eq(self.base, 0), sp.Eq(dbase, 0))),
-        (part2, True)
-    )
-
-
-def _jacobian_element(i, j, eq_i, sym_var_j):
-    """Compute a single element of a Jacobian"""
-    return (i, j), eq_i.diff(sym_var_j)
-
-
-def _parallel_applyfunc(
-        obj: sp.Matrix,
-        func: Callable
-) -> sp.Matrix:
-    """Parallel implementation of sympy's Matrix.applyfunc"""
-    if (n_procs := int(os.environ.get("AMICI_IMPORT_NPROCS", 1))) == 1:
-        # serial
-        return obj.applyfunc(func)
-
-    # parallel
-    from pickle import PicklingError
-    from sympy.matrices.dense import DenseMatrix
-    from multiprocessing import get_context
-    # "spawn" should avoid potential deadlocks occurring with fork
-    # see e.g. https://stackoverflow.com/a/66113051
-    ctx = get_context('spawn')
-    with ctx.Pool(n_procs) as p:
-        try:
-            if isinstance(obj, DenseMatrix):
-                return obj._new(obj.rows, obj.cols, p.map(func, obj))
-            elif isinstance(obj, sp.SparseMatrix):
-                dok = obj.todok()
-                mapped = p.map(func, dok.values())
-                dok = {k: v for k, v in zip(dok.keys(), mapped) if v != 0}
-                return obj._new(obj.rows, obj.cols, dok)
-            else:
-                raise ValueError(f"Unsupported matrix type {type(obj)}")
-        except PicklingError as e:
-            raise ValueError(
-                f"Couldn't pickle {func}. This is likely because the argument "
-                "was not a module-level function. Either rewrite the argument "
-                "to a module-level function or disable parallelization by "
-                "setting `AMICI_IMPORT_NPROCS=1`."
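-                # Usage sketch (hypothetical): with AMICI_IMPORT_NPROCS=4 in
-                # the environment, _parallel_applyfunc(M, sp.simplify)
-                # simplifies the entries of a sympy matrix M in 4 processes;
-                # sp.simplify is a module-level function and thus picklable.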
- ) from e diff --git a/python/amici/ode_model.py b/python/amici/ode_model.py deleted file mode 100644 index a21526d27d..0000000000 --- a/python/amici/ode_model.py +++ /dev/null @@ -1,616 +0,0 @@ -"""Objects for AMICI's internal ODE model representation""" - - -import sympy as sp -import numbers - -from typing import ( - Optional, Union, Dict, SupportsFloat, Set -) - -from .import_utils import ObservableTransformation, \ - generate_measurement_symbol, generate_regularization_symbol,\ - RESERVED_SYMBOLS -from .import_utils import cast_to_sym - -__all__ = [ - 'ConservationLaw', 'Constant', 'Event', 'Expression', 'LogLikelihoodY', - 'LogLikelihoodZ', 'LogLikelihoodRZ', 'ModelQuantity', 'Observable', - 'Parameter', 'SigmaY', 'SigmaZ', 'State', 'EventObservable' -] - - -class ModelQuantity: - """ - Base class for model components - """ - def __init__(self, - identifier: sp.Symbol, - name: str, - value: Union[SupportsFloat, numbers.Number, sp.Expr]): - """ - Create a new ModelQuantity instance. - - :param identifier: - unique identifier of the quantity - - :param name: - individual name of the quantity (does not need to be unique) - - :param value: - either formula, numeric value or initial value - """ - - if not isinstance(identifier, sp.Symbol): - raise TypeError(f'identifier must be sympy.Symbol, was ' - f'{type(identifier)}') - - if str(identifier) in RESERVED_SYMBOLS or \ - (hasattr(identifier, 'name') and - identifier.name in RESERVED_SYMBOLS): - raise ValueError(f'Cannot add model quantity with name "{name}", ' - f'please rename.') - self._identifier: sp.Symbol = identifier - - if not isinstance(name, str): - raise TypeError(f'name must be str, was {type(name)}') - - self._name: str = name - - self._value: sp.Expr = cast_to_sym(value, 'value') - - def __repr__(self) -> str: - """ - Representation of the ModelQuantity object - - :return: - string representation of the ModelQuantity - """ - return str(self._identifier) - - def get_id(self) -> sp.Symbol: - """ - ModelQuantity identifier - - :return: - identifier of the ModelQuantity - """ - return self._identifier - - def get_name(self) -> str: - """ - ModelQuantity name - - :return: - name of the ModelQuantity - """ - return self._name - - def get_val(self) -> sp.Expr: - """ - ModelQuantity value - - :return: - value of the ModelQuantity - """ - return self._value - - def set_val(self, val: sp.Expr): - """ - Set ModelQuantity value - - :return: - value of the ModelQuantity - """ - self._value = cast_to_sym(val, 'value') - - -class ConservationLaw(ModelQuantity): - """ - A conservation law defines the absolute the total amount of a - (weighted) sum of states - - """ - def __init__(self, - identifier: sp.Symbol, - name: str, - value: sp.Expr, - coefficients: Dict[sp.Symbol, sp.Expr], - state_id: sp.Symbol): - """ - Create a new ConservationLaw instance. 
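-
-        For example, a conserved total of two hypothetical species ``A``
-        and ``B``, where this law replaces ``B``, could be constructed
-        as::
-
-            ConservationLaw(sp.Symbol('tcl_A_B'), 'total of A and B',
-                            sp.Symbol('A') + sp.Symbol('B'),
-                            {sp.Symbol('A'): 1, sp.Symbol('B'): 1},
-                            sp.Symbol('B'))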
- - :param identifier: - unique identifier of the ConservationLaw - - :param name: - individual name of the ConservationLaw (does not need to be - unique) - - :param value: formula (sum of states) - - :param coefficients: - coefficients of the states in the sum - - :param state_id: - identifier of the state that this conservation law replaces - """ - self._state_expr: sp.Symbol = identifier - (value - state_id) - self._coefficients: Dict[sp.Symbol, sp.Expr] = coefficients - self._ncoeff: sp.Expr = coefficients[state_id] - super(ConservationLaw, self).__init__(identifier, name, value) - - def get_ncoeff(self, state_id) -> Union[sp.Expr, int, float]: - """ - Computes the normalized coefficient a_i/a_j where i is the index of - the provided state_id and j is the index of the state that is - replaced by this conservation law. This can be used to compute both - dtotal_cl/dx_rdata (=ncoeff) and dx_rdata/dx_solver (=-ncoeff). - - :param state_id: - identifier of the state - - :return: normalized coefficent of the state - """ - return self._coefficients.get(state_id, 0.0) / self._ncoeff - - def get_x_rdata(self): - """ - Returns the expression that allows computation of x_rdata for the state - that this conservation law replaces. - - :return: x_rdata expression - """ - return self._state_expr - - -class State(ModelQuantity): - """ - A State variable defines an entity that evolves with time according to - the provided time derivative, abbreviated by ``x``. - - :ivar _conservation_law: - algebraic formula that allows computation of this - state according to a conservation law - - :ivar _dt: - algebraic formula that defines the temporal derivative of this state - - """ - def __init__(self, - identifier: sp.Symbol, - name: str, - init: sp.Expr, - dt: sp.Expr): - """ - Create a new State instance. Extends :meth:`ModelQuantity.__init__` - by ``dt`` - - :param identifier: - unique identifier of the state - - :param name: - individual name of the state (does not need to be unique) - - :param init: - initial value - - :param dt: - time derivative - """ - super(State, self).__init__(identifier, name, init) - self._dt = cast_to_sym(dt, 'dt') - self._conservation_law: Union[ConservationLaw, None] = None - - def set_conservation_law(self, law: ConservationLaw) -> None: - """ - Sets the conservation law of a state. - - If a conservation law is set, the respective state will be replaced by - an algebraic formula according to the respective conservation law. - - :param law: - linear sum of states that if added to this state remain - constant over time - """ - if not isinstance(law, ConservationLaw): - raise TypeError(f'conservation law must have type ConservationLaw' - f', was {type(law)}') - - self._conservation_law = law - - def set_dt(self, - dt: sp.Expr) -> None: - """ - Sets the time derivative - - :param dt: - time derivative - """ - self._dt = cast_to_sym(dt, 'dt') - - def get_dt(self) -> sp.Expr: - """ - Gets the time derivative - - :return: - time derivative - """ - return self._dt - - def get_free_symbols(self) -> Set[sp.Basic]: - """ - Gets the set of free symbols in time derivative and initial conditions - - :return: - free symbols - """ - return self._dt.free_symbols.union(self._value.free_symbols) - - def has_conservation_law(self): - """ - Checks whether this state has a conservation law assigned. 
- - :return: True if assigned, False otherwise - """ - return self._conservation_law is not None - - def get_x_rdata(self): - """ - Returns the expression that allows computation of x_rdata for this - state, accounting for conservation laws. - - :return: x_rdata expression - """ - if self._conservation_law is None: - return self.get_id() - else: - return self._conservation_law.get_x_rdata() - - def get_dx_rdata_dx_solver(self, state_id): - """ - Returns the expression that allows computation of - ``dx_rdata_dx_solver`` for this state, accounting for conservation - laws. - - :return: dx_rdata_dx_solver expression - """ - if self._conservation_law is None: - return sp.Integer(self._identifier == state_id) - else: - return -self._conservation_law.get_ncoeff(state_id) - - -class Observable(ModelQuantity): - """ - An Observable links model simulations to experimental measurements, - abbreviated by ``y``. - - :ivar _measurement_symbol: - sympy symbol used in the objective function to represent - measurements to this observable - - :ivar trafo: - observable transformation, only applies when evaluating objective - function or residuals - """ - - _measurement_symbol: Union[sp.Symbol, None] = None - - def __init__(self, - identifier: sp.Symbol, - name: str, - value: sp.Expr, - measurement_symbol: Optional[sp.Symbol] = None, - transformation: Optional[ObservableTransformation] = 'lin'): - """ - Create a new Observable instance. - - :param identifier: - unique identifier of the Observable - - :param name: - individual name of the Observable (does not need to be unique) - - :param value: - formula - - :param transformation: - observable transformation, only applies when evaluating objective - function or residuals - """ - super(Observable, self).__init__(identifier, name, value) - self._measurement_symbol = measurement_symbol - self._regularization_symbol = None - self.trafo = transformation - - def get_measurement_symbol(self) -> sp.Symbol: - if self._measurement_symbol is None: - self._measurement_symbol = generate_measurement_symbol( - self.get_id() - ) - - return self._measurement_symbol - - def get_regularization_symbol(self) -> sp.Symbol: - if self._regularization_symbol is None: - self._regularization_symbol = generate_regularization_symbol( - self.get_id() - ) - - return self._regularization_symbol - - -class EventObservable(Observable): - """ - An Event Observable links model simulations to event related experimental - measurements, abbreviated by ``z``. - - :ivar _event: - symbolic event identifier - """ - - def __init__(self, - identifier: sp.Symbol, - name: str, - value: sp.Expr, - event: sp.Symbol, - measurement_symbol: Optional[sp.Symbol] = None, - transformation: Optional[ObservableTransformation] = 'lin',): - """ - Create a new EventObservable instance. - - :param identifier: - See :py:meth:`Observable.__init__`. - - :param name: - See :py:meth:`Observable.__init__`. - - :param value: - See :py:meth:`Observable.__init__`. - - :param transformation: - See :py:meth:`Observable.__init__`. - - :param event: - Symbolic identifier of the corresponding event. - """ - super(EventObservable, self).__init__(identifier, name, value, - measurement_symbol, - transformation) - self._event: sp.Symbol = event - - def get_event(self) -> sp.Symbol: - """ - Get the symbolic identifier of the corresponding event. 
- - :return: symbolic identifier - """ - return self._event - - -class Sigma(ModelQuantity): - """ - A Standard Deviation Sigma rescales the distance between simulations - and measurements when computing residuals or objective functions, - abbreviated by ``sigma{y,z}``. - """ - def __init__(self, - identifier: sp.Symbol, - name: str, - value: sp.Expr): - """ - Create a new Standard Deviation instance. - - :param identifier: - unique identifier of the Standard Deviation - - :param name: - individual name of the Standard Deviation (does not need to - be unique) - - :param value: - formula - """ - if self.__class__.__name__ == "Sigma": - raise RuntimeError( - "This class is meant to be sub-classed, not used directly." - ) - super(Sigma, self).__init__(identifier, name, value) - - -class SigmaY(Sigma): - """ - Standard deviation for observables - """ - - -class SigmaZ(Sigma): - """ - Standard deviation for event observables - """ - - -class Expression(ModelQuantity): - """ - An Expression is a recurring elements in symbolic formulas. Specifying - this may yield more compact expression which may lead to substantially - shorter model compilation times, but may also reduce model simulation time. - Abbreviated by ``w``. - """ - def __init__(self, - identifier: sp.Symbol, - name: str, - value: sp.Expr): - """ - Create a new Expression instance. - - :param identifier: - unique identifier of the Expression - - :param name: - individual name of the Expression (does not need to be unique) - - :param value: - formula - """ - super(Expression, self).__init__(identifier, name, value) - - -class Parameter(ModelQuantity): - """ - A Parameter is a free variable in the model with respect to which - sensitivities may be computed, abbreviated by ``p``. - """ - - def __init__(self, - identifier: sp.Symbol, - name: str, - value: numbers.Number): - """ - Create a new Expression instance. - - :param identifier: - unique identifier of the Parameter - - :param name: - individual name of the Parameter (does not need to be - unique) - - :param value: - numeric value - """ - super(Parameter, self).__init__(identifier, name, value) - - -class Constant(ModelQuantity): - """ - A Constant is a fixed variable in the model with respect to which - sensitivities cannot be computed, abbreviated by ``k``. - """ - - def __init__(self, - identifier: sp.Symbol, - name: str, - value: numbers.Number): - """ - Create a new Expression instance. - - :param identifier: - unique identifier of the Constant - - :param name: - individual name of the Constant (does not need to be unique) - - :param value: - numeric value - """ - super(Constant, self).__init__(identifier, name, value) - - -class LogLikelihood(ModelQuantity): - """ - A LogLikelihood defines the distance between measurements and - experiments for a particular observable. The final LogLikelihood value - in the simulation will be the sum of all specified LogLikelihood - instances evaluated at all timepoints, abbreviated by ``Jy``. - """ - - def __init__(self, - identifier: sp.Symbol, - name: str, - value: sp.Expr): - """ - Create a new Expression instance. - - :param identifier: - unique identifier of the LogLikelihood - - :param name: - individual name of the LogLikelihood (does not need to be - unique) - - :param value: - formula - """ - if self.__class__.__name__ == "LogLikelihood": - raise RuntimeError( - "This class is meant to be sub-classed, not used directly." 
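-                # (instantiate one of the subclasses defined below,
-                # e.g. LogLikelihoodY, instead)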
- ) - super(LogLikelihood, self).__init__(identifier, name, value) - - -class LogLikelihoodY(LogLikelihood): - """ - Loglikelihood for observables - """ - - -class LogLikelihoodZ(LogLikelihood): - """ - Loglikelihood for event observables - """ - - -class LogLikelihoodRZ(LogLikelihood): - """ - Loglikelihood for event observables regularization - """ - - -class Event(ModelQuantity): - """ - An Event defines either a SBML event or a root of the argument of a - Heaviside function. The Heaviside functions will be tracked via the - vector ``h`` during simulation and are needed to inform the ODE solver - about a discontinuity in either the right-hand side or the states - themselves, causing a reinitialization of the solver. - """ - - def __init__(self, - identifier: sp.Symbol, - name: str, - value: sp.Expr, - state_update: Union[sp.Expr, None], - initial_value: Optional[bool] = True): - """ - Create a new Event instance. - - :param identifier: - unique identifier of the Event - - :param name: - individual name of the Event (does not need to be unique) - - :param value: - formula for the root / trigger function - - :param state_update: - formula for the bolus function (None for Heaviside functions, - zero vector for events without bolus) - - :param initial_value: - initial boolean value of the trigger function at t0. If set to - `False`, events may trigger at ``t==t0``, otherwise not. - """ - super(Event, self).__init__(identifier, name, value) - # add the Event specific components - self._state_update = state_update - self._initial_value = initial_value - - def get_initial_value(self) -> bool: - """ - Return the initial value for the root function. - - :return: - initial value formula - """ - return self._initial_value - - def __eq__(self, other): - """ - Check equality of events at the level of trigger/root functions, as we - need to collect unique root functions for ``roots.cpp`` - """ - return self.get_val() == other.get_val() and \ - (self.get_initial_value() == other.get_initial_value()) diff --git a/python/amici/pandas.py b/python/amici/pandas.py deleted file mode 100644 index ad11278630..0000000000 --- a/python/amici/pandas.py +++ /dev/null @@ -1,743 +0,0 @@ -""" -Pandas Wrappers ---------------- -This module contains convenience wrappers that allow for easy interconversion -between C++ objects from :mod:`amici.amici` and pandas DataFrames -""" - -import pandas as pd -import numpy as np -import math -import copy - -from typing import List, Union, Optional, Dict, SupportsFloat -from .numpy import ExpDataView -import amici - -__all__ = [ - 'get_expressions_as_dataframe', - 'getEdataFromDataFrame', - 'getDataObservablesAsDataFrame', - 'getSimulationObservablesAsDataFrame', - 'getSimulationStatesAsDataFrame', - 'getResidualsAsDataFrame' -] - -ExpDatas = Union[ - List[amici.amici.ExpData], List[amici.ExpDataPtr], - amici.amici.ExpData, amici.ExpDataPtr -] -ReturnDatas = Union[ - List[amici.ReturnDataView], amici.ReturnDataView -] - -AmiciModel = Union[amici.ModelPtr, amici.Model] - - -def _process_edata_list(edata_list: ExpDatas) -> List[amici.amici.ExpData]: - """ - Maps single instances of :class:`amici.amici.ExpData` to lists of - :class:`amici.amici.ExpData` - - :param edata_list: - list of instances or single instance - - :return: - list of instance(s) - """ - if isinstance(edata_list, (amici.amici.ExpData, amici.ExpDataPtr)): - return [edata_list] - else: - return edata_list - - -def _process_rdata_list(rdata_list: ReturnDatas) -> List[amici.ReturnDataView]: - """ - Maps single 
instances of :class:`amici.ReturnData` to lists of - :class:`amici.ReturnData` - - :param rdata_list: - list of instances or single instance - - :return: - list of instance(s) - """ - if isinstance(rdata_list, amici.ReturnDataView): - return [rdata_list] - else: - return rdata_list - - -def getDataObservablesAsDataFrame( - model: AmiciModel, - edata_list: ExpDatas, - by_id: Optional[bool] = False) -> pd.DataFrame: - """ - Write Observables from experimental data as DataFrame. - - :param model: - Model instance. - - :param edata_list: - list of ExpData instances with experimental data. - May also be a single ExpData instance. - - :param by_id: - If True, uses observable ids as column names in the generated - DataFrame, otherwise the possibly more descriptive observable names - are used. - - :return: - pandas DataFrame with conditions/timepoints as rows and observables as - columns. - """ - edata_list = _process_edata_list(edata_list) - - # list of all column names using either ids or names - cols = _get_extended_observable_cols(model, by_id=by_id) - - # aggregate records - dicts = [] - for edata in edata_list: - npdata = ExpDataView(edata) - for i_time, timepoint in enumerate(edata.getTimepoints()): - datadict = { - 'time': timepoint, - 'datatype': 'data' - } - # add observables and noises - for i_obs, obs in enumerate(_get_names_or_ids( - model, 'Observable', by_id=by_id)): - datadict[obs] = npdata['observedData'][i_time, i_obs] - datadict[obs + '_std'] = \ - npdata['observedDataStdDev'][i_time, i_obs] - - # add conditions - _fill_conditions_dict(datadict, model, edata, by_id=by_id) - - dicts.append(datadict) - - return pd.DataFrame.from_records(dicts, columns=cols) - - -def getSimulationObservablesAsDataFrame( - model: amici.Model, - edata_list: ExpDatas, - rdata_list: ReturnDatas, - by_id: Optional[bool] = False -) -> pd.DataFrame: - """ - Write Observables from simulation results as DataFrame. - - :param model: - Model instance. - - :param edata_list: - list of ExpData instances with experimental data. - May also be a single ExpData instance. - - :param rdata_list: - list of ReturnData instances corresponding to ExpData. - May also be a single ReturnData instance. - - :param by_id: - If True, ids are used as identifiers, otherwise the possibly more - descriptive names. - - :return: - pandas DataFrame with conditions/timepoints as rows and observables as - columns. - """ - edata_list = _process_edata_list(edata_list) - rdata_list = _process_rdata_list(rdata_list) - - # list of all column names using either names or ids - cols = _get_extended_observable_cols(model, by_id=by_id) - - # aggregate records - dicts = [] - for edata, rdata in zip(edata_list, rdata_list): - for i_time, timepoint in enumerate(rdata['t']): - datadict = { - 'time': timepoint, - 'datatype': 'simulation', - } - # append simulations - for i_obs, obs in enumerate(_get_names_or_ids( - model, 'Observable', by_id=by_id)): - datadict[obs] = rdata['y'][i_time, i_obs] - datadict[obs + '_std'] = rdata['sigmay'][i_time, i_obs] - - # use edata to fill conditions columns - _fill_conditions_dict(datadict, model, edata, by_id=by_id) - - # append to dataframe - dicts.append(datadict) - - return pd.DataFrame.from_records(dicts, columns=cols) - - -def getSimulationStatesAsDataFrame( - model: amici.Model, - edata_list: ExpDatas, - rdata_list: ReturnDatas, - by_id: Optional[bool] = False) -> pd.DataFrame: - """ - Get model state according to lists of ReturnData and ExpData. - - :param model: - Model instance. 
- - :param edata_list: - list of ExpData instances with experimental data. - May also be a single ExpData instance. - - :param rdata_list: - list of ReturnData instances corresponding to ExpData. - May also be a single ReturnData instance. - - :param by_id: - If True, ids are used as identifiers, otherwise the possibly more - descriptive names. - - :return: pandas DataFrame with conditions/timepoints as rows and - state variables as columns. - """ - edata_list = _process_edata_list(edata_list) - rdata_list = _process_rdata_list(rdata_list) - - # get conditions and state column names by name or id - cols = _get_state_cols(model, by_id=by_id) - - # aggregate records - dicts = [] - for edata, rdata in zip(edata_list, rdata_list): - for i_time, timepoint in enumerate(rdata['t']): - datadict = { - 'time': timepoint, - } - - # append states - for i_state, state in enumerate( - _get_names_or_ids(model, 'State', by_id=by_id)): - datadict[state] = rdata['x'][i_time, i_state] - - # use data to fill condition columns - _fill_conditions_dict(datadict, model, edata, by_id=by_id) - - # append to dataframe - dicts.append(datadict) - - return pd.DataFrame.from_records(dicts, columns=cols) - - -def get_expressions_as_dataframe( - model: amici.Model, - edata_list: ExpDatas, - rdata_list: ReturnDatas, - by_id: Optional[bool] = False) -> pd.DataFrame: - """ - Get values of model expressions from lists of ReturnData as DataFrame. - - :param model: - Model instance. - - :param edata_list: - list of ExpData instances with experimental data. - May also be a single ExpData instance. - - :param rdata_list: - list of ReturnData instances corresponding to ExpData. - May also be a single ReturnData instance. - - :param by_id: - If True, ids are used as identifiers, otherwise the possibly more - descriptive names. - - :return: pandas DataFrame with conditions/timepoints as rows and - model expressions as columns. - """ - edata_list = _process_edata_list(edata_list) - rdata_list = _process_rdata_list(rdata_list) - - # get conditions and state column names by name or id - cols = _get_expression_cols(model, by_id=by_id) - - # aggregate records - dicts = [] - for edata, rdata in zip(edata_list, rdata_list): - for i_time, timepoint in enumerate(rdata['t']): - datadict = { - 'time': timepoint, - } - - # append expressions - for i_expr, expr in enumerate( - _get_names_or_ids(model, 'Expression', by_id=by_id)): - datadict[expr] = rdata['w'][i_time, i_expr] - - # use data to fill condition columns - _fill_conditions_dict(datadict, model, edata, by_id=by_id) - - # append to dataframe - dicts.append(datadict) - - return pd.DataFrame.from_records(dicts, columns=cols) - - -def getResidualsAsDataFrame(model: amici.Model, - edata_list: ExpDatas, - rdata_list: ReturnDatas, - by_id: Optional[bool] = False) -> pd.DataFrame: - """ - Convert a list of ReturnData and ExpData to pandas DataFrame with - residuals. - - :param model: - Model instance. - - :param edata_list: - list of ExpData instances with experimental data. May also be a - single ExpData instance. - - :param rdata_list: - list of ReturnData instances corresponding to ExpData. May also be a - single ReturnData instance. - - :param by_id: bool, optional (default = False) - If True, ids are used as identifiers, otherwise the possibly more - descriptive names. - - :return: - pandas DataFrame with conditions and residuals. 
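-
-    A typical call might look like this (solver settings omitted)::
-
-        solver = model.getSolver()
-        rdata_list = [amici.runAmiciSimulation(model, solver, edata)
-                      for edata in edata_list]
-        df = getResidualsAsDataFrame(model, edata_list, rdata_list)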
- """ - edata_list = _process_edata_list(edata_list) - rdata_list = _process_rdata_list(rdata_list) - - # create observable and simulation dataframes - df_edata = getDataObservablesAsDataFrame( - model, edata_list, by_id=by_id) - df_rdata = getSimulationObservablesAsDataFrame( - model, edata_list, rdata_list, by_id=by_id) - - # get all column names using names or ids - cols = _get_observable_cols(model, by_id=by_id) - - # aggregate records - dicts = [] - for row in df_rdata.index: - datadict = { - 'time': df_rdata.loc[row]['time'], - 't_presim': df_rdata.loc[row]['t_presim'] - } - - # iterate over observables - for obs in _get_names_or_ids(model, 'Observable', by_id=by_id): - # compute residual and append to dict - datadict[obs] = abs( - (df_edata.loc[row][obs] - df_rdata.loc[row][obs]) / - df_rdata.loc[row][obs + '_std']) - - # iterate over fixed parameters - for par in _get_names_or_ids(model, 'FixedParameter', by_id=by_id): - # fill in conditions - datadict[par] = df_rdata.loc[row][par] - datadict[par + '_preeq'] = df_rdata.loc[row][par + '_preeq'] - datadict[par + '_presim'] = df_rdata.loc[row][par + '_presim'] - - # append to dataframe - dicts.append(datadict) - - return pd.DataFrame.from_records(dicts, columns=cols) - - -def _fill_conditions_dict(datadict: Dict[str, float], - model: AmiciModel, - edata: amici.amici.ExpData, - by_id: bool) -> Dict[str, float]: - """ - Helper function that fills in condition parameters from model and - edata. - - :param datadict: - dictionary in which condition parameters will be inserted - as key value pairs. - - :param model: - Model instance. - - :param edata: - ExpData instance. - - :param by_id: - If True, ids are used as identifiers, otherwise the possibly more - descriptive names. - - :return: - dictionary with filled condition parameters. - - """ - datadict['condition_id'] = edata.id - datadict['t_presim'] = edata.t_presim - - for i_par, par in enumerate( - _get_names_or_ids(model, 'FixedParameter', by_id=by_id)): - if len(edata.fixedParameters): - datadict[par] = edata.fixedParameters[i_par] - else: - datadict[par] = model.getFixedParameters()[i_par] - - if len(edata.fixedParametersPreequilibration): - datadict[par + '_preeq'] = \ - edata.fixedParametersPreequilibration[i_par] - else: - datadict[par + '_preeq'] = np.nan - - if len(edata.fixedParametersPresimulation): - datadict[par + '_presim'] = \ - edata.fixedParametersPresimulation[i_par] - else: - datadict[par + '_presim'] = np.nan - return datadict - - -def _get_extended_observable_cols(model: AmiciModel, - by_id: bool) -> List[str]: - """ - Construction helper for extended observable dataframe headers. - - :param model: - Model instance. - - :param by_id: - If True, ids are used as identifiers, otherwise the possibly more - descriptive names. - - :return: - column names as list. - """ - return \ - ['condition_id', 'time', 'datatype', 't_presim'] + \ - _get_names_or_ids(model, 'FixedParameter', by_id=by_id) + \ - [name + '_preeq' for name in - _get_names_or_ids(model, 'FixedParameter', by_id=by_id)] + \ - [name + '_presim' for name in - _get_names_or_ids(model, 'FixedParameter', by_id=by_id)] + \ - _get_names_or_ids(model, 'Observable', by_id=by_id) + \ - [name + '_std' for name in - _get_names_or_ids(model, 'Observable', by_id=by_id)] - - -def _get_observable_cols(model: AmiciModel, - by_id: bool) -> List[str]: - """ - Construction helper for observable dataframe headers. - - :param model: - Model instance. 
- - :param by_id: - If True, ids are used as identifiers, otherwise the possibly more - descriptive names. - - :return: - column names as list. - """ - return \ - ['condition_id', 'time', 't_presim'] + \ - _get_names_or_ids(model, 'FixedParameter', by_id=by_id) + \ - [name + '_preeq' for name in - _get_names_or_ids(model, 'FixedParameter', by_id=by_id)] + \ - [name + '_presim' for name in - _get_names_or_ids(model, 'FixedParameter', by_id=by_id)] + \ - _get_names_or_ids(model, 'Observable', by_id=by_id) - - -def _get_state_cols(model: AmiciModel, - by_id: bool) -> List[str]: - """ - Construction helper for state dataframe headers. - - :param model: - Model instance. - - :param by_id: - If True, ids are used as identifiers, otherwise the possibly more - descriptive names. - - :return: - column names as list. - """ - return \ - ['condition_id', 'time', 't_presim'] + \ - _get_names_or_ids(model, 'FixedParameter', by_id=by_id) + \ - [name + '_preeq' for name in - _get_names_or_ids(model, 'FixedParameter', by_id=by_id)] + \ - [name + '_presim' for name in - _get_names_or_ids(model, 'FixedParameter', by_id=by_id)] + \ - _get_names_or_ids(model, 'State', by_id=by_id) - - -def _get_expression_cols(model: AmiciModel, by_id: bool) -> List[str]: - """Construction helper for expression dataframe headers. - - :param model: - Model instance. - - :param by_id: - If True, ids are used as identifiers, otherwise the possibly more - descriptive names. - - :return: - column names as list. - """ - return \ - ['condition_id', 'time', 't_presim'] + \ - _get_names_or_ids(model, 'FixedParameter', by_id=by_id) + \ - [name + '_preeq' for name in - _get_names_or_ids(model, 'FixedParameter', by_id=by_id)] + \ - [name + '_presim' for name in - _get_names_or_ids(model, 'FixedParameter', by_id=by_id)] + \ - _get_names_or_ids(model, 'Expression', by_id=by_id) - - -def _get_names_or_ids(model: AmiciModel, - variable: str, - by_id: bool) -> List[str]: - """ - Obtains a unique list of identifiers for the specified variable. - First tries model.getVariableNames and then uses model.getVariableIds. - - :param model: - Model instance. - - :param variable: - variable name. - - :param by_id: - If True, ids are used as identifiers, otherwise first the possibly - more descriptive names are used. - - :return: - column names as list. - """ - # check whether variable type permitted - variable_options = [ - 'Parameter', 'FixedParameter', 'Observable', 'State', 'Expression' - ] - if variable not in variable_options: - raise ValueError('Variable must be in ' + str(variable_options)) - - # extract attributes - names = list(getattr(model, f'get{variable}Names')()) - ids = list(getattr(model, f'get{variable}Ids')()) - - # find out if model has names and ids - has_names = getattr(model, f'has{variable}Names')() - has_ids = getattr(model, f'has{variable}Ids')() - - # extract labels - if not by_id and has_names and len(set(names)) == len(names): - # use variable names - return names - elif has_ids: - # use variable ids - return ids - else: - # unable to create unique labels - if by_id: - msg = f"Model {variable} ids are not set." - else: - msg = f"Model {variable} names are not unique and " \ - f"{variable} ids are not set." 
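-        # e.g. a model with two observables both named 'activity' and no
-        # ids set would end up here (hypothetical failure case)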
- raise ValueError(msg) - - -def _get_specialized_fixed_parameters( - model: AmiciModel, - condition: Union[Dict[str, SupportsFloat], pd.Series], - overwrite: Union[Dict[str, SupportsFloat], pd.Series], - by_id: bool -) -> List[float]: - """ - Copies values in condition and overwrites them according to key - value pairs specified in overwrite. - - :param model: - Model instance. - :param condition: - fixedParameter values. - :param overwrite: - dict specifying which values in condition are to be replaced. - :param by_id: - bool - If True, ids are used as identifiers, otherwise the possibly more - descriptive names. - - :return: - overwritten FixedParameter as list. - """ - cond = copy.deepcopy(condition) - for field in overwrite: - cond[field] = overwrite[field] - return [float(cond[name]) for name in _get_names_or_ids( - model, 'FixedParameter', by_id=by_id)] - - -def constructEdataFromDataFrame( - df: pd.DataFrame, - model: AmiciModel, - condition: pd.Series, - by_id: Optional[bool] = False -) -> amici.amici.ExpData: - """ - Constructs an ExpData instance according to the provided Model - and DataFrame. - - :param df: - pd.DataFrame with Observable Names/Ids as columns. - Standard deviations may be specified by appending '_std' as suffix. - - :param model: - Model instance. - - :param condition: - pd.Series with FixedParameter Names/Ids as columns. - Preequilibration conditions may be specified by appending - '_preeq' as suffix. Presimulation conditions may be specified by - appending '_presim' as suffix. - - :param by_id: - Indicate whether in the arguments, column headers are based on ids or - names. This should correspond to the way `df` and `condition` was - created in the first place. - - :return: - ExpData instance. - """ - # initialize edata - edata = amici.ExpData(model.get()) - - # timepoints - df = df.sort_values(by='time', ascending=True) - edata.setTimepoints(df['time'].values.astype(float)) - - # get fixed parameters from condition - overwrite_preeq = {} - overwrite_presim = {} - for par in list(_get_names_or_ids(model, 'FixedParameter', by_id=by_id)): - if par + '_preeq' in condition.keys() \ - and not math.isnan(condition[par + '_preeq'].astype(float)): - overwrite_preeq[par] = condition[par + '_preeq'].astype(float) - if par + '_presim' in condition.keys() \ - and not math.isnan(condition[par + '_presim'].astype(float)): - overwrite_presim[par] = condition[par + '_presim'].astype(float) - - # fill in fixed parameters - edata.fixedParameters = condition[ - _get_names_or_ids(model, 'FixedParameter', by_id=by_id) - ].astype(float).values - - # fill in preequilibration parameters - if any([overwrite_preeq[key] != condition[key] for key in - overwrite_preeq]): - edata.fixedParametersPreequilibration = \ - _get_specialized_fixed_parameters( - model, condition, overwrite_preeq, by_id=by_id) - elif len(overwrite_preeq): - edata.fixedParametersPreequilibration = copy.deepcopy( - edata.fixedParameters - ) - - # fill in presimulation parameters - if any([overwrite_presim[key] != condition[key] for key in - overwrite_presim.keys()]): - edata.fixedParametersPresimulation = _get_specialized_fixed_parameters( - model, condition, overwrite_presim, by_id=by_id - ) - elif len(overwrite_presim.keys()): - edata.fixedParametersPresimulation = copy.deepcopy( - edata.fixedParameters - ) - - # fill in presimulation time - if 't_presim' in condition.keys(): - edata.t_presim = float(condition['t_presim']) - - # fill in data and stds - for obs_index, obs in enumerate( - _get_names_or_ids(model, 
'Observable', by_id=by_id)): - if obs in df.keys(): - edata.setObservedData(df[obs].values.astype(float), obs_index) - if obs + '_std' in df.keys(): - edata.setObservedDataStdDev( - df[obs + '_std'].values.astype(float), obs_index - ) - - return edata - - -def getEdataFromDataFrame( - model: AmiciModel, - df: pd.DataFrame, - by_id: Optional[bool] = False -) -> List[amici.amici.ExpData]: - """ - Constructs a ExpData instances according to the provided Model and - DataFrame. - - :param df: - dataframe with Observable Names/Ids, FixedParameter Names/Ids - and time as columns. Standard deviations may be specified by - appending '_std' as suffix. Preequilibration fixedParameters may be - specified by appending '_preeq' as suffix. Presimulation - fixedParameters may be specified by appending '_presim' as suffix. - Presimulation time may be specified as 't_presim' column. - - :param model: - Model instance. - - :param by_id: - Whether the column names in `df` are based on ids or names, - corresponding to how the dataframe was created in the first place. - - :return: - list of ExpData instances. - """ - edata_list = [] - - # aggregate features that define a condition - - # fixed parameters - condition_parameters = _get_names_or_ids(model, 'FixedParameter', - by_id=by_id) - # preeq and presim parameters - for par in _get_names_or_ids(model, 'FixedParameter', by_id=by_id): - if par + '_preeq' in df.columns: - condition_parameters.append(par + '_preeq') - if par + '_presim' in df.columns: - condition_parameters.append(par + '_presim') - # presimulation time - if 't_presim' in df.columns: - condition_parameters.append('t_presim') - # drop duplicates to create final conditions - conditions = df[condition_parameters].drop_duplicates() - - # iterate over conditions - for ir, row in conditions.iterrows(): - # subselect rows that match condition - selected = np.ones((len(df),), dtype=bool) - for par_label, par in row.iteritems(): - if math.isnan(par): - selected = selected & np.isnan( - df[par_label].astype(float).values - ) - else: - selected = selected & (df[par_label] == par) - edata_df = df[selected] - - edata_list.append( - constructEdataFromDataFrame(edata_df, model, row, by_id=by_id) - ) - - return edata_list diff --git a/python/amici/parameter_mapping.py b/python/amici/parameter_mapping.py deleted file mode 100644 index ed3eaa2b08..0000000000 --- a/python/amici/parameter_mapping.py +++ /dev/null @@ -1,427 +0,0 @@ -""" -Parameter mapping ------------------ - -When performing parameter inference, often parameters need to be mapped from -simulation to estimation parameters, and parameters can differ between -conditions. This can be handled using the `ParameterMapping`. - -Note -~~~~ - -While the parameter mapping can be used directly with AMICI, it was developed -for usage together with PEtab, for which the whole workflow of generating -the mapping is automatized. -""" -from __future__ import annotations - -import numbers -import warnings -from typing import Any, Dict, List, Union, Set -from collections.abc import Sequence -from itertools import chain - -import amici -import numpy as np -from petab.C import * # noqa: F403 - - -SingleParameterMapping = Dict[str, Union[numbers.Number, str]] -SingleScaleMapping = Dict[str, str] -AmiciModel = Union[amici.Model, amici.ModelPtr] - - -class ParameterMappingForCondition: - """Parameter mapping for condition. - - Contains mappings for free parameters, fixed parameters, and fixed - preequilibration parameters, both for parameters and scales. 
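# A minimal, self-contained sketch of the condition grouping performed by
# getEdataFromDataFrame() above: rows that share the same fixed-parameter
# columns are collapsed into one simulation condition. Column names here are
# hypothetical stand-ins.
import pandas as pd

df = pd.DataFrame({
    'time':  [0.0, 1.0, 0.0, 1.0],
    'k0':    [1.0, 1.0, 2.0, 2.0],
    'obs_a': [0.1, 0.2, 0.3, 0.4],
})
conditions = df[['k0']].drop_duplicates()
for _, row in conditions.iterrows():
    selected = df['k0'] == row['k0']
    print(f"condition k0={row['k0']}: {selected.sum()} measurements")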
- - In the scale mappings, for each simulation parameter the scale - on which the value is passed (and potentially gradients are to be - returned) is given. In the parameter mappings, for each simulation - parameter a corresponding optimization parameter (or a numeric value) - is given. - - If a mapping is not passed, the parameter mappings are assumed to be empty, - and if a scale mapping is not passed, all scales are set to linear. - - :param map_sim_var: - Mapping for free simulation parameters. - :param scale_map_sim_var: - Scales for free simulation parameters. - :param map_preeq_fix: - Mapping for fixed preequilibration parameters. - :param scale_map_preeq_fix: - Scales for fixed preequilibration parameters. - :param map_sim_fix: - Mapping for fixed simulation parameters. - :param scale_map_sim_fix: - Scales for fixed simulation parameters. - """ - - def __init__( - self, - map_sim_var: SingleParameterMapping = None, - scale_map_sim_var: SingleScaleMapping = None, - map_preeq_fix: SingleParameterMapping = None, - scale_map_preeq_fix: SingleScaleMapping = None, - map_sim_fix: SingleParameterMapping = None, - scale_map_sim_fix: SingleScaleMapping = None, - ): - if map_sim_var is None: - map_sim_var = {} - self.map_sim_var = map_sim_var - - if scale_map_sim_var is None: - scale_map_sim_var = {key: LIN for key in map_sim_var} - self.scale_map_sim_var = scale_map_sim_var - - if map_preeq_fix is None: - map_preeq_fix = {} - self.map_preeq_fix = map_preeq_fix - - if scale_map_preeq_fix is None: - scale_map_preeq_fix = {key: LIN for key in map_preeq_fix} - self.scale_map_preeq_fix = scale_map_preeq_fix - - if map_sim_fix is None: - map_sim_fix = {} - self.map_sim_fix = map_sim_fix - - if scale_map_sim_fix is None: - scale_map_sim_fix = {key: LIN for key in map_sim_fix} - self.scale_map_sim_fix = scale_map_sim_fix - - def __repr__(self): - return (f"{self.__class__.__name__}(" - f"map_sim_var={repr(self.map_sim_var)}," - f"scale_map_sim_var={repr(self.scale_map_sim_var)}," - f"map_preeq_fix={repr(self.map_preeq_fix)}," - f"scale_map_preeq_fix={repr(self.scale_map_preeq_fix)}," - f"map_sim_fix={repr(self.map_sim_fix)}," - f"scale_map_sim_fix={repr(self.scale_map_sim_fix)})") - - @property - def free_symbols(self) -> Set[str]: - """Get IDs of all (symbolic) parameters present in this mapping""" - return { - p for p in chain( - self.map_sim_var.values(), - self.map_preeq_fix.values(), - self.map_sim_fix.values() - ) - if isinstance(p, str) - } - - -class ParameterMapping(Sequence): - r"""Parameter mapping for multiple conditions. - - This can be used like a list of :class:`ParameterMappingForCondition`\ s. - - :param parameter_mappings: - List of parameter mappings for specific conditions. 
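# A minimal sketch of the scale defaulting described above: when no scale
# mapping is passed, every mapped parameter falls back to linear scale.
# LIN mirrors petab.C.LIN; the parameter ids are hypothetical.
LIN = 'lin'

map_sim_var = {'k1': 'opt_k1', 'k2': 0.1}
scale_map_sim_var = None

if scale_map_sim_var is None:
    scale_map_sim_var = {key: LIN for key in map_sim_var}
# -> {'k1': 'lin', 'k2': 'lin'}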
- """ - - def __init__( - self, - parameter_mappings: List[ParameterMappingForCondition] = None - ): - super().__init__() - if parameter_mappings is None: - parameter_mappings = [] - self.parameter_mappings = parameter_mappings - - def __iter__(self): - yield from self.parameter_mappings - - def __getitem__( - self, item - ) -> Union[ParameterMapping, ParameterMappingForCondition]: - result = self.parameter_mappings[item] - if isinstance(result, ParameterMappingForCondition): - return result - return ParameterMapping(result) - - def __len__(self): - return len(self.parameter_mappings) - - def append( - self, - parameter_mapping_for_condition: ParameterMappingForCondition - ): - """Append a condition specific parameter mapping.""" - self.parameter_mappings.append(parameter_mapping_for_condition) - - def __repr__(self): - return f"{self.__class__.__name__}({repr(self.parameter_mappings)})" - - @property - def free_symbols(self) -> Set[str]: - """Get IDs of all (symbolic) parameters present in this mapping""" - return set.union(*(mapping.free_symbols for mapping in self)) - - -def fill_in_parameters( - edatas: List[amici.ExpData], - problem_parameters: Dict[str, numbers.Number], - scaled_parameters: bool, - parameter_mapping: ParameterMapping, - amici_model: AmiciModel -) -> None: - """Fill fixed and dynamic parameters into the edatas (in-place). - - :param edatas: - List of experimental datas :class:`amici.amici.ExpData` with - everything except parameters filled. - :param problem_parameters: - Problem parameters as parameterId=>value dict. Only - parameters included here will be set. Remaining parameters will - be used as currently set in `amici_model`. - :param scaled_parameters: - If True, problem_parameters are assumed to be on the scale provided - in the parameter mapping. If False, they are assumed - to be in linear scale. - :param parameter_mapping: - Parameter mapping for all conditions. - :param amici_model: - AMICI model. - """ - if unused_parameters := (set(problem_parameters.keys()) - - parameter_mapping.free_symbols): - warnings.warn("The following problem parameters were not used: " - + str(unused_parameters), RuntimeWarning) - - for edata, mapping_for_condition in zip(edatas, parameter_mapping): - fill_in_parameters_for_condition( - edata, problem_parameters, scaled_parameters, - mapping_for_condition, amici_model) - - -def fill_in_parameters_for_condition( - edata: amici.ExpData, - problem_parameters: Dict[str, numbers.Number], - scaled_parameters: bool, - parameter_mapping: ParameterMappingForCondition, - amici_model: AmiciModel) -> None: - """Fill fixed and dynamic parameters into the edata for condition - (in-place). - - :param edata: - Experimental data object to fill parameters into. - :param problem_parameters: - Problem parameters as parameterId=>value dict. Only - parameters included here will be set. Remaining parameters will - be used as already set in `amici_model` and `edata`. - :param scaled_parameters: - If True, problem_parameters are assumed to be on the scale provided - in the parameter mapping. If False, they - are assumed to be in linear scale. - :param parameter_mapping: - Parameter mapping for current condition. 
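# A minimal sketch of the unused-parameter check in fill_in_parameters()
# above: problem parameters that never occur as a string-valued target in
# any condition mapping trigger a warning. All names are hypothetical.
import warnings

problem_parameters = {'opt_k1': 0.5, 'opt_unused': 1.0}
free_symbols = {'opt_k1'}  # union of free_symbols over all conditions

if (unused := set(problem_parameters) - free_symbols):
    warnings.warn("The following problem parameters were not used: "
                  + str(unused), RuntimeWarning)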
- :param amici_model: - AMICI model - """ - map_sim_var = parameter_mapping.map_sim_var - scale_map_sim_var = parameter_mapping.scale_map_sim_var - map_preeq_fix = parameter_mapping.map_preeq_fix - scale_map_preeq_fix = parameter_mapping.scale_map_preeq_fix - map_sim_fix = parameter_mapping.map_sim_fix - scale_map_sim_fix = parameter_mapping.scale_map_sim_fix - - # Parameter mapping may contain parameter_ids as values, these *must* - # be replaced - - def _get_par(model_par, value, mapping): - """Replace parameter IDs in mapping dicts by values from - problem_parameters where necessary""" - if isinstance(value, str): - try: - # estimated parameter - return problem_parameters[value] - except KeyError: - # condition table overrides must have been handled already, - # e.g. by the PEtab parameter mapping, but parameters from - # InitialAssignments may still be present. - return _get_par(value, mapping[value], mapping) - if model_par in problem_parameters: - # user-provided - return problem_parameters[model_par] - # prevent nan-propagation in derivative - if np.isnan(value): - return 0.0 - # constant value - return value - - map_preeq_fix = {key: _get_par(key, val, map_preeq_fix) - for key, val in map_preeq_fix.items()} - map_sim_fix = {key: _get_par(key, val, map_sim_fix) - for key, val in map_sim_fix.items()} - map_sim_var = {key: _get_par(key, val, dict(map_sim_fix, **map_sim_var)) - for key, val in map_sim_var.items()} - - # If necessary, (un)scale parameters - if scaled_parameters: - unscale_parameters_dict(map_preeq_fix, scale_map_preeq_fix) - unscale_parameters_dict(map_sim_fix, scale_map_sim_fix) - if not scaled_parameters: - # We scale all parameters to the scale they are estimated on, and pass - # that information to amici via edata.{parameters,pscale}. - # The scaling is necessary to obtain correct derivatives. - scale_parameters_dict(map_sim_var, scale_map_sim_var) - # We can skip preequilibration parameters, because they are identical - # with simulation parameters, and only the latter are used from here - # on. 
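# A minimal sketch of the _get_par() resolution chain above: string values
# are looked up in problem_parameters first, then recursively in the mapping
# itself, and NaNs become 0.0 to prevent NaN propagation in derivatives.
# All names and values are hypothetical.
import math

problem_parameters = {'opt_k1': 0.5}
mapping = {'k1': 'opt_k1',      # estimated parameter
           'k2': 'k3',          # chained override
           'k3': 2.0,           # constant value
           'k4': float('nan')}  # unset -> 0.0

def resolve(model_par, value):
    if isinstance(value, str):
        if value in problem_parameters:
            return problem_parameters[value]
        return resolve(value, mapping[value])
    if model_par in problem_parameters:
        return problem_parameters[model_par]
    if math.isnan(value):
        return 0.0
    return value

resolved = {key: resolve(key, val) for key, val in mapping.items()}
# -> {'k1': 0.5, 'k2': 2.0, 'k3': 2.0, 'k4': 0.0}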
- - ########################################################################## - # variable parameters and parameter scale - - # parameter list from mapping dict - parameters = [map_sim_var[par_id] - for par_id in amici_model.getParameterIds()] - - # scales list from mapping dict - scales = [petab_to_amici_scale(scale_map_sim_var[par_id]) - for par_id in amici_model.getParameterIds()] - - # plist - plist = [ - ip for ip, par_id in enumerate(amici_model.getParameterIds()) - if isinstance(parameter_mapping.map_sim_var[par_id], str) - ] - - if parameters: - edata.parameters = np.asarray(parameters, dtype=float) - - if scales: - edata.pscale = amici.parameterScalingFromIntVector(scales) - - if plist: - edata.plist = plist - - ########################################################################## - # fixed parameters preequilibration - if map_preeq_fix: - fixed_pars_preeq = [map_preeq_fix[par_id] - for par_id in amici_model.getFixedParameterIds()] - edata.fixedParametersPreequilibration = fixed_pars_preeq - - ########################################################################## - # fixed parameters simulation - if map_sim_fix: - fixed_pars_sim = [map_sim_fix[par_id] - for par_id in amici_model.getFixedParameterIds()] - edata.fixedParameters = fixed_pars_sim - - -def petab_to_amici_scale(petab_scale: str) -> int: - """Convert petab scale id to amici scale id.""" - if petab_scale == LIN: - return amici.ParameterScaling_none - if petab_scale == LOG10: - return amici.ParameterScaling_log10 - if petab_scale == LOG: - return amici.ParameterScaling_ln - raise ValueError(f"PEtab scale not recognized: {petab_scale}") - - -def amici_to_petab_scale(amici_scale: int) -> str: - """Convert amici scale id to petab scale id.""" - if amici_scale == amici.ParameterScaling_none: - return LIN - if amici_scale == amici.ParameterScaling_log10: - return LOG10 - if amici_scale == amici.ParameterScaling_ln: - return LOG - raise ValueError(f"AMICI scale not recognized: {amici_scale}") - - -def scale_parameter(value: numbers.Number, - petab_scale: str) -> numbers.Number: - """Bring parameter from linear scale to target scale. - - :param value: - Value to scale - :param petab_scale: - Target scale of ``value`` - - :return: - ``value`` on target scale - """ - if petab_scale == LIN: - return value - if petab_scale == LOG10: - return np.log10(value) - if petab_scale == LOG: - return np.log(value) - raise ValueError(f"Unknown parameter scale {petab_scale}. " - f"Must be from {(LIN, LOG, LOG10)}") - - -def unscale_parameter(value: numbers.Number, - petab_scale: str) -> numbers.Number: - """Bring parameter from scale to linear scale. - - :param value: - Value to scale - :param petab_scale: - Target scale of ``value`` - - :return: - ``value`` on linear scale - """ - if petab_scale == LIN: - return value - if petab_scale == LOG10: - return np.power(10, value) - if petab_scale == LOG: - return np.exp(value) - raise ValueError(f"Unknown parameter scale {petab_scale}. " - f"Must be from {(LIN, LOG, LOG10)}") - - -def scale_parameters_dict( - value_dict: Dict[Any, numbers.Number], - petab_scale_dict: Dict[Any, str]) -> None: - """ - Bring parameters from linear scale to target scale. - - Bring values in ``value_dict`` from linear scale to the scale - provided in ``petab_scale_dict`` (in-place). - Both arguments are expected to have the same length and matching keys. 
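# A minimal sketch of the scale_parameter()/unscale_parameter() pair above:
# scaling maps a linear value onto its estimation scale, unscaling inverts
# it, so a round trip is the identity up to floating point.
import numpy as np

value = 0.05
scaled = np.log10(value)         # scale_parameter(value, LOG10)
unscaled = np.power(10, scaled)  # unscale_parameter(scaled, LOG10)
assert np.isclose(unscaled, value)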
-
-    :param value_dict:
-        Values to scale
-
-    :param petab_scale_dict:
-        Target scales of the values in ``value_dict``
-    """
-    if value_dict.keys() != petab_scale_dict.keys():
-        raise AssertionError("Keys don't match.")
-
-    for key, value in value_dict.items():
-        value_dict[key] = scale_parameter(value, petab_scale_dict[key])
-
-
-def unscale_parameters_dict(
-        value_dict: Dict[Any, numbers.Number],
-        petab_scale_dict: Dict[Any, str]) -> None:
-    """
-    Bring parameters from target scale to linear scale.
-
-    Bring values in ``value_dict`` from the scale provided in
-    ``petab_scale_dict`` to linear scale (in-place).
-    Both arguments are expected to have the same length and matching keys.
-
-    :param value_dict:
-        Values to unscale
-
-    :param petab_scale_dict:
-        Current scales of the values in ``value_dict``
-    """
-    if value_dict.keys() != petab_scale_dict.keys():
-        raise AssertionError("Keys don't match.")
-
-    for key, value in value_dict.items():
-        value_dict[key] = unscale_parameter(value, petab_scale_dict[key])
diff --git a/python/amici/petab_import.py b/python/amici/petab_import.py
deleted file mode 100644
index 61909340c3..0000000000
--- a/python/amici/petab_import.py
+++ /dev/null
@@ -1,830 +0,0 @@
-"""
-PEtab Import
-------------
-Import a model in the :mod:`petab` (https://github.com/PEtab-dev/PEtab) format
-into AMICI.
-"""
-import argparse
-import importlib
-import logging
-import os
-import re
-import shutil
-import tempfile
-from _collections import OrderedDict
-from itertools import chain
-from pathlib import Path
-from typing import Dict, List, Optional, Tuple, Union
-from warnings import warn
-
-import libsbml
-import pandas as pd
-import petab
-import sympy as sp
-from petab.C import *
-from petab.parameters import get_valid_parameters_for_parameter_table
-
-import amici
-from amici.logging import get_logger, log_execution_time, set_log_level
-
-try:
-    from amici.petab_import_pysb import PysbPetabProblem, import_model_pysb
-except ModuleNotFoundError:
-    # pysb not available
-    PysbPetabProblem = None
-    import_model_pysb = None
-
-logger = get_logger(__name__, logging.WARNING)
-
-# ID of model parameter that is to be added to SBML model to indicate
-# preequilibration
-PREEQ_INDICATOR_ID = 'preequilibration_indicator'
-
-
-def _add_global_parameter(sbml_model: libsbml.Model,
-                          parameter_id: str,
-                          parameter_name: str = None,
-                          constant: bool = False,
-                          units: str = 'dimensionless',
-                          value: float = 0.0) -> libsbml.Parameter:
-    """Add a new global parameter to an SBML model
-
-    Arguments:
-        sbml_model: SBML model
-        parameter_id: ID of the new parameter
-        parameter_name: Name of the new parameter
-        constant: Is parameter constant?
-        units: SBML unit ID
-        value: parameter value
-
-    Returns:
-        The created parameter
-    """
-    if parameter_name is None:
-        parameter_name = parameter_id
-
-    p = sbml_model.createParameter()
-    p.setId(parameter_id)
-    p.setName(parameter_name)
-    p.setConstant(constant)
-    p.setValue(value)
-    p.setUnits(units)
-    return p
-
-
-def get_fixed_parameters(
-        petab_problem: petab.Problem
-) -> List[str]:
-    """
-    Determine, set and return fixed model parameters.
-
-    Non-estimated parameters and parameters specified in the condition table
-    are turned into constants (unless they are overridden).
-    Only global SBML parameters are considered. Local parameters are ignored.
-
-    :param petab_problem:
-        The PEtab problem instance
-
-    :return:
-        List of IDs of parameters which are to be considered constant.
- """ - # initial concentrations for species or initial compartment sizes in - # condition table will need to be turned into fixed parameters - - # if there is no initial assignment for that species, we'd need - # to create one. to avoid any naming collision right away, we don't - # allow that for now - - # we can't handle them yet - compartments = [ - col for col in petab_problem.condition_df - if petab_problem.sbml_model.getCompartment(col) is not None - ] - if compartments: - raise NotImplementedError("Can't handle initial compartment sizes " - "at the moment. Consider creating an " - f"initial assignment for {compartments}") - - # if we have a parameter table, all parameters that are allowed to be - # listed in the parameter table, but are not marked as estimated, can be - # turned in to AMICI constants - # due to legacy API, we might not always have a parameter table, though - fixed_parameters = set() - if petab_problem.parameter_df is not None: - all_parameters = get_valid_parameters_for_parameter_table( - model=petab_problem.model, - condition_df=petab_problem.condition_df, - observable_df=petab_problem.observable_df - if petab_problem.observable_df is not None - else pd.DataFrame(columns=petab.OBSERVABLE_DF_REQUIRED_COLS), - measurement_df=petab_problem.measurement_df - if petab_problem.measurement_df is not None - else pd.DataFrame(columns=petab.MEASUREMENT_DF_REQUIRED_COLS), - ) - estimated_parameters = petab_problem.parameter_df.index.values[ - petab_problem.parameter_df[ESTIMATE] == 1] - fixed_parameters = set(all_parameters) - set(estimated_parameters) - - sbml_model = petab_problem.sbml_model - condition_df = petab_problem.condition_df - - # Column names are model parameter IDs, compartment IDs or species IDs. - # Thereof, all parameters except for any overridden ones should be made - # constant. - # (Could potentially still be made constant, but leaving them might - # increase model reusability) - - # handle parameters in condition table - if condition_df is not None: - logger.debug(f'Condition table: {condition_df.shape}') - - # remove overridden parameters (`object`-type columns) - fixed_parameters.update( - p for p in condition_df.columns - # get rid of conditionName column - if p != CONDITION_NAME - # there is no parametric override - # TODO: could check if the final overriding parameter is estimated - # or not, but for now, we skip the parameter if there is any kind - # of overriding - if condition_df[p].dtype != 'O' - # p is a parameter - and sbml_model.getParameter(p) is not None - # but not a rule target - and sbml_model.getRuleByVariable(p) is None - ) - - # Ensure mentioned parameters exist in the model. Remove additional ones - # from list - for fixed_parameter in fixed_parameters.copy(): - # check global parameters - if not sbml_model.getParameter(fixed_parameter): - logger.warning(f"Parameter or species '{fixed_parameter}'" - " provided in condition table but not present in" - " model. Ignoring.") - fixed_parameters.remove(fixed_parameter) - - return list(sorted(fixed_parameters)) - - -def species_to_parameters(species_ids: List[str], - sbml_model: 'libsbml.Model') -> List[str]: - """ - Turn a SBML species into parameters and replace species references - inside the model instance. - - :param species_ids: - List of SBML species ID to convert to parameters with the same ID as - the replaced species. 
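# A minimal sketch of the fixed-parameter selection above: any parameter
# that may appear in the parameter table but is not marked as estimated
# becomes an AMICI constant. The tables here are hypothetical stand-ins.
import pandas as pd

parameter_df = pd.DataFrame(
    {'estimate': [1, 0, 1]},
    index=pd.Index(['k1', 'k2', 'k3'], name='parameterId'),
)
all_parameters = {'k1', 'k2', 'k3', 'k4'}

estimated = set(parameter_df.index.values[parameter_df['estimate'] == 1])
fixed_parameters = sorted(all_parameters - estimated)
# -> ['k2', 'k4']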
- - :param sbml_model: - SBML model to modify - - :return: - List of IDs of species which have been converted to parameters - """ - transformables = [] - - for species_id in species_ids: - species = sbml_model.getSpecies(species_id) - - if species.getHasOnlySubstanceUnits(): - logger.warning( - f"Ignoring {species.getId()} which has only substance units." - " Conversion not yet implemented.") - continue - - if math.isnan(species.getInitialConcentration()): - logger.warning( - f"Ignoring {species.getId()} which has no initial " - "concentration. Amount conversion not yet implemented.") - continue - - transformables.append(species_id) - - # Must not remove species while iterating over getListOfSpecies() - for species_id in transformables: - species = sbml_model.removeSpecies(species_id) - par = sbml_model.createParameter() - par.setId(species.getId()) - par.setName(species.getName()) - par.setConstant(True) - par.setValue(species.getInitialConcentration()) - par.setUnits(species.getUnits()) - - # Remove from reactants and products - for reaction in sbml_model.getListOfReactions(): - for species_id in transformables: - # loop, since removeX only removes one instance - while reaction.removeReactant(species_id): - # remove from reactants - pass - while reaction.removeProduct(species_id): - # remove from products - pass - while reaction.removeModifier(species_id): - # remove from modifiers - pass - - return transformables - - -def import_petab_problem( - petab_problem: petab.Problem, - model_output_dir: Union[str, Path, None] = None, - model_name: str = None, - force_compile: bool = False, - **kwargs) -> 'amici.Model': - """ - Import model from petab problem. - - :param petab_problem: - A petab problem containing all relevant information on the model. - - :param model_output_dir: - Directory to write the model code to. Will be created if doesn't - exist. Defaults to current directory. - - :param model_name: - Name of the generated model. If model file name was provided, - this defaults to the file name without extension, otherwise - the model ID will be used. - - :param force_compile: - Whether to compile the model even if the target folder is not empty, - or the model exists already. - - :param kwargs: - Additional keyword arguments to be passed to - :meth:`amici.sbml_import.SbmlImporter.sbml2amici`. - - :return: - The imported model. - """ - # generate folder and model name if necessary - if model_output_dir is None: - if PysbPetabProblem and isinstance(petab_problem, PysbPetabProblem): - raise ValueError("Parameter `model_output_dir` is required.") - - model_output_dir = \ - _create_model_output_dir_name(petab_problem.sbml_model) - else: - model_output_dir = os.path.abspath(model_output_dir) - - if PysbPetabProblem and isinstance(petab_problem, PysbPetabProblem) \ - and model_name is None: - model_name = petab_problem.pysb_model.name - elif model_name is None: - model_name = _create_model_name(model_output_dir) - - # create folder - if not os.path.exists(model_output_dir): - os.makedirs(model_output_dir) - - # check if compilation necessary - if force_compile or not _can_import_model(model_name, model_output_dir): - # check if folder exists - if os.listdir(model_output_dir) and not force_compile: - raise ValueError( - f"Cannot compile to {model_output_dir}: not empty. 
" - "Please assign a different target or set `force_compile`.") - - # remove folder if exists - if os.path.exists(model_output_dir): - shutil.rmtree(model_output_dir) - - logger.info(f"Compiling model {model_name} to {model_output_dir}.") - # compile the model - if PysbPetabProblem and isinstance(petab_problem, PysbPetabProblem): - import_model_pysb( - petab_problem, - model_name=model_name, - model_output_dir=model_output_dir, - **kwargs) - else: - import_model_sbml( - petab_problem=petab_problem, - model_name=model_name, - model_output_dir=model_output_dir, - **kwargs) - - # import model - model_module = amici.import_model_module(model_name, model_output_dir) - model = model_module.getModel() - - logger.info(f"Successfully loaded model {model_name} " - f"from {model_output_dir}.") - - return model - - -def _create_model_output_dir_name(sbml_model: 'libsbml.Model') -> Path: - """ - Find a folder for storing the compiled amici model. - If possible, use the sbml model id, otherwise create a random folder. - The folder will be located in the `amici_models` subfolder of the current - folder. - """ - BASE_DIR = Path("amici_models").absolute() - BASE_DIR.mkdir(exist_ok=True) - # try sbml model id - if sbml_model_id := sbml_model.getId(): - return BASE_DIR / sbml_model_id - - # create random folder name - return Path(tempfile.mkdtemp(dir=BASE_DIR)) - - -def _create_model_name(folder: Union[str, Path]) -> str: - """ - Create a name for the model. - Just re-use the last part of the folder. - """ - return os.path.split(os.path.normpath(folder))[-1] - - -def _can_import_model( - model_name: str, - model_output_dir: Union[str, Path] -) -> bool: - """ - Check whether a module of that name can already be imported. - """ - # try to import (in particular checks version) - try: - with amici.add_path(model_output_dir): - model_module = importlib.import_module(model_name) - except ModuleNotFoundError: - return False - - # no need to (re-)compile - return hasattr(model_module, "getModel") - - -@log_execution_time('Importing PEtab model', logger) -def import_model_sbml( - sbml_model: Union[str, Path, 'libsbml.Model'] = None, - condition_table: Optional[Union[str, Path, pd.DataFrame]] = None, - observable_table: Optional[Union[str, Path, pd.DataFrame]] = None, - measurement_table: Optional[Union[str, Path, pd.DataFrame]] = None, - petab_problem: petab.Problem = None, - model_name: Optional[str] = None, - model_output_dir: Optional[Union[str, Path]] = None, - verbose: Optional[Union[bool, int]] = True, - allow_reinit_fixpar_initcond: bool = True, - validate: bool = True, - **kwargs) -> amici.SbmlImporter: - """ - Create AMICI model from PEtab problem - - :param sbml_model: - PEtab SBML model or SBML file name. - Deprecated, pass ``petab_problem`` instead. - - :param condition_table: - PEtab condition table. If provided, parameters from there will be - turned into AMICI constant parameters (i.e. parameters w.r.t. which - no sensitivities will be computed). - Deprecated, pass ``petab_problem`` instead. - - :param observable_table: - PEtab observable table. Deprecated, pass ``petab_problem`` instead. - - :param measurement_table: - PEtab measurement table. Deprecated, pass ``petab_problem`` instead. - - :param petab_problem: - PEtab problem. - - :param model_name: - Name of the generated model. If model file name was provided, - this defaults to the file name without extension, otherwise - the SBML model ID will be used. - - :param model_output_dir: - Directory to write the model code to. 
Will be created if doesn't - exist. Defaults to current directory. - - :param verbose: - Print/log extra information. - - :param allow_reinit_fixpar_initcond: - See :class:`amici.ode_export.ODEExporter`. Must be enabled if initial - states are to be reset after preequilibration. - - :param validate: - Whether to validate the PEtab problem - - :param kwargs: - Additional keyword arguments to be passed to - :meth:`amici.sbml_import.SbmlImporter.sbml2amici`. - - :return: - The created :class:`amici.sbml_import.SbmlImporter` instance. - """ - from petab.models.sbml_model import SbmlModel - - set_log_level(logger, verbose) - - logger.info("Importing model ...") - - if any([sbml_model, condition_table, observable_table, measurement_table]): - warn("The `sbml_model`, `condition_table`, `observable_table`, and " - "`measurement_table` arguments are deprecated and will be " - "removed in a future version. Use `petab_problem` instead.", - DeprecationWarning, stacklevel=2) - if petab_problem: - raise ValueError("Must not pass a `petab_problem` argument in " - "combination with any of `sbml_model`, " - "`condition_table`, `observable_table`, or " - "`measurement_table`.") - - petab_problem = petab.Problem( - model=SbmlModel(sbml_model) - if isinstance(sbml_model, libsbml.Model) - else SbmlModel.from_file(sbml_model), - condition_df=petab.get_condition_df(condition_table), - observable_df=petab.get_observable_df(observable_table), - ) - - if petab_problem.observable_df is None: - raise NotImplementedError("PEtab import without observables table " - "is currently not supported.") - - assert isinstance(petab_problem.model, SbmlModel) - - if validate: - logger.info("Validating PEtab problem ...") - petab.lint_problem(petab_problem) - - # Model name from SBML ID or filename - if model_name is None: - if not (model_name := petab_problem.model.sbml_model.getId()): - if not isinstance(sbml_model, (str, Path)): - raise ValueError("No `model_name` was provided and no model " - "ID was specified in the SBML model.") - model_name = os.path.splitext(os.path.split(sbml_model)[-1])[0] - - if model_output_dir is None: - model_output_dir = os.path.join( - os.getcwd(), f"{model_name}-amici{amici.__version__}" - ) - - logger.info(f"Model name is '{model_name}'.\n" - f"Writing model code to '{model_output_dir}'.") - - # Create a copy, because it will be modified by SbmlImporter - sbml_doc = petab_problem.model.sbml_model.getSBMLDocument().clone() - sbml_model = sbml_doc.getModel() - - show_model_info(sbml_model) - - sbml_importer = amici.SbmlImporter(sbml_model) - sbml_model = sbml_importer.sbml - - allow_n_noise_pars = \ - not petab.lint.observable_table_has_nontrivial_noise_formula( - petab_problem.observable_df - ) - if petab_problem.measurement_df is not None and \ - petab.lint.measurement_table_has_timepoint_specific_mappings( - petab_problem.measurement_df, - allow_scalar_numeric_noise_parameters=allow_n_noise_pars - ): - raise ValueError( - 'AMICI does not support importing models with timepoint specific ' - 'mappings for noise or observable parameters. Please flatten ' - 'the problem and try again.' 
- ) - - if petab_problem.observable_df is not None: - observables, noise_distrs, sigmas = \ - get_observation_model(petab_problem.observable_df) - else: - observables = noise_distrs = sigmas = None - - logger.info(f'Observables: {len(observables)}') - logger.info(f'Sigmas: {len(sigmas)}') - - if len(sigmas) != len(observables): - raise AssertionError( - f'Number of provided observables ({len(observables)}) and sigmas ' - f'({len(sigmas)}) do not match.') - - # TODO: adding extra output parameters is currently not supported, - # so we add any output parameters to the SBML model. - # this should be changed to something more elegant - # - formulas = chain((val['formula'] for val in observables.values()), - sigmas.values()) - output_parameters = OrderedDict() - for formula in formulas: - # we want reproducible parameter ordering upon repeated import - free_syms = sorted(sp.sympify(formula).free_symbols, - key=lambda symbol: symbol.name) - for free_sym in free_syms: - sym = str(free_sym) - if sbml_model.getElementBySId(sym) is None and sym != 'time' \ - and sym not in observables: - output_parameters[sym] = None - logger.debug("Adding output parameters to model: " - f"{list(output_parameters.keys())}") - for par in output_parameters.keys(): - _add_global_parameter(sbml_model, par) - # - - # TODO: to parameterize initial states or compartment sizes, we currently - # need initial assignments. if they occur in the condition table, we - # create a new parameter initial_${startOrCompartmentID}. - # feels dirty and should be changed (see also #924) - # - - initial_states = [col for col in petab_problem.condition_df - if element_is_state(sbml_model, col)] - fixed_parameters = [] - if initial_states: - # add preequilibration indicator variable - # NOTE: would only be required if we actually have preequilibration - # adding it anyways. can be optimized-out later - if sbml_model.getParameter(PREEQ_INDICATOR_ID) is not None: - raise AssertionError("Model already has a parameter with ID " - f"{PREEQ_INDICATOR_ID}. Cannot handle " - "species and compartments in condition table " - "then.") - indicator = sbml_model.createParameter() - indicator.setId(PREEQ_INDICATOR_ID) - indicator.setName(PREEQ_INDICATOR_ID) - # Can only reset parameters after preequilibration if they are fixed. - fixed_parameters.append(PREEQ_INDICATOR_ID) - logger.debug("Adding preequilibration indicator " - f"constant {PREEQ_INDICATOR_ID}") - logger.debug(f"Adding initial assignments for {initial_states}") - for assignee_id in initial_states: - init_par_id_preeq = f"initial_{assignee_id}_preeq" - init_par_id_sim = f"initial_{assignee_id}_sim" - for init_par_id in [init_par_id_preeq, init_par_id_sim]: - if sbml_model.getElementBySId(init_par_id) is not None: - raise ValueError( - "Cannot create parameter for initial assignment " - f"for {assignee_id} because an entity named " - f"{init_par_id} exists already in the model.") - init_par = sbml_model.createParameter() - init_par.setId(init_par_id) - init_par.setName(init_par_id) - assignment = sbml_model.getInitialAssignment(assignee_id) - if assignment is None: - assignment = sbml_model.createInitialAssignment() - assignment.setSymbol(assignee_id) - else: - logger.debug('The SBML model has an initial assignment defined ' - f'for model entity {assignee_id}, but this entity ' - 'also has an initial value defined in the PEtab ' - 'condition table. 
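# A minimal sketch of the output-parameter collection above: free symbols of
# the observable and sigma formulas are sorted by name so that repeated
# imports produce the same parameter ordering. The formulas are hypothetical.
import sympy as sp

formulas = ['scaling_a * x1 + offset_a', 'noise_a']
output_parameters = {}
for formula in formulas:
    for free_sym in sorted(sp.sympify(formula).free_symbols,
                           key=lambda symbol: symbol.name):
        output_parameters[str(free_sym)] = None
# -> ['offset_a', 'scaling_a', 'x1', 'noise_a']; for a real model, 'x1'
#    would be skipped because it already exists as an SBML entity.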
The SBML initial assignment will ' - 'be overwritten to handle preequilibration and ' - 'initial values specified by the PEtab problem.') - formula = f'{PREEQ_INDICATOR_ID} * {init_par_id_preeq} ' \ - f'+ (1 - {PREEQ_INDICATOR_ID}) * {init_par_id_sim}' - math_ast = libsbml.parseL3Formula(formula) - assignment.setMath(math_ast) - # - - fixed_parameters.extend( - get_fixed_parameters( - petab_problem=petab_problem, - )) - - logger.debug(f"Fixed parameters are {fixed_parameters}") - logger.info(f"Overall fixed parameters: {len(fixed_parameters)}") - logger.info("Variable parameters: " - + str(len(sbml_model.getListOfParameters()) - - len(fixed_parameters))) - - # Create Python module from SBML model - sbml_importer.sbml2amici( - model_name=model_name, - output_dir=model_output_dir, - observables=observables, - constant_parameters=fixed_parameters, - sigmas=sigmas, - allow_reinit_fixpar_initcond=allow_reinit_fixpar_initcond, - noise_distributions=noise_distrs, - verbose=verbose, - **kwargs) - - return sbml_importer - - -# for backwards compatibility -import_model = import_model_sbml - - -def get_observation_model( - observable_df: pd.DataFrame, -) -> Tuple[Dict[str, Dict[str, str]], Dict[str, str], - Dict[str, Union[str, float]]]: - """ - Get observables, sigmas, and noise distributions from PEtab observation - table in a format suitable for - :meth:`amici.sbml_import.SbmlImporter.sbml2amici`. - - :param observable_df: - PEtab observables table - - :return: - Tuple of dicts with observables, noise distributions, and sigmas. - """ - - if observable_df is None: - return {}, {}, {} - - observables = {} - sigmas = {} - - nan_pat = r'^[nN]a[nN]$' - for _, observable in observable_df.iterrows(): - oid = str(observable.name) - # need to sanitize due to https://github.com/PEtab-dev/PEtab/issues/447 - name = re.sub(nan_pat, '', str(observable.get(OBSERVABLE_NAME, ''))) - formula_obs = re.sub(nan_pat, '', str(observable[OBSERVABLE_FORMULA])) - formula_noise = re.sub(nan_pat, '', str(observable[NOISE_FORMULA])) - observables[oid] = {'name': name, 'formula': formula_obs} - sigmas[oid] = formula_noise - - # PEtab does currently not allow observables in noiseFormula and AMICI - # cannot handle states in sigma expressions. Therefore, where possible, - # replace species occurring in error model definition by observableIds. - replacements = { - sp.sympify(observable['formula']): sp.Symbol(observable_id) - for observable_id, observable in observables.items() - } - for observable_id, formula in sigmas.items(): - repl = sp.sympify(formula).subs(replacements) - sigmas[observable_id] = str(repl) - - noise_distrs = petab_noise_distributions_to_amici(observable_df) - - return observables, noise_distrs, sigmas - - -def petab_noise_distributions_to_amici(observable_df: pd.DataFrame - ) -> Dict[str, str]: - """ - Map from the petab to the amici format of noise distribution - identifiers. 
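# A minimal sketch of the preequilibration switch encoded by the initial
# assignment above: indicator 1 selects the preequilibration value,
# indicator 0 the simulation value. Plain Python stands in for the SBML math.
def initial_value(indicator, preeq_value, sim_value):
    # preequilibration_indicator * initial_x_preeq
    #   + (1 - preequilibration_indicator) * initial_x_sim
    return indicator * preeq_value + (1 - indicator) * sim_value

assert initial_value(1, 10.0, 3.0) == 10.0  # during preequilibration
assert initial_value(0, 10.0, 3.0) == 3.0   # during simulation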
- - :param observable_df: - PEtab observable table - - :return: - Dictionary of observable_id => AMICI noise-distributions - """ - amici_distrs = {} - for _, observable in observable_df.iterrows(): - amici_val = '' - - if OBSERVABLE_TRANSFORMATION in observable \ - and isinstance(observable[OBSERVABLE_TRANSFORMATION], str) \ - and observable[OBSERVABLE_TRANSFORMATION]: - amici_val += observable[OBSERVABLE_TRANSFORMATION] + '-' - - if NOISE_DISTRIBUTION in observable \ - and isinstance(observable[NOISE_DISTRIBUTION], str) \ - and observable[NOISE_DISTRIBUTION]: - amici_val += observable[NOISE_DISTRIBUTION] - else: - amici_val += 'normal' - amici_distrs[observable.name] = amici_val - - return amici_distrs - - -def petab_scale_to_amici_scale(scale_str: str) -> int: - """Convert PEtab parameter scaling string to AMICI scaling integer""" - - if scale_str == petab.LIN: - return amici.ParameterScaling_none - if scale_str == petab.LOG: - return amici.ParameterScaling_ln - if scale_str == petab.LOG10: - return amici.ParameterScaling_log10 - - raise ValueError(f"Invalid parameter scale {scale_str}") - - -def show_model_info(sbml_model: 'libsbml.Model'): - """Log some model quantities""" - - logger.info(f'Species: {len(sbml_model.getListOfSpecies())}') - logger.info('Global parameters: ' - + str(len(sbml_model.getListOfParameters()))) - logger.info(f'Reactions: {len(sbml_model.getListOfReactions())}') - - -def element_is_state(sbml_model: libsbml.Model, sbml_id: str) -> bool: - """Does the element with ID `sbml_id` correspond to a state variable? - """ - if sbml_model.getCompartment(sbml_id) is not None: - return True - if sbml_model.getSpecies(sbml_id) is not None: - return True - if (rule := sbml_model.getRuleByVariable(sbml_id)) is not None \ - and rule.getTypeCode() == libsbml.SBML_RATE_RULE: - return True - - return False - - -def _parse_cli_args(): - """ - Parse command line arguments - - :return: - Parsed CLI arguments from :mod:`argparse`. 
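# A minimal sketch of the distribution-identifier assembly in
# petab_noise_distributions_to_amici() above: the AMICI identifier is
# '<observableTransformation>-<noiseDistribution>', defaulting to 'normal'.
def to_amici_distribution(transformation, distribution):
    amici_val = ''
    if transformation:
        amici_val += transformation + '-'
    amici_val += distribution or 'normal'
    return amici_val

assert to_amici_distribution('log', 'normal') == 'log-normal'
assert to_amici_distribution('', '') == 'normal'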
- """ - - parser = argparse.ArgumentParser( - description='Import PEtab-format model into AMICI.') - - # General options: - parser.add_argument('-v', '--verbose', dest='verbose', action='store_true', - help='More verbose output') - parser.add_argument('-o', '--output-dir', dest='model_output_dir', - help='Name of the model directory to create') - parser.add_argument('--no-compile', action='store_false', - dest='compile', - help='Only generate model code, do not compile') - parser.add_argument('--flatten', dest='flatten', default=False, - action='store_true', - help='Flatten measurement specific overrides of ' - 'observable and noise parameters') - parser.add_argument('--no-sensitivities', dest='generate_sensitivity_code', - default=True, action='store_false', - help='Skip generation of sensitivity code') - - # Call with set of files - parser.add_argument('-s', '--sbml', dest='sbml_file_name', - help='SBML model filename') - parser.add_argument('-m', '--measurements', dest='measurement_file_name', - help='Measurement table') - parser.add_argument('-c', '--conditions', dest='condition_file_name', - help='Conditions table') - parser.add_argument('-p', '--parameters', dest='parameter_file_name', - help='Parameter table') - parser.add_argument('-b', '--observables', dest='observable_file_name', - help='Observable table') - - parser.add_argument('-y', '--yaml', dest='yaml_file_name', - help='PEtab YAML problem filename') - - parser.add_argument('-n', '--model-name', dest='model_name', - help='Name of the python module generated for the ' - 'model') - - args = parser.parse_args() - - if not args.yaml_file_name \ - and not all((args.sbml_file_name, args.condition_file_name, - args.observable_file_name)): - parser.error('When not specifying a model name or YAML file, then ' - 'SBML, condition and observable file must be specified') - - return args - - -def main(): - """ - Command line interface to import a model in the PEtab - (https://github.com/PEtab-dev/PEtab/) format into AMICI. - """ - args = _parse_cli_args() - - if args.yaml_file_name: - pp = petab.Problem.from_yaml(args.yaml_file_name) - else: - pp = petab.Problem.from_files( - sbml_file=args.sbml_file_name, - condition_file=args.condition_file_name, - measurement_file=args.measurement_file_name, - parameter_file=args.parameter_file_name, - observable_files=args.observable_file_name) - - # Check for valid PEtab before potentially modifying it - petab.lint_problem(pp) - - if args.flatten: - petab.flatten_timepoint_specific_output_overrides(pp) - - import_model(model_name=args.model_name, - sbml_model=pp.sbml_model, - condition_table=pp.condition_df, - observable_table=pp.observable_df, - measurement_table=pp.measurement_df, - model_output_dir=args.model_output_dir, - compile=args.compile, - generate_sensitivity_code=args.generate_sensitivity_code, - verbose=args.verbose, - validate=False) - - -if __name__ == '__main__': - main() diff --git a/python/amici/petab_import_pysb.py b/python/amici/petab_import_pysb.py deleted file mode 100644 index 02e5aca038..0000000000 --- a/python/amici/petab_import_pysb.py +++ /dev/null @@ -1,388 +0,0 @@ -""" -PySB-PEtab Import ------------------ -Import a model in the PySB-adapted :mod:`petab` -(https://github.com/PEtab-dev/PEtab) format into AMICI. 
-""" - -import logging -import os -from itertools import chain -from pathlib import Path -from typing import Dict, Iterable, Optional, Tuple, Union - -import libsbml -import petab -import pysb -import sympy as sp -from petab.C import (CONDITION_FILES, CONDITION_NAME, FORMAT_VERSION, - MEASUREMENT_FILES, NOISE_FORMULA, OBSERVABLE_FILES, - OBSERVABLE_FORMULA, PARAMETER_FILE, SBML_FILES, - VISUALIZATION_FILES) - -from . import petab_import -from .logging import get_logger, log_execution_time, set_log_level - -logger = get_logger(__name__, logging.WARNING) - - -class PysbPetabProblem(petab.Problem): - """Representation of a PySB-model-based PEtab problem - - This class extends :class:`petab.Problem` with a PySB model. - The model is augmented with the observation model based on the PEtab - observable table. - For now, a dummy SBML model is created which allows used the existing - SBML-PEtab API. - - :ivar pysb_model: - PySB model instance from of this PEtab problem. - - """ - - def __init__(self, pysb_model: 'pysb.Model' = None, *args, **kwargs): - """ - Constructor - - :param pysb_model: PySB model instance for this PEtab problem - :param args: See :meth:`petab.Problem.__init__` - :param kwargs: See :meth:`petab.Problem.__init__` - """ - flatten = kwargs.pop('flatten', False) - super().__init__(*args, **kwargs) - if flatten: - petab.flatten_timepoint_specific_output_overrides(self) - - self.pysb_model: 'pysb.Model' = pysb_model - self._add_observation_model() - - if self.pysb_model is not None: - self.sbml_document, self.sbml_model = \ - create_dummy_sbml( - self.pysb_model, - observable_ids=self.observable_df.index.values - if self.observable_df is not None else None - ) - - def _add_observation_model(self): - """Extend PySB model by observation model as defined in the PEtab - observables table""" - - # add any required output parameters - local_syms = {sp.Symbol.__str__(comp): comp for comp in - self.pysb_model.components if - isinstance(comp, sp.Symbol)} - for formula in [*self.observable_df[OBSERVABLE_FORMULA], - *self.observable_df[NOISE_FORMULA]]: - sym = sp.sympify(formula, locals=local_syms) - for s in sym.free_symbols: - if not isinstance(s, pysb.Component): - p = pysb.Parameter(str(s), 1.0, _export=False) - self.pysb_model.add_component(p) - local_syms[sp.Symbol.__str__(p)] = p - - # add observables and sigmas to pysb model - for (observable_id, observable_formula, noise_formula) \ - in zip(self.observable_df.index, - self.observable_df[OBSERVABLE_FORMULA], - self.observable_df[NOISE_FORMULA]): - obs_symbol = sp.sympify(observable_formula, locals=local_syms) - if observable_id in self.pysb_model.expressions.keys(): - obs_expr = self.pysb_model.expressions[observable_id] - else: - obs_expr = pysb.Expression(observable_id, obs_symbol, - _export=False) - self.pysb_model.add_component(obs_expr) - local_syms[observable_id] = obs_expr - - sigma_id = f"{observable_id}_sigma" - sigma_symbol = sp.sympify( - noise_formula, - locals=local_syms - ) - sigma_expr = pysb.Expression(sigma_id, sigma_symbol, _export=False) - self.pysb_model.add_component(sigma_expr) - local_syms[sigma_id] = sigma_expr - - @staticmethod - def from_files( - condition_file: - Union[str, Path, Iterable[Union[str, Path]]] = None, - measurement_file: - Union[str, Path, Iterable[Union[str, Path]]] = None, - parameter_file: - Union[str, Path, Iterable[Union[str, Path]]] = None, - visualization_files: - Union[str, Path, Iterable[Union[str, Path]]] = None, - observable_files: - Union[str, Path, Iterable[Union[str, Path]]] = 
None, - pysb_model_file: Union[str, Path] = None, - flatten: bool = False - ) -> 'PysbPetabProblem': - """ - Factory method to load model and tables from files. - - :param condition_file: - PEtab condition table - - :param measurement_file: - PEtab measurement table - - :param parameter_file: - PEtab parameter table - - :param visualization_files: - PEtab visualization tables - - :param observable_files: - PEtab observables tables - - :param pysb_model_file: - PySB model file - - :param flatten: - Flatten the petab problem - - :return: - Petab Problem - """ - - condition_df = measurement_df = parameter_df = visualization_df = None - observable_df = None - - if condition_file: - condition_df = petab.conditions.get_condition_df(condition_file) - - if measurement_file: - # If there are multiple tables, we will merge them - measurement_df = petab.core.concat_tables( - measurement_file, petab.measurements.get_measurement_df) - - if parameter_file: - parameter_df = petab.parameters.get_parameter_df(parameter_file) - - if visualization_files: - # If there are multiple tables, we will merge them - visualization_df = petab.core.concat_tables( - visualization_files, petab.core.get_visualization_df) - - if observable_files: - # If there are multiple tables, we will merge them - observable_df = petab.core.concat_tables( - observable_files, petab.observables.get_observable_df) - from amici.pysb_import import pysb_model_from_path - return PysbPetabProblem( - pysb_model=pysb_model_from_path( - pysb_model_file=pysb_model_file), - condition_df=condition_df, - measurement_df=measurement_df, - parameter_df=parameter_df, - observable_df=observable_df, - visualization_df=visualization_df, - flatten=flatten - ) - - @staticmethod - def from_yaml(yaml_config: Union[Dict, Path, str], - flatten: bool = False) -> 'PysbPetabProblem': - """ - Factory method to load model and tables as specified by YAML file. - - NOTE: The PySB model is currently expected in the YAML file under - ``sbml_files``. - - :param yaml_config: - PEtab configuration as dictionary or YAML file name - - :param flatten: - Flatten the petab problem - - :return: - Petab Problem - """ - from petab.yaml import (load_yaml, is_composite_problem, - assert_single_condition_and_sbml_file) - if isinstance(yaml_config, (str, Path)): - path_prefix = os.path.dirname(yaml_config) - yaml_config = load_yaml(yaml_config) - else: - path_prefix = "" - - if is_composite_problem(yaml_config): - raise ValueError('petab.Problem.from_yaml() can only be used for ' - 'yaml files comprising a single model. ' - 'Consider using ' - 'petab.CompositeProblem.from_yaml() instead.') - - if yaml_config[FORMAT_VERSION] != petab.__format_version__: - raise ValueError("Provided PEtab files are of unsupported version" - f"{yaml_config[FORMAT_VERSION]}. 
Expected " - f"{petab.__format_version__}.") - - problem0 = yaml_config['problems'][0] - - assert_single_condition_and_sbml_file(problem0) - - if isinstance(yaml_config[PARAMETER_FILE], list): - parameter_file = [ - os.path.join(path_prefix, f) - for f in yaml_config[PARAMETER_FILE] - ] - else: - parameter_file = os.path.join( - path_prefix, yaml_config[PARAMETER_FILE]) - - return PysbPetabProblem.from_files( - pysb_model_file=os.path.join( - path_prefix, problem0[SBML_FILES][0]), - measurement_file=[os.path.join(path_prefix, f) - for f in problem0[MEASUREMENT_FILES]], - condition_file=os.path.join( - path_prefix, problem0[CONDITION_FILES][0]), - parameter_file=parameter_file, - visualization_files=[ - os.path.join(path_prefix, f) - for f in problem0.get(VISUALIZATION_FILES, [])], - observable_files=[ - os.path.join(path_prefix, f) - for f in problem0.get(OBSERVABLE_FILES, [])], - flatten=flatten - ) - - -def create_dummy_sbml( - pysb_model: 'pysb.Model', - observable_ids: Optional[Iterable[str]] = None -) -> Tuple['libsbml.Model', 'libsbml.SBMLDocument']: - """Create SBML dummy model for to use PySB models with PEtab. - - Model must at least contain PEtab problem parameter and noise parameters - for observables. - - :param pysb_model: PySB model - :param observable_ids: Observable IDs - :return: A dummy SBML model and document. - """ - - import libsbml - - document = libsbml.SBMLDocument(3, 1) - dummy_sbml_model = document.createModel() - dummy_sbml_model.setTimeUnits("second") - dummy_sbml_model.setExtentUnits("mole") - dummy_sbml_model.setSubstanceUnits('mole') - - # mandatory if there are species - c = dummy_sbml_model.createCompartment() - c.setId('dummy_compartment') - c.setConstant(False) - - # parameters are required for parameter mapping - for parameter in pysb_model.parameters: - p = dummy_sbml_model.createParameter() - p.setId(parameter.name) - p.setConstant(True) - p.setValue(0.0) - - # noise parameters are required for every observable - for observable_id in observable_ids: - p = dummy_sbml_model.createParameter() - p.setId(f"noiseParameter1_{observable_id}") - p.setConstant(True) - p.setValue(0.0) - - # pysb observables and expressions are required in case they occur in - # the observableFormula or noiseFormula. - # as this code is only temporary and not performance-critical, we just add - # all of them. we just need an sbml entity with the same ID. sbml species - # seem to be the simplest, as parameters would interfere with parameter - # mapping later on - for component in chain(pysb_model.expressions, pysb_model.observables): - s = dummy_sbml_model.createSpecies() - s.setId(component.name) - s.setInitialAmount(0.0) - s.setHasOnlySubstanceUnits(False) - s.setBoundaryCondition(False) - s.setCompartment('dummy_compartment') - s.setConstant(False) - - return document, dummy_sbml_model - - -@log_execution_time('Importing PEtab model', logger) -def import_model_pysb( - petab_problem: PysbPetabProblem, - model_output_dir: Optional[Union[str, Path]] = None, - verbose: Optional[Union[bool, int]] = True, - model_name: Optional[str] = None, - **kwargs -) -> None: - """ - Create AMICI model from PySB-PEtab problem - - :param petab_problem: - PySB PEtab problem - - :param model_output_dir: - Directory to write the model code to. Will be created if doesn't - exist. Defaults to current directory. - - :param verbose: - Print/log extra information. 
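# A minimal sketch of the dummy-parameter creation in create_dummy_sbml()
# above, assuming libsbml is available: each observable gets a constant
# 'noiseParameter1_<id>' placeholder so that PEtab parameter mapping has an
# SBML entity to attach to. The observable ids are hypothetical.
import libsbml

document = libsbml.SBMLDocument(3, 1)
dummy_model = document.createModel()
for observable_id in ['obs_a', 'obs_b']:
    p = dummy_model.createParameter()
    p.setId(f"noiseParameter1_{observable_id}")
    p.setConstant(True)
    p.setValue(0.0)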
- - :param model_name: - Name of the generated model module - - :param kwargs: - Additional keyword arguments to be passed to - :meth:`amici.pysb_import.pysb2amici`. - """ - set_log_level(logger, verbose) - - logger.info("Importing model ...") - - observable_table = petab_problem.observable_df - pysb_model = petab_problem.pysb_model - - # For pysb, we only allow parameters in the condition table - # those must be pysb model parameters (either natively, or output - # parameters from measurement or condition table that have been added in - # PysbPetabProblem) - model_parameters = [p.name for p in pysb_model.parameters] - for x in petab_problem.condition_df.columns: - if x == CONDITION_NAME: - continue - - if x not in model_parameters: - raise NotImplementedError( - "For PySB PEtab import, only model parameters, but no states " - "or compartments are allowed in the condition table." - f"Offending column: {x}" - ) - - constant_parameters = petab_import.get_fixed_parameters( - petab_problem) - - if observable_table is None: - observables = None - sigmas = None - noise_distrs = None - else: - observables = [expr.name for expr in pysb_model.expressions - if expr.name in observable_table.index] - - sigmas = {obs_id: f"{obs_id}_sigma" for obs_id in observables} - - noise_distrs = petab_import.petab_noise_distributions_to_amici( - observable_table) - - from amici.pysb_import import pysb2amici - pysb2amici(model=pysb_model, - output_dir=model_output_dir, - model_name=model_name, - verbose=True, - observables=observables, - sigmas=sigmas, - constant_parameters=constant_parameters, - noise_distributions=noise_distrs, - **kwargs) diff --git a/python/amici/petab_objective.py b/python/amici/petab_objective.py deleted file mode 100644 index eb0f7ec6cf..0000000000 --- a/python/amici/petab_objective.py +++ /dev/null @@ -1,808 +0,0 @@ -""" -PEtab Objective ---------------- -Functionality related to running simulations or evaluating the objective -function as defined by a PEtab problem -""" - -import copy -import logging -import numbers -from typing import (List, Sequence, Optional, Dict, Tuple, Union, Any, - Collection, Iterator) - -import amici -from amici.sbml_import import get_species_initial -import libsbml -import numpy as np -import pandas as pd -import petab -import sympy as sp -from petab.C import * # noqa: F403 - -from . import AmiciModel, AmiciExpData -from .logging import get_logger, log_execution_time -from .petab_import import PREEQ_INDICATOR_ID, element_is_state -from .parameter_mapping import ( - fill_in_parameters, ParameterMappingForCondition, ParameterMapping) - -logger = get_logger(__name__) - - -# string constant definitions -LLH = 'llh' -SLLH = 'sllh' -FIM = 'fim' -S2LLH = 's2llh' -RES = 'res' -SRES = 'sres' -RDATAS = 'rdatas' - - -@log_execution_time('Simulating PEtab model', logger) -def simulate_petab( - petab_problem: petab.Problem, - amici_model: AmiciModel, - solver: Optional[amici.Solver] = None, - problem_parameters: Optional[Dict[str, float]] = None, - simulation_conditions: Union[pd.DataFrame, Dict] = None, - edatas: List[AmiciExpData] = None, - parameter_mapping: ParameterMapping = None, - scaled_parameters: Optional[bool] = False, - log_level: int = logging.WARNING, - num_threads: int = 1, - failfast: bool = True, -) -> Dict[str, Any]: - """Simulate PEtab model. - - :param petab_problem: - PEtab problem to work on. - :param amici_model: - AMICI Model assumed to be compatible with ``petab_problem``. - :param solver: - An AMICI solver. Will use default options if None. 
-    :param problem_parameters:
-        Run simulation with these parameters. If None, PEtab `nominalValues`
-        will be used. To be provided as dict, mapping PEtab problem
-        parameters to SBML IDs.
-    :param simulation_conditions:
-        Result of :py:func:`petab.get_simulation_conditions`. Can be provided
-        to save time if this has been obtained before.
-        Not required if ``edatas`` and ``parameter_mapping`` are provided.
-    :param edatas:
-        Experimental data. Parameters are inserted in-place for simulation.
-    :param parameter_mapping:
-        Optional precomputed PEtab parameter mapping for efficiency, as
-        generated by :py:func:`create_parameter_mapping`.
-    :param scaled_parameters:
-        If ``True``, ``problem_parameters`` are assumed to be on the scale
-        provided in the PEtab parameter table and will be unscaled.
-        If ``False``, they are assumed to be in linear scale.
-    :param log_level:
-        Log level, see :mod:`amici.logging` module.
-    :param num_threads:
-        Number of threads to use for simulating multiple conditions
-        (only used if compiled with OpenMP).
-    :param failfast:
-        Returns as soon as an integration failure is encountered, skipping
-        any remaining simulations.
-
-    :return:
-        Dictionary of
-
-        * cost function value (``LLH``),
-        * list of :class:`amici.amici.ReturnData` (``RDATAS``),
-
-        corresponding to the different simulation conditions.
-        For ordering of simulation conditions, see
-        :meth:`petab.Problem.get_simulation_conditions_from_measurement_df`.
-    """
-    logger.setLevel(log_level)
-
-    if solver is None:
-        solver = amici_model.getSolver()
-
-    # Get parameters
-    if problem_parameters is None:
-        # Use PEtab nominal values as default
-        problem_parameters = {t.Index: getattr(t, NOMINAL_VALUE) for t in
-                              petab_problem.parameter_df.itertuples()}
-        if scaled_parameters:
-            raise NotImplementedError(
-                "scaled_parameters=True in combination with "
-                "problem_parameters=None is currently not supported.")
-
-    # number of amici simulations will be number of unique
-    # (preequilibrationConditionId, simulationConditionId) pairs.
-    # Can be optimized by checking for identical condition vectors.
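# A minimal sketch of the nominal-value fallback above: when no
# problem_parameters are passed, values are taken from the PEtab parameter
# table. The table is a hypothetical stand-in.
import pandas as pd

parameter_df = pd.DataFrame(
    {'nominalValue': [0.1, 2.0]},
    index=pd.Index(['k1', 'k2'], name='parameterId'),
)
problem_parameters = {t.Index: getattr(t, 'nominalValue')
                      for t in parameter_df.itertuples()}
# -> {'k1': 0.1, 'k2': 2.0}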
-    if simulation_conditions is None and parameter_mapping is None \
-            and edatas is None:
-        simulation_conditions = \
-            petab_problem.get_simulation_conditions_from_measurement_df()
-
-    # Get parameter mapping
-    if parameter_mapping is None:
-        parameter_mapping = create_parameter_mapping(
-            petab_problem=petab_problem,
-            simulation_conditions=simulation_conditions,
-            scaled_parameters=scaled_parameters,
-            amici_model=amici_model)
-
-    # Get edatas
-    if edatas is None:
-        # Generate ExpData with all condition-specific information
-        edatas = create_edatas(
-            amici_model=amici_model,
-            petab_problem=petab_problem,
-            simulation_conditions=simulation_conditions)
-
-    # Fill parameters in ExpDatas (in-place)
-    fill_in_parameters(
-        edatas=edatas,
-        problem_parameters=problem_parameters,
-        scaled_parameters=scaled_parameters,
-        parameter_mapping=parameter_mapping,
-        amici_model=amici_model)
-
-    # Simulate
-    rdatas = amici.runAmiciSimulations(
-        amici_model, solver, edata_list=edatas,
-        num_threads=num_threads, failfast=failfast)
-
-    # Compute total llh
-    llh = sum(rdata['llh'] for rdata in rdatas)
-
-    # Log results
-    sim_cond = petab_problem.get_simulation_conditions_from_measurement_df()
-    for i, rdata in enumerate(rdatas):
-        sim_cond_id = "N/A" if sim_cond.empty else sim_cond.iloc[i, :].values
-        logger.debug(
-            f"Condition: {sim_cond_id}, status: {rdata['status']}, "
-            f"llh: {rdata['llh']}"
-        )
-
-    return {
-        LLH: llh,
-        RDATAS: rdatas
-    }
-
-
-def create_parameterized_edatas(
-        amici_model: AmiciModel,
-        petab_problem: petab.Problem,
-        problem_parameters: Dict[str, numbers.Number],
-        scaled_parameters: bool = False,
-        parameter_mapping: ParameterMapping = None,
-        simulation_conditions: Union[pd.DataFrame, Dict] = None,
-) -> List[amici.ExpData]:
-    """Create list of :class:`amici.ExpData` objects with parameters filled in.
-
-    :param amici_model:
-        AMICI Model assumed to be compatible with ``petab_problem``.
-    :param petab_problem:
-        PEtab problem to work on.
-    :param problem_parameters:
-        Run simulation with these parameters. If None, PEtab `nominalValues`
-        will be used. To be provided as a dict, mapping PEtab problem
-        parameters to SBML IDs.
-    :param scaled_parameters:
-        If ``True``, ``problem_parameters`` are assumed to be on the scale
-        provided in the PEtab parameter table and will be unscaled.
-        If ``False``, they are assumed to be in linear scale.
-    :param parameter_mapping:
-        Optional precomputed PEtab parameter mapping for efficiency, as
-        generated by :func:`create_parameter_mapping`.
-    :param simulation_conditions:
-        Result of :func:`petab.get_simulation_conditions`. Can be provided to
-        save time if this has been obtained before.
-
-    :return:
-        List with one :class:`amici.amici.ExpData` per simulation condition,
-        with filled in timepoints, data and parameters.
-    """
-    # number of amici simulations will be number of unique
-    # (preequilibrationConditionId, simulationConditionId) pairs.
-    # Can be optimized by checking for identical condition vectors.
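# A minimal sketch for this function (parameter IDs and values are
# hypothetical and must match the PEtab parameter table at hand):
#
# >>> solver = amici_model.getSolver()
# >>> edatas = create_parameterized_edatas(
# ...     amici_model, petab_problem,
# ...     problem_parameters={'k1': 0.1, 'k2': 2.0})
# >>> rdatas = amici.runAmiciSimulations(amici_model, solver,
# ...                                    edata_list=edatas)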
-    if simulation_conditions is None:
-        simulation_conditions = \
-            petab_problem.get_simulation_conditions_from_measurement_df()
-
-    # Get parameter mapping
-    if parameter_mapping is None:
-        parameter_mapping = create_parameter_mapping(
-            petab_problem=petab_problem,
-            simulation_conditions=simulation_conditions,
-            scaled_parameters=scaled_parameters,
-            amici_model=amici_model)
-
-    # Generate ExpData with all condition-specific information
-    edatas = create_edatas(
-        amici_model=amici_model,
-        petab_problem=petab_problem,
-        simulation_conditions=simulation_conditions)
-
-    # Fill parameters in ExpDatas (in-place)
-    fill_in_parameters(
-        edatas=edatas,
-        problem_parameters=problem_parameters,
-        scaled_parameters=scaled_parameters,
-        parameter_mapping=parameter_mapping,
-        amici_model=amici_model)
-
-    return edatas
-
-
-def create_parameter_mapping(
-        petab_problem: petab.Problem,
-        simulation_conditions: Union[pd.DataFrame, List[Dict]],
-        scaled_parameters: bool,
-        amici_model: AmiciModel,
-        **parameter_mapping_kwargs,
-) -> ParameterMapping:
-    """Generate AMICI-specific parameter mapping.
-
-    :param petab_problem:
-        PEtab problem
-    :param simulation_conditions:
-        Result of :func:`petab.get_simulation_conditions`. Can be provided to
-        save time if this has been obtained before.
-    :param scaled_parameters:
-        If ``True``, ``problem_parameters`` are assumed to be on the scale
-        provided in the PEtab parameter table and will be unscaled. If
-        ``False``, they are assumed to be in linear scale.
-    :param amici_model:
-        AMICI model.
-    :param parameter_mapping_kwargs:
-        Optional keyword arguments passed to
-        :func:`petab.get_optimization_to_simulation_parameter_mapping`.
-        To allow changing fixed PEtab problem parameters (``estimate=0``),
-        use ``fill_fixed_parameters=False``.
-    :return:
-        List of the parameter mappings.
-    """
-    if simulation_conditions is None:
-        simulation_conditions = \
-            petab_problem.get_simulation_conditions_from_measurement_df()
-    if isinstance(simulation_conditions, list):
-        simulation_conditions = pd.DataFrame(data=simulation_conditions)
-
-    # Because AMICI globalizes all local parameters during model import,
-    # we need to do that here as well to prevent parameter mapping errors
-    # (PEtab currently does not care about SBML LocalParameters)
-    if petab_problem.sbml_document:
-        converter_config = libsbml.SBMLLocalParameterConverter() \
-            .getDefaultProperties()
-        petab_problem.sbml_document.convert(converter_config)
-    else:
-        logger.debug("No petab_problem.sbml_document is set. Cannot convert "
-                     "SBML LocalParameters. If the model contains "
-                     "LocalParameters, parameter mapping will fail.")
-
-    default_parameter_mapping_kwargs = {
-        "warn_unmapped": False,
-        "scaled_parameters": scaled_parameters,
-        "allow_timepoint_specific_numeric_noise_parameters":
-            not petab.lint.observable_table_has_nontrivial_noise_formula(
-                petab_problem.observable_df),
-    }
-    if parameter_mapping_kwargs is None:
-        parameter_mapping_kwargs = {}
-
-    prelim_parameter_mapping = \
-        petab.get_optimization_to_simulation_parameter_mapping(
-            condition_df=petab_problem.condition_df,
-            measurement_df=petab_problem.measurement_df,
-            parameter_df=petab_problem.parameter_df,
-            observable_df=petab_problem.observable_df,
-            model=petab_problem.model,
-            **dict(default_parameter_mapping_kwargs,
-                   **parameter_mapping_kwargs)
-        )
-
-    parameter_mapping = ParameterMapping()
-    for (_, condition), prelim_mapping_for_condition in \
-            zip(simulation_conditions.iterrows(), prelim_parameter_mapping):
-        mapping_for_condition = create_parameter_mapping_for_condition(
-            prelim_mapping_for_condition, condition, petab_problem,
-            amici_model)
-        parameter_mapping.append(mapping_for_condition)
-
-    return parameter_mapping
-
-
-def create_parameter_mapping_for_condition(
-        parameter_mapping_for_condition: petab.ParMappingDictQuadruple,
-        condition: Union[pd.Series, Dict],
-        petab_problem: petab.Problem,
-        amici_model: AmiciModel
-) -> ParameterMappingForCondition:
-    """Generate AMICI-specific parameter mapping for a condition.
-
-    :param parameter_mapping_for_condition:
-        Preliminary parameter mapping for condition.
-    :param condition:
-        :class:`pandas.DataFrame` row with ``preequilibrationConditionId`` and
-        ``simulationConditionId``.
-    :param petab_problem:
-        Underlying PEtab problem.
-    :param amici_model:
-        AMICI model.
-
-    :return:
-        The parameter and parameter scale mappings, for fixed
-        preequilibration, fixed simulation, and variable simulation
-        parameters, and then the respective scalings.
-    """
-    (condition_map_preeq, condition_map_sim, condition_scale_map_preeq,
-     condition_scale_map_sim) = parameter_mapping_for_condition
-    logger.debug(f"PEtab mapping: {parameter_mapping_for_condition}")
-
-    if len(condition_map_preeq) != len(condition_scale_map_preeq) \
-            or len(condition_map_sim) != len(condition_scale_map_sim):
-        raise AssertionError("Number of parameters and number of parameter "
-                             "scales do not match.")
-    if len(condition_map_preeq) \
-            and len(condition_map_preeq) != len(condition_map_sim):
-        logger.debug(f"Preequilibration parameter map: {condition_map_preeq}")
-        logger.debug(f"Simulation parameter map: {condition_map_sim}")
-        raise AssertionError("Number of parameters for preequilibration "
-                             "and simulation do not match.")
-
-    ##########################################################################
-    # initial states
-    # Initial states have been set during model import based on the SBML
-    # model. If initial states were overwritten in the PEtab condition table,
-    # they are applied here.
-    # During model generation, parameters for initial concentrations and
-    # respective initial assignments have been created for the relevant
-    # species; here we add these parameters to the parameter mapping.
-    # In the absence of preequilibration this could also be handled via
-    # ExpData.x0, but in the case of preequilibration this would not allow
-    # for resetting initial states.
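# Sketch of the intended mechanics (the concrete ID behind
# PREEQ_INDICATOR_ID is assigned during model import; parameter IDs follow
# the initial_{element_id}_preeq / initial_{element_id}_sim scheme used
# below): for a species A overridden in the condition table, the initial
# assignment created during import is roughly
#
#     A(0) = indicator * initial_A_preeq + (1 - indicator) * initial_A_sim
#
# so that setting the indicator to 1 for preequilibration and to 0 for
# simulation selects the respective initial value.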
-
-    states_in_condition_table = [
-        col for col in petab_problem.condition_df
-        if element_is_state(petab_problem.sbml_model, col)
-    ]
-    if states_in_condition_table:
-        # set indicator fixed parameter for preeq
-        # (we expect here that this parameter was added during import and
-        # that it was not added by the user with a different meaning...)
-        if condition_map_preeq:
-            condition_map_preeq[PREEQ_INDICATOR_ID] = 1.0
-            condition_scale_map_preeq[PREEQ_INDICATOR_ID] = LIN
-
-        condition_map_sim[PREEQ_INDICATOR_ID] = 0.0
-        condition_scale_map_sim[PREEQ_INDICATOR_ID] = LIN
-
-        def _set_initial_state(condition_id, element_id, init_par_id,
-                               par_map, scale_map):
-            value = petab.to_float_if_float(
-                petab_problem.condition_df.loc[condition_id, element_id])
-            if pd.isna(value):
-                element = petab_problem.sbml_model.getElementBySId(element_id)
-                type_code = element.getTypeCode()
-                initial_assignment = petab_problem.sbml_model\
-                    .getInitialAssignmentBySymbol(element_id)
-                if initial_assignment:
-                    initial_assignment = sp.sympify(
-                        libsbml.formulaToL3String(initial_assignment.getMath())
-                    )
-                if type_code == libsbml.SBML_SPECIES:
-                    value = get_species_initial(element) \
-                        if initial_assignment is None else initial_assignment
-                elif type_code == libsbml.SBML_PARAMETER:
-                    value = element.getValue()\
-                        if initial_assignment is None else initial_assignment
-                elif type_code == libsbml.SBML_COMPARTMENT:
-                    value = element.getSize()\
-                        if initial_assignment is None else initial_assignment
-                else:
-                    raise NotImplementedError(
-                        f"Don't know how to handle {element_id} in "
-                        "condition table.")
-
-                try:
-                    value = float(value)
-                except (ValueError, TypeError):
-                    if sp.nsimplify(value).is_Atom:
-                        # Get rid of multiplication with one
-                        value = sp.nsimplify(value)
-                    else:
-                        raise NotImplementedError(
-                            "Cannot handle non-trivial initial state "
-                            f"expression for {element_id}: {value}")
-                    # this should be a parameter ID
-                    value = str(value)
-                logger.debug(f'The species {element_id} has no initial value '
-                             f'defined for the condition {condition_id} in '
-                             'the PEtab conditions table. The initial value is '
-                             f'now set to {value}, which is the initial value '
-                             'defined in the SBML model.')
-            par_map[init_par_id] = value
-            if isinstance(value, float):
-                # numeric initial state
-                scale_map[init_par_id] = petab.LIN
-            else:
-                # parametric initial state
-                scale_map[init_par_id] = \
-                    petab_problem.parameter_df[PARAMETER_SCALE]\
-                    .get(value, petab.LIN)
-
-        for element_id in states_in_condition_table:
-            # for preequilibration
-            init_par_id = f'initial_{element_id}_preeq'
-            if condition.get(PREEQUILIBRATION_CONDITION_ID):
-                condition_id = condition[PREEQUILIBRATION_CONDITION_ID]
-                _set_initial_state(
-                    condition_id, element_id, init_par_id, condition_map_preeq,
-                    condition_scale_map_preeq)
-            else:
-                # need to set a dummy value for the preeq parameter anyway, as
-                # it is expected below (set to 0, not nan, because it will be
-                # multiplied with the indicator variable in the initial
-                # assignment)
-                condition_map_sim[init_par_id] = 0.0
-                condition_scale_map_sim[init_par_id] = LIN
-
-            # for simulation
-            condition_id = condition[SIMULATION_CONDITION_ID]
-            init_par_id = f'initial_{element_id}_sim'
-            _set_initial_state(
-                condition_id, element_id, init_par_id, condition_map_sim,
-                condition_scale_map_sim)
-
-    ##########################################################################
-    # separate fixed and variable AMICI parameters, because we may have
-    # different fixed parameters for preeq and sim condition, but we cannot
-    # have different variable parameters. Without splitting,
-    # merge_preeq_and_sim_pars_condition below may fail.
-    # TODO: This can be done already in parameter mapping creation.
-    variable_par_ids = amici_model.getParameterIds()
-    fixed_par_ids = amici_model.getFixedParameterIds()
-
-    condition_map_preeq_var, condition_map_preeq_fix = \
-        subset_dict(condition_map_preeq, variable_par_ids, fixed_par_ids)
-
-    condition_scale_map_preeq_var, condition_scale_map_preeq_fix = \
-        subset_dict(condition_scale_map_preeq, variable_par_ids, fixed_par_ids)
-
-    condition_map_sim_var, condition_map_sim_fix = \
-        subset_dict(condition_map_sim, variable_par_ids, fixed_par_ids)
-
-    condition_scale_map_sim_var, condition_scale_map_sim_fix = \
-        subset_dict(condition_scale_map_sim, variable_par_ids, fixed_par_ids)
-
-    logger.debug("Fixed parameters preequilibration: "
-                 f"{condition_map_preeq_fix}")
-    logger.debug("Fixed parameters simulation: "
-                 f"{condition_map_sim_fix}")
-    logger.debug("Variable parameters preequilibration: "
-                 f"{condition_map_preeq_var}")
-    logger.debug("Variable parameters simulation: "
-                 f"{condition_map_sim_var}")
-
-    petab.merge_preeq_and_sim_pars_condition(
-        condition_map_preeq_var, condition_map_sim_var,
-        condition_scale_map_preeq_var, condition_scale_map_sim_var,
-        condition)
-    logger.debug(f"Merged: {condition_map_sim_var}")
-
-    parameter_mapping_for_condition = ParameterMappingForCondition(
-        map_preeq_fix=condition_map_preeq_fix,
-        map_sim_fix=condition_map_sim_fix,
-        map_sim_var=condition_map_sim_var,
-        scale_map_preeq_fix=condition_scale_map_preeq_fix,
-        scale_map_sim_fix=condition_scale_map_sim_fix,
-        scale_map_sim_var=condition_scale_map_sim_var
-    )
-
-    return parameter_mapping_for_condition
-
-
-def create_edatas(
-        amici_model: AmiciModel,
-        petab_problem: petab.Problem,
-        simulation_conditions: Union[pd.DataFrame, Dict] = None,
-) -> List[amici.ExpData]:
-    """Create list of :class:`amici.amici.ExpData` objects for PEtab problem.
-
-    :param amici_model:
-        AMICI model.
-    :param petab_problem:
-        Underlying PEtab problem.
-    :param simulation_conditions:
-        Result of :func:`petab.get_simulation_conditions`. Can be provided to
-        save time if this has been obtained before.
-
-    :return:
-        List with one :class:`amici.amici.ExpData` per simulation condition,
-        with filled in timepoints and data.
-    """
-    if simulation_conditions is None:
-        simulation_conditions = \
-            petab_problem.get_simulation_conditions_from_measurement_df()
-
-    observable_ids = amici_model.getObservableIds()
-
-    measurement_groupvar = [petab.SIMULATION_CONDITION_ID]
-    if petab.PREEQUILIBRATION_CONDITION_ID in simulation_conditions:
-        measurement_groupvar.append(petab.PREEQUILIBRATION_CONDITION_ID)
-    measurement_dfs = dict(list(
-        petab_problem.measurement_df.groupby(measurement_groupvar)
-    ))
-
-    edatas = []
-    for _, condition in simulation_conditions.iterrows():
-        # Create amici.ExpData for each simulation
-        if petab.PREEQUILIBRATION_CONDITION_ID in condition:
-            measurement_index = (
-                condition.get(petab.SIMULATION_CONDITION_ID),
-                condition.get(petab.PREEQUILIBRATION_CONDITION_ID)
-            )
-        else:
-            measurement_index = condition.get(petab.SIMULATION_CONDITION_ID)
-        edata = create_edata_for_condition(
-            condition=condition,
-            amici_model=amici_model,
-            measurement_df=measurement_dfs[measurement_index],
-            petab_problem=petab_problem,
-            observable_ids=observable_ids,
-        )
-        edatas.append(edata)
-
-    return edatas
-
-
-def create_edata_for_condition(
-        condition: Union[Dict, pd.Series],
-        measurement_df: pd.DataFrame,
-        amici_model: AmiciModel,
-        petab_problem: petab.Problem,
-        observable_ids: List[str],
-) -> amici.ExpData:
-    """Get :class:`amici.amici.ExpData` for the given PEtab condition.
-
-    Sets timepoints, observed data and sigmas.
-
-    :param condition:
-        :class:`pandas.DataFrame` row with ``preequilibrationConditionId`` and
-        ``simulationConditionId``.
-    :param measurement_df:
-        :class:`pandas.DataFrame` with measurements for the given condition.
-    :param amici_model:
-        AMICI model
-    :param petab_problem:
-        Underlying PEtab problem
-    :param observable_ids:
-        List of observable IDs
-
-    :return:
-        ExpData instance.
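    A usage sketch (``condition`` being one row of the simulation conditions
    table; ``solver`` and the remaining arguments as documented above):

    >>> edata = create_edata_for_condition(
    ...     condition, measurement_df, amici_model, petab_problem,
    ...     observable_ids=amici_model.getObservableIds())
    >>> rdata = amici.runAmiciSimulation(amici_model, solver, edata)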
- """ - if amici_model.nytrue != len(observable_ids): - raise AssertionError("Number of AMICI model observables does not " - "match number of PEtab observables.") - - # create an ExpData object - edata = amici.ExpData(amici_model) - edata.id = condition[SIMULATION_CONDITION_ID] - if condition.get(PREEQUILIBRATION_CONDITION_ID): - edata.id += "+" + condition.get(PREEQUILIBRATION_CONDITION_ID) - ########################################################################## - # enable initial parameters reinitialization - states_in_condition_table = [ - col for col in petab_problem.condition_df - if not pd.isna(petab_problem.condition_df.loc[ - condition[SIMULATION_CONDITION_ID], col]) - and element_is_state(petab_problem.sbml_model, col) - ] - if condition.get(PREEQUILIBRATION_CONDITION_ID) \ - and states_in_condition_table: - state_ids = amici_model.getStateIds() - state_idx_reinitalization = [state_ids.index(s) - for s in states_in_condition_table] - edata.reinitialization_state_idxs_sim = state_idx_reinitalization - logger.debug("Enabling state reinitialization for condition " - f"{condition.get(PREEQUILIBRATION_CONDITION_ID, '')} - " - f"{condition.get(SIMULATION_CONDITION_ID)} " - f"{states_in_condition_table}") - - ########################################################################## - # timepoints - - # find replicate numbers of time points - timepoints_w_reps = _get_timepoints_with_replicates( - df_for_condition=measurement_df) - edata.setTimepoints(timepoints_w_reps) - - ########################################################################## - # measurements and sigmas - y, sigma_y = _get_measurements_and_sigmas( - df_for_condition=measurement_df, timepoints_w_reps=timepoints_w_reps, - observable_ids=observable_ids) - edata.setObservedData(y.flatten()) - edata.setObservedDataStdDev(sigma_y.flatten()) - - return edata - - -def subset_dict(full: Dict[Any, Any], - *args: Collection[Any]) -> Iterator[Dict[Any, Any]]: - """Get subset of dictionary based on provided keys - - :param full: - Dictionary to subset - :param args: - Collections of keys to be contained in the different subsets - - :return: - subsetted dictionary - """ - for keys in args: - yield {key: val for (key, val) in full.items() if key in keys} - - -def _get_timepoints_with_replicates( - df_for_condition: pd.DataFrame) -> List[numbers.Number]: - """ - Get list of timepoints including replicate measurements - - :param df_for_condition: - PEtab measurement table subset for a single condition. - - :return: - Sorted list of timepoints, including multiple timepoints accounting - for replicate measurements. - """ - # create sorted list of all timepoints for which measurements exist - timepoints = sorted(df_for_condition[TIME].unique().astype(float)) - - # find replicate numbers of time points - timepoints_w_reps = [] - for time in timepoints: - # subselect for time - df_for_time = df_for_condition[ - df_for_condition.time.astype(float) == time - ] - # rep number is maximum over rep numbers for observables - n_reps = max(df_for_time.groupby( - [OBSERVABLE_ID, TIME]).size()) - # append time point n_rep times - timepoints_w_reps.extend([time] * n_reps) - - return timepoints_w_reps - - -def _get_measurements_and_sigmas( - df_for_condition: pd.DataFrame, - timepoints_w_reps: Sequence[numbers.Number], - observable_ids: Sequence[str], - ) -> Tuple[np.array, np.array]: - """ - Get measurements and sigmas - - Generate arrays with measurements and sigmas in AMICI format from a - PEtab measurement table subset for a single condition. 
- - :param df_for_condition: - Subset of PEtab measurement table for one condition - - :param timepoints_w_reps: - Timepoints for which there exist measurements, including replicates - - :param observable_ids: - List of observable IDs for mapping IDs to indices. - - :return: - arrays for measurement and sigmas - """ - # prepare measurement matrix - y = np.full(shape=(len(timepoints_w_reps), len(observable_ids)), - fill_value=np.nan) - # prepare sigma matrix - sigma_y = y.copy() - - timepoints = sorted(df_for_condition[TIME].unique().astype(float)) - - for time in timepoints: - # subselect for time - df_for_time = df_for_condition[df_for_condition[TIME] == time] - time_ix_0 = timepoints_w_reps.index(time) - - # remember used time indices for each observable - time_ix_for_obs_ix = {} - - # iterate over measurements - for _, measurement in df_for_time.iterrows(): - # extract observable index - observable_ix = observable_ids.index(measurement[OBSERVABLE_ID]) - - # update time index for observable - if observable_ix in time_ix_for_obs_ix: - time_ix_for_obs_ix[observable_ix] += 1 - else: - time_ix_for_obs_ix[observable_ix] = time_ix_0 - - # fill observable and possibly noise parameter - y[time_ix_for_obs_ix[observable_ix], - observable_ix] = measurement[MEASUREMENT] - if isinstance(measurement.get(NOISE_PARAMETERS, None), - numbers.Number): - sigma_y[time_ix_for_obs_ix[observable_ix], - observable_ix] = measurement[NOISE_PARAMETERS] - return y, sigma_y - - -def rdatas_to_measurement_df( - rdatas: Sequence[amici.ReturnData], - model: AmiciModel, - measurement_df: pd.DataFrame) -> pd.DataFrame: - """ - Create a measurement dataframe in the PEtab format from the passed - ``rdatas`` and own information. - - :param rdatas: - A sequence of rdatas with the ordering of - :func:`petab.get_simulation_conditions`. - - :param model: - AMICI model used to generate ``rdatas``. - - :param measurement_df: - PEtab measurement table used to generate ``rdatas``. - - :return: - A dataframe built from the rdatas in the format of ``measurement_df``. - """ - simulation_conditions = petab.get_simulation_conditions( - measurement_df) - - observable_ids = model.getObservableIds() - rows = [] - # iterate over conditions - for (_, condition), rdata in zip(simulation_conditions.iterrows(), rdatas): - # current simulation matrix - y = rdata.y - # time array used in rdata - t = list(rdata.ts) - - # extract rows for condition - cur_measurement_df = petab.get_rows_for_condition( - measurement_df, condition) - - # iterate over entries for the given condition - # note: this way we only generate a dataframe entry for every - # row that existed in the original dataframe. if we want to - # e.g. have also timepoints non-existent in the original file, - # we need to instead iterate over the rdata['y'] entries - for _, row in cur_measurement_df.iterrows(): - # copy row - row_sim = copy.deepcopy(row) - - # extract simulated measurement value - timepoint_idx = t.index(row[TIME]) - observable_idx = observable_ids.index(row[OBSERVABLE_ID]) - measurement_sim = y[timepoint_idx, observable_idx] - - # change measurement entry - row_sim[MEASUREMENT] = measurement_sim - - rows.append(row_sim) - - return pd.DataFrame(rows) - - -def rdatas_to_simulation_df( - rdatas: Sequence[amici.ReturnData], - model: AmiciModel, - measurement_df: pd.DataFrame) -> pd.DataFrame: - """Create a PEtab simulation dataframe from - :class:`amici.amici.ReturnData` s. 
-
-    See :func:`rdatas_to_measurement_df` for details, except that model
-    outputs will appear in column ``simulation`` instead of
-    ``measurement``."""
-
-    df = rdatas_to_measurement_df(rdatas=rdatas, model=model,
-                                  measurement_df=measurement_df)
-
-    return df.rename(columns={MEASUREMENT: SIMULATION})
diff --git a/python/amici/petab_simulate.py b/python/amici/petab_simulate.py
deleted file mode 100644
index 71744ff51b..0000000000
--- a/python/amici/petab_simulate.py
+++ /dev/null
@@ -1,110 +0,0 @@
-"""
-PEtab Simulate
---------------
-Functionality related to the use of AMICI for simulation with PEtab's
-Simulator class.
-
-Use cases:
-
-- generate data for use with PEtab's plotting methods
-- generate synthetic data
-"""
-
-import inspect
-import sys
-from typing import Callable
-
-import pandas as pd
-
-from amici import SensitivityMethod_none
-from amici import AmiciModel
-from amici.petab_import import import_petab_problem
-from amici.petab_objective import (simulate_petab,
-                                   rdatas_to_measurement_df,
-                                   RDATAS)
-import petab
-
-AMICI_MODEL = 'amici_model'
-AMICI_SOLVER = 'solver'
-MODEL_NAME = 'model_name'
-MODEL_OUTPUT_DIR = 'model_output_dir'
-
-PETAB_PROBLEM = 'petab_problem'
-
-
-class PetabSimulator(petab.simulate.Simulator):
-    """Implementation of the PEtab `Simulator` class that uses AMICI."""
-    def __init__(self, *args, amici_model: AmiciModel = None, **kwargs):
-        super().__init__(*args, **kwargs)
-        self.amici_model = amici_model
-
-    def simulate_without_noise(self, **kwargs) -> pd.DataFrame:
-        """
-        See :py:func:`petab.simulate.Simulator.simulate()` docstring.
-
-        Additional keyword arguments can be supplied to specify arguments for
-        the AMICI PEtab import, simulate, and export methods. See the
-        docstrings for the respective methods for argument options:
-        - :py:func:`amici.petab_import.import_petab_problem`, and
-        - :py:func:`amici.petab_objective.simulate_petab`.
-
-        Note that some arguments are expected to have already been specified
-        in the Simulator constructor (including the PEtab problem).
-        """
-        if AMICI_MODEL in {*kwargs, *dir(self)} and (
-                any(k in kwargs for k in
-                    inspect.signature(import_petab_problem).parameters)):
-            print('Arguments related to the PEtab import are unused if '
-                  f'`{AMICI_MODEL}` is specified, or the '
-                  '`PetabSimulator.simulate()` method was previously called.')
-
-        kwargs[PETAB_PROBLEM] = self.petab_problem
-
-        # The AMICI model instance for the PEtab problem is saved in the
-        # state, such that it need not be supplied with each request for
-        # simulated data. Any user-supplied AMICI model will overwrite the
-        # model saved in the state.
-        if AMICI_MODEL not in kwargs:
-            if self.amici_model is None:
-                if MODEL_NAME not in kwargs:
-                    kwargs[MODEL_NAME] = AMICI_MODEL
-                    # If the model name is the name of a module that is
-                    # already cached, it can cause issues during import.
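# A sketch of the collision this avoids (module name hypothetical):
#
# >>> 'amici_model' in sys.modules   # taken by an earlier simulate() call
# True
# >>> # the loop below appends random digits drawn from self.rng,
# >>> # e.g. yielding 'amici_model3', until the name no longer
# >>> # shadows a cached module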
- while kwargs[MODEL_NAME] in sys.modules: - kwargs[MODEL_NAME] += str(self.rng.integers(10)) - if MODEL_OUTPUT_DIR not in kwargs: - kwargs[MODEL_OUTPUT_DIR] = self.working_dir - self.amici_model = subset_call(import_petab_problem, kwargs) - kwargs[AMICI_MODEL] = self.amici_model - self.amici_model = kwargs[AMICI_MODEL] - - if AMICI_SOLVER not in kwargs: - kwargs[AMICI_SOLVER] = self.amici_model.getSolver() - kwargs[AMICI_SOLVER].setSensitivityMethod( - SensitivityMethod_none) - - result = subset_call(simulate_petab, kwargs) - return rdatas_to_measurement_df(result[RDATAS], - self.amici_model, - self.petab_problem.measurement_df) - - -def subset_call(method: Callable, kwargs: dict): - """ - Helper function to call a method with the intersection of arguments in the - method signature and the supplied arguments. - - :param method: - The method to be called. - :param kwargs: - The argument superset as a dictionary, similar to `**kwargs` in method - signatures. - :return: - The output of `method`, called with the applicable arguments in - `kwargs`. - """ - method_args = inspect.signature(method).parameters - subset_kwargs = {k: v - for k, v in kwargs.items() - if k in method_args} - return method(**subset_kwargs) diff --git a/python/amici/plotting.py b/python/amici/plotting.py deleted file mode 100644 index d2917de9fe..0000000000 --- a/python/amici/plotting.py +++ /dev/null @@ -1,90 +0,0 @@ -""" -Plotting --------- -Plotting related functions -""" -from . import ReturnDataView, Model - -import matplotlib.pyplot as plt -from matplotlib.axes import Axes -from typing import Optional, Iterable - - -def plotStateTrajectories( - rdata: ReturnDataView, - state_indices: Optional[Iterable[int]] = None, - ax: Optional[Axes] = None, - model: Model = None -) -> None: - """ - Plot state trajectories - - :param rdata: - AMICI simulation results as returned by - :func:`amici.amici.runAmiciSimulation` - - :param state_indices: - Indices of states for which trajectories are to be plotted - - :param ax: - matplotlib Axes instance to plot into - - :param model: - amici model instance - """ - if not ax: - fig, ax = plt.subplots() - if not state_indices: - state_indices = range(rdata['x'].shape[1]) - for ix in state_indices: - if model is None: - label = f'$x_{{{ix}}}$' - elif model.getStateNames()[ix]: - label = model.getStateNames()[ix] - else: - label = model.getStateIds()[ix] - ax.plot(rdata['t'], rdata['x'][:, ix], label=label) - ax.set_xlabel('$t$') - ax.set_ylabel('$x(t)$') - ax.legend() - ax.set_title('State trajectories') - - -def plotObservableTrajectories( - rdata: ReturnDataView, - observable_indices: Optional[Iterable[int]] = None, - ax: Optional[Axes] = None, - model: Model = None -) -> None: - """ - Plot observable trajectories - - :param rdata: - AMICI simulation results as returned by - :func:`amici.amici.runAmiciSimulation` - - :param observable_indices: - Indices of observables for which trajectories are to be plotted - - :param ax: - matplotlib Axes instance to plot into - - :param model: - amici model instance - """ - if not ax: - fig, ax = plt.subplots() - if not observable_indices: - observable_indices = range(rdata['y'].shape[1]) - for iy in observable_indices: - if model is None: - label = f'$y_{{{iy}}}$' - elif model.getObservableNames()[iy]: - label = model.getObservableNames()[iy] - else: - label = model.getObservableIds()[iy] - ax.plot(rdata['t'], rdata['y'][:, iy], label=label) - ax.set_xlabel('$t$') - ax.set_ylabel('$y(t)$') - ax.legend() - ax.set_title('Observable trajectories') 
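As a usage sketch for the two plotting helpers above (``model`` and
``solver`` are assumed to be an existing AMICI model instance and its
solver; everything else is from this module or amici):

    import amici
    import numpy as np
    import matplotlib.pyplot as plt
    from amici.plotting import (plotStateTrajectories,
                                plotObservableTrajectories)

    # simulate on a time grid, then plot states and observables side by side
    model.setTimepoints(np.linspace(0, 10, 100))
    rdata = amici.runAmiciSimulation(model, solver)
    fig, (ax_x, ax_y) = plt.subplots(1, 2)
    plotStateTrajectories(rdata, ax=ax_x, model=model)
    plotObservableTrajectories(rdata, ax=ax_y, model=model)
    plt.show()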
diff --git a/python/amici/pysb_import.py b/python/amici/pysb_import.py deleted file mode 100644 index 0929283877..0000000000 --- a/python/amici/pysb_import.py +++ /dev/null @@ -1,1415 +0,0 @@ -""" -PySB Import ------------- -This module provides all necessary functionality to import a model specified -in the :class:`pysb.core.Model` format. -""" - -import itertools -import logging -import os -import sys -from pathlib import Path -from typing import (Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, - Union) - -import numpy as np -import pysb -import pysb.bng -import pysb.pattern -import sympy as sp - -from .import_utils import (_get_str_symbol_identifiers, - _parse_special_functions, - generate_measurement_symbol, - noise_distribution_to_cost_function, - noise_distribution_to_observable_transformation) -from .logging import get_logger, log_execution_time, set_log_level -from .ode_export import (Constant, Expression, LogLikelihoodY, ODEExporter, - ODEModel, Observable, Parameter, SigmaY, State) - -CL_Prototype = Dict[str, Dict[str, Any]] -ConservationLaw = Dict[str, Union[Dict, str, sp.Basic]] - -logger = get_logger(__name__, logging.ERROR) - - -def pysb2amici( - model: pysb.Model, - output_dir: Optional[Union[str, Path]] = None, - observables: List[str] = None, - constant_parameters: List[str] = None, - sigmas: Dict[str, str] = None, - noise_distributions: Optional[Dict[str, Union[str, Callable]]] = None, - verbose: Union[int, bool] = False, - assume_pow_positivity: bool = False, - compiler: str = None, - compute_conservation_laws: bool = True, - compile: bool = True, - simplify: Callable = lambda x: sp.powsimp(x, deep=True), - # Do not enable by default without testing. - # See https://github.com/AMICI-dev/AMICI/pull/1672 - cache_simplify: bool = False, - generate_sensitivity_code: bool = True, - model_name: Optional[str] = None, -): - r""" - Generate AMICI C++ files for the provided model. - - .. warning:: - **PySB models with Compartments** - - When importing a PySB model with ``pysb.Compartment``\ s, BioNetGen - scales reaction fluxes with the compartment size. Instead of using the - respective symbols, the compartment size Parameter or Expression is - evaluated when generating equations. This may lead to unexpected - results if the compartment size parameter is changed for AMICI - simulations. - - :param model: - pysb model, :attr:`pysb.Model.name` will determine the name of the - generated module - - :param output_dir: - see :meth:`amici.ode_export.ODEExporter.set_paths` - - :param observables: - list of :class:`pysb.core.Expression` or :class:`pysb.core.Observable` - names in the provided model that should be mapped to observables - - :param sigmas: - dict of :class:`pysb.core.Expression` names that should be mapped to - sigmas - - :param noise_distributions: - dict with names of observable Expressions as keys and a noise type - identifier, or a callable generating a custom noise formula string - (see :py:func:`amici.import_utils.noise_distribution_to_cost_function` - ). If nothing is passed for some observable id, a normal model is - assumed as default. 
-
-    :param constant_parameters:
-        list of :class:`pysb.core.Parameter` names that should be mapped as
-        fixed parameters
-
-    :param verbose: verbosity level for logging, True/False defaults to
-        :attr:`logging.DEBUG`/:attr:`logging.ERROR`
-
-    :param assume_pow_positivity:
-        if set to ``True``, a special pow function is used to avoid problems
-        with state variables that may become negative due to numerical
-        errors
-
-    :param compiler:
-        distutils/setuptools compiler selection to build the python
-        extension
-
-    :param compute_conservation_laws:
-        if set to ``True``, conservation laws are automatically computed and
-        applied such that the state-jacobian of the ODE right-hand-side has
-        full rank. This option should be set to ``True`` when using the Newton
-        algorithm to compute steady states
-
-    :param compile:
-        If ``True``, build the python module for the generated model. If
-        ``False``, just generate the source code.
-
-    :param simplify:
-        see :attr:`amici.ODEModel._simplify`
-
-    :param cache_simplify:
-        see :func:`amici.ODEModel.__init__`
-        Note that there are possible issues with PySB models:
-        https://github.com/AMICI-dev/AMICI/pull/1672
-
-    :param generate_sensitivity_code:
-        if set to ``False``, code for sensitivity computation will not be
-        generated
-
-    :param model_name:
-        Name for the generated model module. If None, :attr:`pysb.Model.name`
-        will be used.
-    """
-    if observables is None:
-        observables = []
-    if constant_parameters is None:
-        constant_parameters = []
-
-    if sigmas is None:
-        sigmas = {}
-
-    model_name = model_name or model.name
-
-    set_log_level(logger, verbose)
-    ode_model = ode_model_from_pysb_importer(
-        model, constant_parameters=constant_parameters,
-        observables=observables, sigmas=sigmas,
-        noise_distributions=noise_distributions,
-        compute_conservation_laws=compute_conservation_laws,
-        simplify=simplify,
-        cache_simplify=cache_simplify,
-        verbose=verbose,
-    )
-    exporter = ODEExporter(
-        ode_model,
-        outdir=output_dir,
-        model_name=model_name,
-        verbose=verbose,
-        assume_pow_positivity=assume_pow_positivity,
-        compiler=compiler,
-        generate_sensitivity_code=generate_sensitivity_code
-    )
-    exporter.generate_model_code()
-
-    if compile:
-        exporter.compile_model()
-
-
-@log_execution_time('creating ODE model', logger)
-def ode_model_from_pysb_importer(
-        model: pysb.Model,
-        constant_parameters: List[str] = None,
-        observables: List[str] = None,
-        sigmas: Dict[str, str] = None,
-        noise_distributions: Optional[Dict[str, Union[str, Callable]]] = None,
-        compute_conservation_laws: bool = True,
-        simplify: Callable = sp.powsimp,
-        # Do not enable by default without testing.
-        # See https://github.com/AMICI-dev/AMICI/pull/1672
-        cache_simplify: bool = False,
-        verbose: Union[int, bool] = False,
-) -> ODEModel:
-    """
-    Creates an :class:`amici.ODEModel` instance from a :class:`pysb.Model`
-    instance.
-
-    :param model:
-        see :func:`amici.pysb_import.pysb2amici`
-
-    :param constant_parameters:
-        see :func:`amici.pysb_import.pysb2amici`
-
-    :param observables:
-        see :func:`amici.pysb_import.pysb2amici`
-
-    :param sigmas:
-        dict with names of observable Expressions as keys and names of sigma
-        Expressions as values
-
-    :param noise_distributions:
-        see :func:`amici.pysb_import.pysb2amici`
-
-    :param compute_conservation_laws:
-        see :func:`amici.pysb_import.pysb2amici`
-
-    :param simplify:
-        see :attr:`amici.ODEModel._simplify`
-
-    :param cache_simplify:
-        see :func:`amici.ODEModel.__init__`
-        Note that there are possible issues with PySB models:
-        https://github.com/AMICI-dev/AMICI/pull/1672
-
-    :param verbose: verbosity level for logging, True/False defaults to
-        :attr:`logging.DEBUG`/:attr:`logging.ERROR`
-
-    :return:
-        New ODEModel instance according to ``model``
-    """
-
-    ode = ODEModel(
-        verbose=verbose,
-        simplify=simplify,
-        cache_simplify=cache_simplify,
-    )
-
-    if constant_parameters is None:
-        constant_parameters = []
-
-    if observables is None:
-        observables = []
-
-    if sigmas is None:
-        sigmas = {}
-
-    pysb.bng.generate_equations(model, verbose=verbose)
-
-    _process_pysb_species(model, ode)
-    _process_pysb_parameters(model, ode, constant_parameters)
-    if compute_conservation_laws:
-        _process_pysb_conservation_laws(model, ode)
-    _process_pysb_observables(model, ode, observables, sigmas,
-                              noise_distributions)
-    _process_pysb_expressions(model, ode, observables, sigmas,
-                              noise_distributions)
-    ode._has_quadratic_nllh = not noise_distributions or all(
-        noise_distr in ['normal', 'lin-normal', 'log-normal', 'log10-normal']
-        for noise_distr in noise_distributions.values()
-    )
-
-    _process_stoichiometric_matrix(model, ode, constant_parameters)
-
-    ode.generate_basic_variables()
-
-    return ode
-
-
-@log_execution_time('processing PySB stoich. 
matrix', logger) -def _process_stoichiometric_matrix(pysb_model: pysb.Model, - ode_model: ODEModel, - constant_parameters: List[str]) -> None: - - """ - Exploits the PySB stoichiometric matrix to generate xdot derivatives - - :param pysb_model: - pysb model instance - - :param ode_model: - ODEModel instance - - :param constant_parameters: - list of constant parameters - """ - - x = ode_model.sym('x') - w = list(ode_model.sym('w')) - p = list(ode_model.sym('p')) - x_rdata = list(ode_model.sym('x_rdata')) - - n_x = len(x) - n_w = len(w) - n_p = len(p) - n_r = len(pysb_model.reactions) - - solver_index = ode_model.get_solver_indices() - dflux_dx_dict = {} - dflux_dw_dict = {} - dflux_dp_dict = {} - - w_idx = dict() - p_idx = dict() - wx_idx = dict() - - def get_cached_index(symbol, sarray, index_cache): - idx = index_cache.get(symbol, None) - if idx is not None: - return idx - idx = sarray.index(symbol) - index_cache[symbol] = idx - return idx - - for ir, rxn in enumerate(pysb_model.reactions): - for ix in np.unique(rxn['reactants']): - idx = solver_index.get(ix, None) - if idx is not None: - # species - values = dflux_dx_dict - else: - # conservation law - idx = get_cached_index(x_rdata[ix], w, wx_idx) - values = dflux_dw_dict - - values[(ir, idx)] = sp.diff(rxn['rate'], x_rdata[ix]) - - # typically <= 3 free symbols in rate, we already account for - # species above so we only need to account for propensity, which - # can only be a parameter or expression - for fs in rxn['rate'].free_symbols: - # dw - if isinstance(fs, pysb.Expression): - var = w - idx_cache = w_idx - values = dflux_dw_dict - # dp - elif isinstance(fs, pysb.Parameter): - if fs.name in constant_parameters: - continue - var = p - idx_cache = p_idx - values = dflux_dp_dict - else: - continue - - idx = get_cached_index(fs, var, idx_cache) - values[(ir, idx)] = sp.diff(rxn['rate'], fs) - - dflux_dx = sp.ImmutableSparseMatrix(n_r, n_x, dflux_dx_dict) - dflux_dw = sp.ImmutableSparseMatrix(n_r, n_w, dflux_dw_dict) - dflux_dp = sp.ImmutableSparseMatrix(n_r, n_p, dflux_dp_dict) - - # use dok format to convert numeric csc to sparse symbolic - S = sp.ImmutableSparseMatrix( - n_x, n_r, # don't use shape here as we are eliminating rows - pysb_model.stoichiometry_matrix[ - np.asarray(list(solver_index.keys())),: - ].todok() - ) - # don't use `.dot` since it's awfully slow - ode_model._eqs['dxdotdx_explicit'] = S*dflux_dx - ode_model._eqs['dxdotdw'] = S*dflux_dw - ode_model._eqs['dxdotdp_explicit'] = S*dflux_dp - - -@log_execution_time('processing PySB species', logger) -def _process_pysb_species(pysb_model: pysb.Model, - ode_model: ODEModel) -> None: - """ - Converts pysb Species into States and adds them to the ODEModel instance - - :param pysb_model: - pysb model instance - - :param ode_model: - ODEModel instance - """ - xdot = sp.Matrix(pysb_model.odes) - - for ix, specie in enumerate(pysb_model.species): - init = sp.sympify('0.0') - for ic in pysb_model.odes.model.initials: - if pysb.pattern.match_complex_pattern( - ic.pattern, specie, exact=True): - # we don't want to allow expressions in initial conditions - if ic.value in pysb_model.expressions: - init = pysb_model.expressions[ic.value.name].expand_expr() - else: - init = ic.value - - ode_model.add_component( - State( - sp.Symbol(f'__s{ix}'), - f'{specie}', - init, - xdot[ix] - ) - ) - logger.debug(f'Finished Processing PySB species ') - - -@log_execution_time('processing PySB parameters', logger) -def _process_pysb_parameters(pysb_model: pysb.Model, - ode_model: ODEModel, - 
constant_parameters: List[str]) -> None: - """ - Converts pysb parameters into Parameters or Constants and adds them to - the ODEModel instance - - :param pysb_model: - pysb model - - :param constant_parameters: - list of Parameters that should be constants - - :param ode_model: - ODEModel instance - """ - for par in pysb_model.parameters: - if par.name in constant_parameters: - comp = Constant - else: - comp = Parameter - - ode_model.add_component( - comp(par, f'{par.name}', par.value) - ) - - -@log_execution_time('processing PySB expressions', logger) -def _process_pysb_expressions( - pysb_model: pysb.Model, - ode_model: ODEModel, - observables: List[str], - sigmas: Dict[str, str], - noise_distributions: Optional[Dict[str, Union[str, Callable]]] = None, -) -> None: - r""" - Converts pysb expressions/observables into Observables (with - corresponding standard deviation SigmaY and LogLikelihoodY) or - Expressions and adds them to the ODEModel instance - - :param pysb_model: - pysb model - - :param observables: - list of names of :class`pysb.Expression`\ s or - :class:`pysb.Observable`\ s that are to be mapped to ODEModel - observables - - :param sigmas: - dict with names of observable pysb.Expressions/pysb.Observables - names as keys and names of sigma pysb.Expressions as values - - :param noise_distributions: - see :func:`amici.pysb_import.pysb2amici` - - :param ode_model: - ODEModel instance - """ - # we no longer expand expressions here. pysb/bng guarantees that - # they are ordered according to their dependency and we can - # evaluate them sequentially without reordering. Important to make - # sure that observables are processed first though. - - # we use _constant and _dynamic functions to get access to derived - # expressions that are otherwise only accessible as private attribute - for expr in pysb_model.expressions_constant(include_derived=True)\ - | pysb_model.expressions_dynamic(include_derived=True): - if any( - isinstance(symbol, pysb.Tag) - for symbol in expr.expand_expr().free_symbols - ): - # we only need explicit instantiations of expressions with tags, - # which are defined in the derived expressions. The abstract - # expressions are not needed and lead to compilation errors so - # we skip them. 
- continue - _add_expression(expr, expr.name, expr.expr, - pysb_model, ode_model, observables, sigmas, - noise_distributions) - - -def _add_expression( - sym: sp.Symbol, - name: str, - expr: sp.Basic, - pysb_model: pysb.Model, - ode_model: ODEModel, - observables: List[str], - sigmas: Dict[str, str], - noise_distributions: Optional[Dict[str, Union[str, Callable]]] = None, -): - """ - Adds expressions to the ODE model given and adds observables/sigmas if - appropriate - - :param sym: - symbol how the expression is referenced in the model - - :param name: - name of the expression - - :param expr: - symbolic expression that the symbol refers to - - :param pysb_model: - see :py:func:`_process_pysb_expressions` - - :param observables: - see :py:func:`_process_pysb_expressions` - - :param sigmas: - see :py:func:`_process_pysb_expressions` - - :param noise_distributions: - see :py:func:`amici.pysb_import.pysb2amici` - - :param ode_model: - see :py:func:`_process_pysb_expressions` - """ - ode_model.add_component( - Expression(sym, name, _parse_special_functions(expr)) - ) - - if name in observables: - noise_dist = noise_distributions.get(name, 'normal') \ - if noise_distributions else 'normal' - - y = sp.Symbol(f'{name}') - trafo = noise_distribution_to_observable_transformation(noise_dist) - obs = Observable(y, name, sym, transformation=trafo) - ode_model.add_component(obs) - - sigma_name, sigma_value = _get_sigma_name_and_value( - pysb_model, name, sigmas - ) - - sigma = sp.Symbol(sigma_name) - ode_model.add_component(SigmaY(sigma, f'{sigma_name}', sigma_value)) - - - cost_fun_str = noise_distribution_to_cost_function(noise_dist)(name) - my = generate_measurement_symbol(obs.get_id()) - cost_fun_expr = sp.sympify(cost_fun_str, - locals=dict(zip( - _get_str_symbol_identifiers(name), - (y, my, sigma)))) - ode_model.add_component( - LogLikelihoodY( - sp.Symbol(f'llh_{name}'), - f'llh_{name}', - cost_fun_expr - ) - ) - - -def _get_sigma_name_and_value( - pysb_model: pysb.Model, - obs_name: str, - sigmas: Dict[str, str]) -> Tuple[str, sp.Basic]: - """ - Tries to extract standard deviation symbolic identifier and formula - for a given observable name from the pysb model and if no specification is - available sets default values - - :param pysb_model: - pysb model - - :param obs_name: - name of the observable - - :param sigmas: - dict of :class:`pysb.core.Expression` names that should be mapped to - sigmas - - :return: - tuple containing symbolic identifier and formula for the specified - observable - """ - if obs_name in sigmas: - sigma_name = sigmas[obs_name] - try: - # find corresponding Expression instance - sigma_expr = next(x for x in pysb_model.expressions - if x.name == sigma_name) - except StopIteration: - raise ValueError(f'value of sigma {obs_name} is not a ' - f'valid expression.') - sigma_value = sigma_expr.expand_expr() - else: - sigma_name = f'sigma_{obs_name}' - sigma_value = sp.sympify(1.0) - - return sigma_name, sigma_value - - -@log_execution_time('processing PySB observables', logger) -def _process_pysb_observables( - pysb_model: pysb.Model, - ode_model: ODEModel, - observables: List[str], - sigmas: Dict[str, str], - noise_distributions: Optional[Dict[str, Union[str, Callable]]] = None, -) -> None: - """ - Converts :class:`pysb.core.Observable` into - :class:`ODEModel.Expressions` and adds them to the ODEModel instance - - :param pysb_model: - pysb model - - :param ode_model: - ODEModel instance - - :param observables: - list of names of pysb.Expressions or pysb.Observables that are 
to be
-        mapped to ODEModel observables
-
-    :param sigmas:
-        dict with names of observable pysb.Expressions/pysb.Observables
-        as keys and names of sigma pysb.Expressions as values
-
-    :param noise_distributions:
-        see :func:`amici.pysb_import.pysb2amici`
-    """
-    # only add those pysb observables that occur in the added
-    # Observables as expressions
-    for obs in pysb_model.observables:
-        _add_expression(obs, obs.name, obs.expand_obs(),
-                        pysb_model, ode_model, observables, sigmas,
-                        noise_distributions)
-
-
-@log_execution_time('computing PySB conservation laws', logger)
-def _process_pysb_conservation_laws(pysb_model: pysb.Model,
-                                    ode_model: ODEModel) -> None:
-    """
-    Removes species according to conservation laws to ensure that the
-    jacobian has full rank
-
-    :param pysb_model:
-        pysb model
-
-    :param ode_model:
-        ODEModel instance
-    """
-
-    monomers_without_conservation_law = set()
-    for rule in pysb_model.rules:
-        monomers_without_conservation_law |= \
-            _get_unconserved_monomers(rule, pysb_model)
-
-    monomers_without_conservation_law |= \
-        _compute_monomers_with_fixed_initial_conditions(pysb_model)
-
-    cl_prototypes = _generate_cl_prototypes(
-        monomers_without_conservation_law, pysb_model, ode_model
-    )
-    conservation_laws = _construct_conservation_from_prototypes(
-        cl_prototypes, pysb_model
-    )
-    _add_conservation_for_constant_species(ode_model, conservation_laws)
-
-    _flatten_conservation_laws(conservation_laws)
-
-    for cl in conservation_laws:
-        ode_model.add_conservation_law(**cl)
-
-
-def _compute_monomers_with_fixed_initial_conditions(
-        pysb_model: pysb.Model) -> Set[str]:
-    """
-    Computes the set of monomers in a model with species that have fixed
-    initial conditions
-
-    :param pysb_model: pysb model
-
-    :return:
-        set of monomer names with fixed initial conditions
-    """
-    monomers_with_fixed_initial_conditions = set()
-
-    for monomer in pysb_model.monomers:
-        # check if monomer has an initial condition that is fixed (means
-        # that corresponding state is constant and all conservation
-        # laws are broken)
-        if any([
-            ic.fixed  # true or false
-            for ic in pysb_model.initials
-            if monomer.name in extract_monomers(ic.pattern)
-        ]):
-            monomers_with_fixed_initial_conditions |= {monomer.name}
-
-    return monomers_with_fixed_initial_conditions
-
-
-def _generate_cl_prototypes(excluded_monomers: Iterable[str],
-                            pysb_model: pysb.Model,
-                            ode_model: ODEModel) -> CL_Prototype:
-    """
-    Constructs a dict that contains preprocessed information for the
-    construction of conservation laws
-
-    :param excluded_monomers:
-        list of monomer names for which no prototypes
-        should be computed
-
-    :param pysb_model:
-        pysb model
-
-    :param ode_model:
-        ODEModel instance
-
-    :return:
-        dict('monomer.name': {'possible_indices': ..., 'target_index': ...})
-    """
-    cl_prototypes = dict()
-
-    _compute_possible_indices(cl_prototypes, pysb_model, ode_model,
-                              excluded_monomers)
-    _compute_dependency_idx(cl_prototypes)
-    _compute_target_index(cl_prototypes, ode_model)
-
-    return cl_prototypes
-
-
-def _compute_possible_indices(cl_prototypes: CL_Prototype,
-                              pysb_model: pysb.Model,
-                              ode_model: ODEModel,
-                              excluded_monomers: Iterable[str]) -> None:
-    """
-    Computes viable choices for target_index, i.e., species that could be
-    removed and replaced by an algebraic expression according to the
-    conservation law
-
-    :param cl_prototypes:
-        dict in which possible indices will be written
-
-    :param pysb_model:
-        pysb model
-
-    :param ode_model:
-        ODEModel instance
-
-    :param excluded_monomers:
-        monomers for which no conservation laws will be
-        computed
-    """
-    for monomer in pysb_model.monomers:
-        if monomer.name not in excluded_monomers:
-            compartments = [
-                str(mp.compartment)  # string-based comparison as
-                # compartments are not hashable
-                for cp in pysb_model.species
-                for mp in cp.monomer_patterns
-                if mp.monomer.name == monomer.name
-            ]
-
-            if len(set(compartments)) > 1:
-                raise ValueError('Conservation laws involving species in '
-                                 'multiple compartments are currently not '
-                                 'supported! Please run pysb2amici with '
-                                 'compute_conservation_laws=False')
-            # TODO: implement this, multiply species by the volume of
-            # their respective compartment and allow total_cl to depend
-            # on parameters + constants and update the respective symbolic
-            # derivative accordingly
-
-            prototype = dict()
-            prototype['possible_indices'] = [
-                ix
-                for ix, specie in enumerate(pysb_model.species)
-                if monomer.name in extract_monomers(specie)
-                and not ode_model.state_is_constant(ix)
-            ]
-
-            prototype['species_count'] = len(
-                prototype['possible_indices']
-            )
-
-            if prototype['possible_indices']:
-                cl_prototypes[monomer.name] = prototype
-
-
-def _compute_dependency_idx(cl_prototypes: CL_Prototype) -> None:
-    """
-    Compute connecting species; this allows us to efficiently compute
-    whether the respective conservation law would induce a cyclic dependency.
-    Adds a 'dependency_idx' field to the prototype dict that
-    itself is a dict where keys correspond to indices that, when used as
-    target index, yield dependencies on conservation laws of monomers in
-    the respective values
-
-    :param cl_prototypes:
-        dict in which dependency indices will be written
-    """
-    for monomer_i, prototype_i in cl_prototypes.items():
-        if 'dependency_idx' not in prototype_i:
-            prototype_i['dependency_idx'] = dict()
-
-        for monomer_j, prototype_j in cl_prototypes.items():
-            if monomer_i == monomer_j:
-                continue
-
-            if 'dependency_idx' not in prototype_j:
-                prototype_j['dependency_idx'] = dict()
-
-            idx_overlap = set(prototype_i['possible_indices']).intersection(
-                set(prototype_j['possible_indices'])
-            )
-            if len(idx_overlap) == 0:
-                continue
-
-            for idx in idx_overlap:
-                if idx not in prototype_i['dependency_idx']:
-                    prototype_i['dependency_idx'][idx] = set()
-
-                if idx not in prototype_j['dependency_idx']:
-                    prototype_j['dependency_idx'][idx] = set()
-
-                prototype_i['dependency_idx'][idx] |= {monomer_j}
-                prototype_j['dependency_idx'][idx] |= {monomer_i}
-
-
-def _compute_target_index(cl_prototypes: CL_Prototype,
-                          ode_model: ODEModel) -> None:
-    """
-    Computes the target index for every monomer
-
-    :param cl_prototypes:
-        dict that contains possible indices for every monomer
-
-    :param ode_model:
-        ODEModel instance
-    """
-    possible_indices = list(set(list(itertools.chain(*[
-        cl_prototypes[monomer]['possible_indices']
-        for monomer in cl_prototypes
-    ]))))
-
-    # Note: currently this function is supposed to also count appearances in
-    # expressions. However, expressions are currently still empty as they
-    # are also populated from conservation laws. In case there are many
-    # state-heavy expressions in the model (which should not be the case for
-    # mass action kinetics), this may lead to suboptimal results and could be
-    # improved.
- # As this would require substantial code shuffling, this will only be - # fixed if this becomes an actual problem - appearance_counts = ode_model.get_appearance_counts(possible_indices) - - # in this initial guess we ignore the cost of having cyclic dependencies - # between conservation laws - for monomer in cl_prototypes: - prototype = cl_prototypes[monomer] - # extract monomer specific appearance counts - prototype['appearance_counts'] = \ - [ - appearance_counts[possible_indices.index(idx)] - for idx in prototype['possible_indices'] - ] - # select target index as possible index with minimal appearance count - if len(prototype['appearance_counts']) == 0: - raise RuntimeError(f'Failed to compute conservation law for ' - f'monomer {monomer}') - - idx = np.argmin(prototype['appearance_counts']) - - # remove entries from possible indices and appearance counts so we - # do not consider them again in later iterations - prototype['target_index'] = prototype['possible_indices'].pop(idx) - prototype['appearance_count'] = prototype['appearance_counts'].pop(idx) - - # this is only an approximation as the effective species count - # of other conservation laws may also be affected by the chosen - # target index. As long as the number of unique monomers in - # multimers has a low upper bound and the species count does not - # vary too much across conservation laws, this approximation - # should be fine - prototype['fillin'] = \ - prototype['appearance_count'] * prototype['species_count'] - - # we might end up with the same index for multiple monomers, so loop until - # we have a set of unique target indices - while not _cl_prototypes_are_valid(cl_prototypes): - _greedy_target_index_update(cl_prototypes) - - -def _cl_prototypes_are_valid(cl_prototypes: CL_Prototype) -> bool: - """ - Checks consistency of cl_prototypes by asserting that target indices - are unique and there are no cyclic dependencies - - :param cl_prototypes: - dict that contains dependency and target indexes for - every monomer - """ - # target indices are unique - if len(cl_prototypes) != len(set(_get_target_indices(cl_prototypes))): - return False - # conservation law dependencies are cycle free - if any( - _cl_has_cycle(monomer, cl_prototypes) - for monomer in cl_prototypes - ): - return False - - return True - - -def _cl_has_cycle(monomer: str, cl_prototypes: CL_Prototype) -> bool: - """ - Checks whether monomer has a conservation law that is part of a - cyclic dependency - - :param monomer: - name of monomer for which conservation law is to be checked - - :param cl_prototypes: - dict that contains dependency and target indexes for every monomer - - :return: - boolean indicating whether the conservation law is cyclic - """ - - prototype = cl_prototypes[monomer] - - if prototype['target_index'] not in prototype['dependency_idx']: - return False - - visited = [monomer] - root = monomer - return any( - _is_in_cycle( - connecting_monomer, - cl_prototypes, - visited, - root - ) - for connecting_monomer in prototype['dependency_idx'][ - prototype['target_index'] - ] - ) - - -def _is_in_cycle(monomer: str, - cl_prototypes: CL_Prototype, - visited: List[str], - root: str) -> bool: - """ - Recursively checks for cycles in conservation law dependencies via - Depth First Search - - :param monomer: - current location in cl dependency graph - - :param cl_prototypes: - dict that contains dependency and target indexes for - every monomer - - :param visited: - history of visited monomers with conservation laws - - :param root: - monomer at which 
the cycle search was started - - :return: - boolean indicating whether the specified monomer is part of a cyclic - conservation law - - """ - if monomer == root: - return True # we found a cycle and root is part of it - - if monomer in visited: - return False # we found a cycle but root is not part of it - - visited.append(monomer) - - prototype = cl_prototypes[monomer] - - if prototype['target_index'] not in prototype['dependency_idx']: - return False - - return any( - _is_in_cycle( - connecting_monomer, - cl_prototypes, - visited, - root - ) - for connecting_monomer in prototype['dependency_idx'][ - prototype['target_index'] - ] - ) - - -def _greedy_target_index_update(cl_prototypes: CL_Prototype) -> None: - """ - Computes unique target indices for conservation laws from possible - indices such that the expected fill-in in symbolic derivatives is - minimized - - :param cl_prototypes: - dict that contains possible indices and non-unique target indices - for every monomer - """ - - target_indices = _get_target_indices(cl_prototypes) - - for monomer, prototype in cl_prototypes.items(): - if target_indices.count(prototype['target_index']) > 1 or \ - _cl_has_cycle(monomer, cl_prototypes): - # compute how much fillin the next best target_index would yield - - # we exclude already existing target indices so that updating the - # target index cannot remove uniqueness from already unique target - # indices; this may slightly reduce the chances of finding a - # solution but prevents infinite loops - for target_index in list(set(target_indices)): - try: - local_idx = prototype['possible_indices'].index( - target_index - ) - except ValueError: - local_idx = None - - # compare against None explicitly, as index 0 is a valid hit - if local_idx is not None: - del prototype['possible_indices'][local_idx] - del prototype['appearance_counts'][local_idx] - - if len(prototype['possible_indices']) == 0: - prototype['diff_fillin'] = -1 - continue - - idx = np.argmin(prototype['appearance_counts']) - - prototype['local_index'] = idx - prototype['alternate_target_index'] = \ - prototype['possible_indices'][idx] - prototype['alternate_appearance_count'] = \ - prototype['appearance_counts'][idx] - - prototype['alternate_fillin'] = \ - prototype['alternate_appearance_count'] \ - * prototype['species_count'] - - prototype['diff_fillin'] = \ - prototype['alternate_fillin'] - prototype['fillin'] - else: - prototype['diff_fillin'] = -1 - - if all( - prototype['diff_fillin'] == -1 - for prototype in cl_prototypes.values() - ): - raise RuntimeError('Could not compute a valid set of conservation ' - 'laws for this model!') - - # this puts prototypes with high diff_fillin last - cl_prototypes = sorted( - cl_prototypes.items(), key=lambda kv: kv[1]['diff_fillin'] - ) - cl_prototypes = { - proto[0]: proto[1] - for proto in cl_prototypes - } - - for monomer in cl_prototypes: - prototype = cl_prototypes[monomer] - # we check that we - # A) have an alternative index computed, i.e. that the - # monomer originally had a non-unique target_index - # B) that the target_index still is not unique or part of a cyclic - # dependency. Due to the sorting, this will always be the monomer - # with the highest diff_fillin (note that the target index counts - # are recomputed on the fly) - - if prototype['diff_fillin'] > -1 \ - and ( - _get_target_indices(cl_prototypes).count( - prototype['target_index'] - ) > 1 - or _cl_has_cycle(monomer, cl_prototypes) - ): - prototype['fillin'] = prototype['alternate_fillin'] - prototype['target_index'] = prototype['alternate_target_index'] - prototype['appearance_count'] = \ - prototype['alternate_appearance_count'] - - del prototype['possible_indices'][prototype['local_index']] - del prototype['appearance_counts'][prototype['local_index']]
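To make the greedy selection above concrete, here is a minimal, self-contained sketch (toy data, not part of the patch) of how two monomers can initially collide on the same target index, which is exactly the situation `_greedy_target_index_update` resolves:

import numpy as np

# toy prototypes: two monomers compete for the same target index 3
cl_prototypes = {
    'A': {'possible_indices': [3, 5], 'appearance_counts': [2, 4],
          'species_count': 2},
    'B': {'possible_indices': [3, 7], 'appearance_counts': [1, 6],
          'species_count': 3},
}

for prototype in cl_prototypes.values():
    idx = int(np.argmin(prototype['appearance_counts']))
    prototype['target_index'] = prototype['possible_indices'].pop(idx)
    prototype['appearance_count'] = prototype['appearance_counts'].pop(idx)
    # expected fill-in if this target index is eliminated
    prototype['fillin'] = (prototype['appearance_count']
                           * prototype['species_count'])

# both monomers picked index 3, so this is not a valid assignment yet;
# the update step would switch the prototype with the cheapest
# alternative (smallest diff_fillin) to its next-best index
targets = [p['target_index'] for p in cl_prototypes.values()]
assert len(set(targets)) < len(targets)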
-def _get_target_indices( - cl_prototypes: CL_Prototype) -> List[int]: - """ - Computes the list of target indices for the current - conservation law prototypes - - :param cl_prototypes: - dict that contains target indices for every monomer - - :return: - List of target indices - """ - return [ - prototype['target_index'] for prototype in cl_prototypes.values() - ] - - -def _construct_conservation_from_prototypes( - cl_prototypes: CL_Prototype, - pysb_model: pysb.Model -) -> List[ConservationLaw]: - """ - Computes the algebraic expression for the total amount of a given - monomer - - :param cl_prototypes: - see return of :func:`_generate_cl_prototypes` - - :param pysb_model: - pysb model - - :return: - list of dicts describing conservation laws - """ - conservation_laws = [] - for monomer_name in cl_prototypes: - target_index = cl_prototypes[monomer_name]['target_index'] - coefficients = dict() - - for ix, specie in enumerate(pysb_model.species): - count = extract_monomers(specie).count(monomer_name) - if count > 0: - coefficients[sp.Symbol(f'__s{ix}')] = count - - conservation_laws.append({ - 'state': sp.Symbol(f'__s{target_index}'), - 'total_abundance': sp.Symbol(f'tcl__s{target_index}'), - 'coefficients': coefficients, - }) - - return conservation_laws - - -def _add_conservation_for_constant_species( - ode_model: ODEModel, - conservation_laws: List[ConservationLaw] -) -> None: - """ - Adds a conservation law (constant total abundance) for every constant - species in the model - - :param ode_model: - ODEModel instance to which the conservation laws will be added - - :param conservation_laws: - see return of :func:`_construct_conservation_from_prototypes` - - """ - - for ix in range(ode_model.num_states_rdata()): - if ode_model.state_is_constant(ix): - conservation_laws.append({ - 'state': sp.Symbol(f'__s{ix}'), - 'total_abundance': sp.Symbol(f'tcl__s{ix}'), - 'coefficients': {sp.Symbol(f'__s{ix}'): 1.0} - }) - - -def _flatten_conservation_laws( - conservation_laws: List[ConservationLaw]) -> None: - """ - Flatten the conservation laws such that the state expressions no longer - depend on any states that are replaced by conservation laws - - :param conservation_laws: - see return of :func:`_construct_conservation_from_prototypes` - """ - conservation_law_subs = \ - _get_conservation_law_subs(conservation_laws) - - while conservation_law_subs: - for cl in conservation_laws: - # only update if we changed something - if any( - _apply_conservation_law_sub(cl, sub) - for sub in conservation_law_subs - ): - conservation_law_subs = \ - _get_conservation_law_subs(conservation_laws) - - -def _apply_conservation_law_sub(cl: ConservationLaw, - sub: Tuple[sp.Symbol, ConservationLaw]) -> bool: - """ - Applies a substitution to a conservation law by eliminating the - substituted state and updating the coefficients of the remaining states - - :param cl: - conservation law - - :param sub: - substitution to apply,
tuple of (state to be replaced, conservation - law) - - :return: boolean flag indicating whether the substitution was applied - """ - if not _state_in_cl_formula(sub[0], cl): - return False - - coeff = cl['coefficients'].pop(sub[0], 0.0) - # x_j = T/b_j - sum_{i≠j}(x_i * b_i) / b_j - # don't need to account for totals here as we can simply - # absorb that into the new total - for k, v in sub[1].items(): - if k == sub[0]: - continue - update = - coeff * v / sub[1][sub[0]] - - if k in cl['coefficients']: - cl['coefficients'][k] += update - else: - cl['coefficients'][k] = update - - return True - - -def _state_in_cl_formula( - state: sp.Symbol, cl: ConservationLaw -) -> bool: - """ - Checks whether state appears in the formula the provided cl - - :param state: - state - - :param cl: - conservation law - - :return: - boolean indicator - """ - if cl['state'] == state: - return False - - return cl['coefficients'].get(state, 0.0) != 0.0 - - -def _get_conservation_law_subs( - conservation_laws: List[ConservationLaw] -) -> List[Tuple[sp.Symbol, Dict[sp.Symbol, sp.Expr]]]: - """ - Computes a list of (state, coeffs) tuples for conservation laws that still - appear in other conservation laws - - :param conservation_laws: - see return of :func:`_flatten_conservation_laws` - - :return: - list of tuples containing substitution rules to be used with sympy - subs - """ - return [ - (cl['state'], cl['coefficients']) for cl in conservation_laws - if any( - _state_in_cl_formula(cl['state'], other_cl) - for other_cl in conservation_laws - ) - ] - -def has_fixed_parameter_ic(specie: pysb.core.ComplexPattern, - pysb_model: pysb.Model, - ode_model: ODEModel) -> bool: - """ - Wrapper to interface - :meth:`ode_export.ODEModel.state_has_fixed_parameter_initial_condition` - from a pysb specie/model arguments - - :param specie: - pysb species - - :param pysb_model: - pysb model - - :param ode_model: - ODE model - - :return: - ``False`` if the species does not have an initial condition at all. - Otherwise the return value of - :meth:`ode_export.ODEModel.state_has_fixed_parameter_initial_condition` - """ - # ComplexPatterns are not hashable, so we have to compare by string - ic_index = next( - ( - ic - for ic, condition in enumerate(pysb_model.initials) - if pysb.pattern.match_complex_pattern(condition[0], - specie, exact=True) - ), - None - ) - if ic_index is None: - return False - else: - return ode_model.state_has_fixed_parameter_initial_condition( - ic_index - ) - - -def extract_monomers( - complex_patterns: Union[pysb.ComplexPattern, - List[pysb.ComplexPattern]] -) -> List[str]: - """ - Constructs a list of monomer names contained in complex patterns. - Multiplicity of names corresponds to the stoichiometry in the complex. - - :param complex_patterns: - (list of) complex pattern(s) - - :return: - list of monomer names - """ - if not isinstance(complex_patterns, list): - complex_patterns = [complex_patterns] - return [ - mp.monomer.name - for cp in complex_patterns - if cp is not None - for mp in cp.monomer_patterns - ] - - -def _get_unconserved_monomers(rule: pysb.Rule, - pysb_model: pysb.Model) -> Set[str]: - """ - Constructs the set of monomer names for which the specified rule changes - the stoichiometry of the monomer in the specified model. 
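As an aside on the flattening machinery above (the parameter list of `_get_unconserved_monomers` continues below): a hedged, self-contained sympy sketch of the coefficient update in `_apply_conservation_law_sub`, using toy symbols rather than the importer's data structures. Given T1 = x0 + x1 and T2 = x1 + x2, eliminating x1 from the second law yields x2 - x0, with the totals absorbed into the new total:

import sympy as sp

x0, x1, x2 = sp.symbols('x0 x1 x2')

# law 1: T1 = x0 + x1 (eliminates x1), law 2: T2 = x1 + x2 (eliminates x2)
law2 = {'state': x2, 'coefficients': {x1: sp.Float(1), x2: sp.Float(1)}}
sub = (x1, {x0: sp.Float(1), x1: sp.Float(1)})  # substitution from law 1

coeff = law2['coefficients'].pop(sub[0])
for k, v in sub[1].items():
    if k == sub[0]:
        continue
    law2['coefficients'][k] = (law2['coefficients'].get(k, 0)
                               - coeff * v / sub[1][sub[0]])

# law 2 no longer references x1: {x2: 1.0, x0: -1.0},
# i.e. the new total is T2 - T1 = x2 - x0
print(law2['coefficients'])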
- - :param rule: - the pysb rule - - :param pysb_model: - pysb model - - :return: - set of monomer names for which the stoichiometry is not conserved - """ - unconserved_monomers = set() - - if not rule.delete_molecules \ - and len(rule.product_pattern.complex_patterns) == 0: - # if delete_molecules is not True but we have a degradation rule, - # we have to actually go through the reactions that are created by - # the rule - for reaction in [r for r in pysb_model.reactions - if rule.name in r['rule']]: - unconserved_monomers |= _get_changed_stoichiometries( - [pysb_model.species[ix] for ix in reaction['reactants']], - [pysb_model.species[ix] for ix in reaction['products']] - ) - else: - # otherwise we can simply extract all information for the rule - # itself, which is computationally much more efficient - unconserved_monomers |= _get_changed_stoichiometries( - rule.reactant_pattern.complex_patterns, - rule.product_pattern.complex_patterns - ) - - return unconserved_monomers - - -def _get_changed_stoichiometries( - reactants: Union[pysb.ComplexPattern, List[pysb.ComplexPattern]], - products: Union[pysb.ComplexPattern, List[pysb.ComplexPattern]] -) -> Set[str]: - """ - Constructs the set of monomer names which have different - stoichiometries in reactants and products. - - :param reactants: - (list of) complex pattern(s) - :param products: - (list of) complex pattern(s) - - :returns: - set of monomer name for which the stoichiometry changed - """ - - changed_stoichiometries = set() - - reactant_monomers = extract_monomers( - reactants - ) - - product_monomers = extract_monomers( - products - ) - - for monomer in set(reactant_monomers + product_monomers): - if reactant_monomers.count(monomer) != product_monomers.count(monomer): - changed_stoichiometries.add(monomer) - - return changed_stoichiometries - - -def pysb_model_from_path(pysb_model_file: Union[str, Path]) -> pysb.Model: - """Load a pysb model module and return the :class:`pysb.Model` instance - - :param pysb_model_file: Full or relative path to the PySB model module - :return: The pysb Model instance - """ - - pysb_model_module_name = \ - os.path.splitext(os.path.split(pysb_model_file)[-1])[0] - - import importlib.util - spec = importlib.util.spec_from_file_location( - pysb_model_module_name, pysb_model_file) - module = importlib.util.module_from_spec(spec) - sys.modules[pysb_model_module_name] = module - spec.loader.exec_module(module) - - return module.model diff --git a/python/amici/sbml_import.py b/python/amici/sbml_import.py deleted file mode 100644 index e1f35245e4..0000000000 --- a/python/amici/sbml_import.py +++ /dev/null @@ -1,2365 +0,0 @@ -""" -SBML Import ------------ -This module provides all necessary functionality to import a model specified -in the `Systems Biology Markup Language (SBML) `_. -""" -import copy -import itertools as itt -import logging -import math -import os -import re -import warnings -from pathlib import Path -from typing import (Any, Callable, Dict, Iterable, List, Optional, Tuple, - Union) - -import libsbml as sbml -import sympy as sp - -from . 
import has_clibs -from .constants import SymbolId -from .import_utils import (RESERVED_SYMBOLS, - _check_unsupported_functions, - _get_str_symbol_identifiers, - _parse_special_functions, - generate_measurement_symbol, - generate_regularization_symbol, - noise_distribution_to_cost_function, - noise_distribution_to_observable_transformation, - smart_subs, smart_subs_dict, toposort_symbols) -from .logging import get_logger, log_execution_time, set_log_level -from .ode_export import ( - ODEExporter, ODEModel, symbol_with_assumptions, _default_simplify -) - - -class SBMLException(Exception): - pass - - -SymbolicFormula = Dict[sp.Symbol, sp.Expr] - - -default_symbols = { - symbol: {} for symbol in SymbolId -} - -ConservationLaw = Dict[str, Union[str, sp.Expr]] - -logger = get_logger(__name__, logging.ERROR) - - -class SbmlImporter: - """ - Class to generate AMICI C++ files for a model provided in the Systems - Biology Markup Language (SBML). - - :ivar show_sbml_warnings: - indicates whether libSBML warnings should be - displayed - - :ivar symbols: - dict carrying symbolic definitions - - :ivar sbml_reader: - - The libSBML sbml reader - - .. warning:: - Not storing this may result in a segfault. - - :ivar sbml_doc: - document carrying the sbml definition - - .. warning:: - Not storing this may result in a segfault. - - :ivar sbml: - SBML model to import - - :ivar compartments: - dict of compartment ids and compartment volumes - - :ivar stoichiometric_matrix: - stoichiometric matrix of the model - - :ivar flux_vector: - reaction kinetic laws - - :ivar flux_ids: - identifiers for elements of flux_vector - - :ivar _local_symbols: - model symbols for sympy to consider during sympification - see `locals`argument in `sympy.sympify` - - :ivar species_assignment_rules: - Assignment rules for species. - Key is symbolic identifier and value is assignment value - - :ivar compartment_assignment_rules: - Assignment rules for compartments. - Key is symbolic identifier and value is assignment value - - :ivar parameter_assignment_rules: - assignment rules for parameters, these parameters are not permissible - for sensitivity analysis - - :ivar initial_assignments: - initial assignments for parameters, these parameters are not - permissible for sensitivity analysis - - :ivar sbml_parser_settings: - sets behaviour of SBML Formula parsing - - """ - - def __init__(self, - sbml_source: Union[str, Path, sbml.Model], - show_sbml_warnings: bool = False, - from_file: bool = True) -> None: - """ - Create a new Model instance. - - :param sbml_source: - Either a path to SBML file where the model is specified, - or a model string as created by sbml.sbmlWriter( - ).writeSBMLToString() or an instance of `libsbml.Model`. - - :param show_sbml_warnings: - Indicates whether libSBML warnings should be displayed. 
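A minimal usage sketch for the constructor documented here (the remaining parameters continue below; the file name is hypothetical):

from amici.sbml_import import SbmlImporter

# from a file (the default), keeping libSBML warnings silent
importer = SbmlImporter('model.xml')

# or from an SBML string
# importer = SbmlImporter(sbml_string, from_file=False)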
- - :param from_file: - Whether `sbml_source` is a file name (True, default), or an SBML - string - """ - if isinstance(sbml_source, sbml.Model): - self.sbml_doc: sbml.Document = sbml_source.getSBMLDocument() - else: - self.sbml_reader: sbml.SBMLReader = sbml.SBMLReader() - if from_file: - sbml_doc = self.sbml_reader.readSBMLFromFile(str(sbml_source)) - else: - sbml_doc = self.sbml_reader.readSBMLFromString(sbml_source) - self.sbml_doc = sbml_doc - - self.show_sbml_warnings: bool = show_sbml_warnings - - # process document - self._process_document() - - self.sbml: sbml.Model = self.sbml_doc.getModel() - - # Long and short names for model components - self.symbols: Dict[SymbolId, Dict[sp.Symbol, Dict[str, Any]]] = {} - - self._local_symbols: Dict[str, Union[sp.Expr, sp.Function]] = {} - self.compartments: SymbolicFormula = {} - self.compartment_assignment_rules: SymbolicFormula = {} - self.species_assignment_rules: SymbolicFormula = {} - self.parameter_assignment_rules: SymbolicFormula = {} - self.initial_assignments: SymbolicFormula = {} - - self._reset_symbols() - - # http://sbml.org/Software/libSBML/5.18.0/docs/python-api/classlibsbml_1_1_l3_parser_settings.html#abcfedd34efd3cae2081ba8f42ea43f52 - # all defaults except disable unit parsing - self.sbml_parser_settings = sbml.L3ParserSettings( - self.sbml, sbml.L3P_PARSE_LOG_AS_LOG10, - sbml.L3P_EXPAND_UNARY_MINUS, sbml.L3P_NO_UNITS, - sbml.L3P_AVOGADRO_IS_CSYMBOL, - sbml.L3P_COMPARE_BUILTINS_CASE_INSENSITIVE, None, - sbml.L3P_MODULO_IS_PIECEWISE - ) - - def _process_document(self) -> None: - """ - Validate and simplify document. - """ - # Ensure we got a valid SBML model, otherwise further processing - # might lead to undefined results - self.sbml_doc.validateSBML() - _check_lib_sbml_errors(self.sbml_doc, self.show_sbml_warnings) - - # apply several model simplifications that make our life substantially - # easier - if self.sbml_doc.getModel().getNumFunctionDefinitions(): - convert_config = sbml.SBMLFunctionDefinitionConverter()\ - .getDefaultProperties() - self.sbml_doc.convert(convert_config) - - convert_config = sbml.SBMLLocalParameterConverter().\ - getDefaultProperties() - self.sbml_doc.convert(convert_config) - - # If any of the above calls produces an error, this will be added to - # the SBMLError log in the sbml document. Thus, it is sufficient to - # check the error log just once after all conversion/validation calls. 
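The simplification steps described in this comment can be reproduced standalone with libSBML, which may help when debugging import problems; a sketch assuming a readable model.xml:

import libsbml as sbml

doc = sbml.SBMLReader().readSBMLFromFile('model.xml')

# inline function definitions, then promote local parameters to global ones
for converter in (sbml.SBMLFunctionDefinitionConverter(),
                  sbml.SBMLLocalParameterConverter()):
    doc.convert(converter.getDefaultProperties())

# conversion/validation issues accumulate in the document's error log
print(doc.getNumErrors())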
- _check_lib_sbml_errors(self.sbml_doc, self.show_sbml_warnings) - - def _reset_symbols(self) -> None: - """ - Reset the symbols attribute to default values - """ - self.symbols = copy.deepcopy(default_symbols) - self._local_symbols = {} - - def sbml2amici( - self, - model_name: str, - output_dir: Union[str, Path] = None, - observables: Dict[str, Dict[str, str]] = None, - event_observables: Dict[str, Dict[str, str]] = None, - constant_parameters: Iterable[str] = None, - sigmas: Dict[str, Union[str, float]] = None, - event_sigmas: Dict[str, Union[str, float]] = None, - noise_distributions: Dict[str, Union[str, Callable]] = None, - event_noise_distributions: Dict[str, Union[str, Callable]] = None, - verbose: Union[int, bool] = logging.ERROR, - assume_pow_positivity: bool = False, - compiler: str = None, - allow_reinit_fixpar_initcond: bool = True, - compile: bool = True, - compute_conservation_laws: bool = True, - simplify: Optional[Callable] = _default_simplify, - cache_simplify: bool = False, - log_as_log10: bool = True, - generate_sensitivity_code: bool = True, - ) -> None: - """ - Generate and compile AMICI C++ files for the model provided to the - constructor. - - The resulting model can be imported as a regular Python module (if - `compile=True`), or used from Matlab or C++ as described in the - documentation of the respective AMICI interface. - - Note that this generates model ODEs for changes in concentrations, not - amounts, unless the `hasOnlySubstanceUnits` attribute has been - defined for a particular species. - - Sensitivity analysis for local parameters is enabled by creating - global parameters _{reactionId}_{localParameterName}. - - :param model_name: - name of the model/model directory - - :param output_dir: - see :meth:`amici.ode_export.ODEExporter.set_paths` - - :param observables: - dictionary(observableId: {'name': observableName - (optional), 'formula': formulaString}) to be added to the model - - :param event_observables: - dictionary(eventObservableId: {'name': eventObservableName - (optional), 'event': eventId, 'formula': formulaString}) to be - added to the model - - :param constant_parameters: - list of SBML Ids identifying constant parameters - - :param sigmas: - dictionary(observableId: sigma value or (existing) parameter name) - - :param event_sigmas: - dictionary(eventObservableId: sigma value or (existing) parameter - name) - - :param noise_distributions: - dictionary(observableId: noise type). - If nothing is passed for some observable id, a normal noise model - is assumed by default. Either pass a noise type identifier, or a - callable generating a custom noise string. - - :param event_noise_distributions: - dictionary(eventObservableId: noise type). - If nothing is passed for some observable id, a normal noise model - is assumed by default. Either pass a noise type identifier, or a - callable generating a custom noise string. - - :param verbose: - verbosity level for logging, ``True``/``False`` default to - ``logging.DEBUG``/``logging.ERROR`` - - :param assume_pow_positivity: - if set to ``True``, a special pow function is - used to avoid problems with state variables that may become - negative due to numerical errors - - :param compiler: - distutils/setuptools compiler selection to build the - python extension - - :param allow_reinit_fixpar_initcond: - see :class:`amici.ode_export.ODEExporter` - - :param compile: - If ``True``, compile the generated Python package, - if ``False``, just generate code.
- - :param compute_conservation_laws: - if set to ``True``, conservation laws are automatically computed - and applied such that the state-jacobian of the ODE - right-hand-side has full rank. This option should be set to - ``True`` when using the Newton algorithm to compute steadystate - sensitivities. - Conservation laws for constant species are enabled by default. - Support for conservation laws for non-constant species is - experimental and may be enabled by setting an environment variable - ``AMICI_EXPERIMENTAL_SBML_NONCONST_CLS`` to either ``demartino`` - to use the algorithm proposed by De Martino et al. (2014) - https://doi.org/10.1371/journal.pone.0100750, or to any other value - to use the deterministic algorithm implemented in - ``conserved_moieties2.py``. In some cases, the ``demartino`` may - run for a very long time. This has been observed for example in the - case of stoichiometric coefficients with many significant digits. - - :param simplify: - see :attr:`ODEModel._simplify` - - :param cache_simplify: - see :func:`amici.ODEModel.__init__` - - :param log_as_log10: - If ``True``, log in the SBML model will be parsed as ``log10`` - (default), if ``False``, log will be parsed as natural logarithm - ``ln`` - - :param generate_sensitivity_code: - If ``False``, the code required for sensitivity computation will - not be generated - """ - set_log_level(logger, verbose) - - ode_model = self._build_ode_model( - observables=observables, - event_observables=event_observables, - constant_parameters=constant_parameters, - sigmas=sigmas, - event_sigmas=event_sigmas, - noise_distributions=noise_distributions, - event_noise_distributions=event_noise_distributions, - verbose=verbose, - compute_conservation_laws=compute_conservation_laws, - simplify=simplify, - cache_simplify=cache_simplify, - log_as_log10=log_as_log10, - ) - - exporter = ODEExporter( - ode_model, - model_name=model_name, - outdir=output_dir, - verbose=verbose, - assume_pow_positivity=assume_pow_positivity, - compiler=compiler, - allow_reinit_fixpar_initcond=allow_reinit_fixpar_initcond, - generate_sensitivity_code=generate_sensitivity_code - ) - exporter.generate_model_code() - - if compile: - if not has_clibs: - warnings.warn('AMICI C++ extensions have not been built. ' - 'Generated model code, but unable to compile.') - exporter.compile_model() - - def _build_ode_model( - self, - observables: Dict[str, Dict[str, str]] = None, - event_observables: Dict[str, Dict[str, str]] = None, - constant_parameters: Iterable[str] = None, - sigmas: Dict[str, Union[str, float]] = None, - event_sigmas: Dict[str, Union[str, float]] = None, - noise_distributions: Dict[str, Union[str, Callable]] = None, - event_noise_distributions: Dict[str, Union[str, Callable]] = None, - verbose: Union[int, bool] = logging.ERROR, - compute_conservation_laws: bool = True, - simplify: Optional[Callable] = _default_simplify, - cache_simplify: bool = False, - log_as_log10: bool = True, - ) -> ODEModel: - """Generate an ODEModel from this SBML model. - - See :py:func:`sbml2amici` for parameters. 
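For orientation, a typical `sbml2amici` call might look as follows (the docstring closes just below; model name, path, and observable names here are made up for illustration):

importer.sbml2amici(
    model_name='my_model',
    output_dir='amici_models/my_model',
    observables={'obs_a': {'name': 'A', 'formula': 'species_a'}},
    constant_parameters=['k0'],
    sigmas={'obs_a': 'sigma_a'},
    verbose=True,
)

# afterwards, the generated package can be imported as a Python module
import amici
model_module = amici.import_model_module('my_model', 'amici_models/my_model')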
- """ - constant_parameters = list(constant_parameters) \ - if constant_parameters else [] - - if sigmas is None: - sigmas = {} - - if event_sigmas is None: - event_sigmas = {} - - if noise_distributions is None: - noise_distributions = {} - - if event_noise_distributions is None: - event_noise_distributions = {} - - self._reset_symbols() - self.sbml_parser_settings.setParseLog( - sbml.L3P_PARSE_LOG_AS_LOG10 if log_as_log10 else - sbml.L3P_PARSE_LOG_AS_LN - ) - self._process_sbml(constant_parameters) - if self.symbols.get(SymbolId.EVENT, False): - if compute_conservation_laws: - logger.warning( - 'Conservation laws are currently not supported for models ' - 'with events, and will be turned off.' - ) - compute_conservation_laws = False - - self._process_observables( - observables, - sigmas, - noise_distributions - ) - self._process_event_observables( - event_observables, - event_sigmas, - event_noise_distributions - ) - self._replace_compartments_with_volumes() - - self._clean_reserved_symbols() - self._process_time() - - ode_model = ODEModel( - verbose=verbose, - simplify=simplify, - cache_simplify=cache_simplify, - ) - ode_model.import_from_sbml_importer( - self, compute_cls=compute_conservation_laws) - return ode_model - - @log_execution_time('importing SBML', logger) - def _process_sbml(self, constant_parameters: List[str] = None) -> None: - """ - Read parameters, species, reactions, and so on from SBML model - - :param constant_parameters: - SBML Ids identifying constant parameters - """ - self.check_support() - self._gather_locals() - self._process_parameters(constant_parameters) - self._process_compartments() - self._process_species() - self._process_reactions() - self._process_rules() - self._process_initial_assignments() - self._process_species_references() - self._process_events() - - def check_support(self) -> None: - """ - Check whether all required SBML features are supported. - Also ensures that the SBML contains at least one reaction, or rate - rule, or assignment rule, to produce change in the system over time. - """ - - # Check for required but unsupported SBML extensions - if self.sbml_doc.getLevel() != 3 \ - and hasattr(self.sbml, 'all_elements_from_plugins') \ - and self.sbml.all_elements_from_plugins.getSize(): - raise SBMLException('SBML extensions are currently not supported!') - - if self.sbml_doc.getLevel() == 3: - # the "required" attribute is only available in SBML Level 3 - for i_plugin in range(self.sbml.getNumPlugins()): - plugin = self.sbml.getPlugin(i_plugin) - if plugin.getPackageName() in ('layout',): - # 'layout' plugin does not have the 'required' attribute - continue - if hasattr(plugin, 'getRequired') and not plugin.getRequired(): - # if not "required", this has no impact on model - # simulation, and we can safely ignore it - continue - # Check if there are extension elements. 
If not, we can safely - # ignore the enabled package - if plugin.getListOfAllElements(): - raise SBMLException( - f'Required SBML extension {plugin.getPackageName()} ' - f'is currently not supported!') - - if any(not rule.isAssignment() and not isinstance( - self.sbml.getElementBySId(rule.getVariable()), - (sbml.Compartment, sbml.Species, sbml.Parameter) - ) for rule in self.sbml.getListOfRules()): - raise SBMLException('Algebraic rules are currently not supported, ' - 'and rate rules are only supported for ' - 'species, compartments, and parameters.') - - if any(not (rule.isAssignment() or rule.isRate()) - and isinstance( - self.sbml.getElementBySId(rule.getVariable()), - (sbml.Compartment, sbml.Species, sbml.Parameter) - ) for rule in self.sbml.getListOfRules()): - raise SBMLException('Only assignment and rate rules are ' - 'currently supported for compartments, ' - 'species, and parameters!') - - if any(r.getFast() for r in self.sbml.getListOfReactions()): - raise SBMLException('Fast reactions are currently not supported!') - - # Check events for unsupported functionality - self.check_event_support() - - def check_event_support(self) -> None: - """ - Check possible events in the model, as AMICI currently does not support - - * delays in events - * priorities of events - * events fired at initial time - - Furthermore, event triggers are optional (e.g., if an event is fired at - initial time, no trigger function is necessary). - In this case, warn that this event will have no effect. - """ - for event in self.sbml.getListOfEvents(): - event_id = event.getId() - # Check for delays in events - delay = event.getDelay() - if delay is not None: - try: - delay_time = float(self._sympy_from_sbml_math(delay)) - if delay_time != 0: - raise ValueError - # `TypeError` would be raised in the above `float(...)` - # if the delay is not a fixed time - except (TypeError, ValueError): - raise SBMLException('Events with execution delays are ' - 'currently not supported in AMICI.') - # Check for priorities - if event.getPriority() is not None: - raise SBMLException(f'Event {event_id} has a priority ' - 'specified. This is currently not ' - 'supported in AMICI.') - - # check trigger - trigger_sbml = event.getTrigger() - if trigger_sbml is None: - logger.warning(f'Event {event_id} has no trigger, ' - 'so it will be skipped.') - continue - if trigger_sbml.getMath() is None: - logger.warning(f'Event {event_id} trigger has no ' - 'expression, so a dummy trigger will be set.') - - if not trigger_sbml.getPersistent(): - raise SBMLException( - f'Event {event_id} has a non-persistent trigger. ' - 'This is currently not supported in AMICI.' - ) - - @log_execution_time('gathering local SBML symbols', logger) - def _gather_locals(self) -> None: - """ - Populate self._local_symbols with all model entities. - - This is later used during sympification to avoid sympy builtins - shadowing model entities as well as to avoid possibly costly - symbolic substitutions - """ - self._gather_base_locals() - self._gather_dependent_locals() - - def _gather_base_locals(self): - """ - Populate self._local_symbols with pure symbol definitions that do not - depend on any other symbol. - """ - - special_symbols_and_funs = { - # oo is sympy infinity - 'INF': sp.oo, - 'NaN': sp.nan, - 'rem': sp.Mod, - 'time': symbol_with_assumptions('time'), - # SBML L3 explicitly defines this value, which is not equal - # to the most recent SI definition.
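An aside on why `_gather_locals` collects this symbol table at all (the table entries continue just below): without explicit `locals`, sympy maps some identifiers to builtins during sympification. A small standalone demonstration:

import sympy as sp

# without locals, 'E' becomes sympy's Euler constant, not a model symbol
expr = sp.sympify('E + 2')
print(expr.free_symbols)  # set(): E was parsed as exp(1)

# with an explicit local-symbol table, model identifiers stay plain symbols
locals_table = {'E': sp.Symbol('E')}
expr = sp.sympify('E + 2', locals=locals_table)
print(expr.free_symbols)  # {E}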
- 'avogadro': sp.Float(6.02214179e23), - 'exponentiale': sp.E, - } - for s, v in special_symbols_and_funs.items(): - self.add_local_symbol(s, v) - - for c in itt.chain(self.sbml.getListOfSpecies(), - self.sbml.getListOfParameters(), - self.sbml.getListOfCompartments()): - if not c.isSetId(): - continue - - self.add_local_symbol(c.getId(), _get_identifier_symbol(c)) - - for x_ref in _get_list_of_species_references(self.sbml): - if not x_ref.isSetId(): - continue - if x_ref.isSetStoichiometry() and not \ - self.is_assignment_rule_target(x_ref): - value = sp.Float(x_ref.getStoichiometry()) - else: - value = _get_identifier_symbol(x_ref) - - ia_sym = self._get_element_initial_assignment(x_ref.getId()) - if ia_sym is not None: - value = ia_sym - - self.add_local_symbol(x_ref.getId(), value) - - for r in self.sbml.getListOfReactions(): - for e in itt.chain(r.getListOfReactants(), r.getListOfProducts()): - if isinstance(e, sbml.SpeciesReference): - continue - - if not (e.isSetId() and e.isSetStoichiometry()) or \ - self.is_assignment_rule_target(e): - continue - - self.add_local_symbol(e.getId(), - sp.Float(e.getStoichiometry())) - - def _gather_dependent_locals(self): - """ - Populate self.local_symbols with symbol definitions that may depend on - other symbol definitions. - """ - for r in self.sbml.getListOfReactions(): - if not r.isSetId(): - continue - self.add_local_symbol( - r.getId(), - self._sympy_from_sbml_math(r.getKineticLaw()) - ) - - def add_local_symbol(self, key: str, value: sp.Expr): - """ - Add local symbols with some sanity checking for duplication which - would indicate redefinition of internals, which SBML permits, - but we don't. - - :param key: - local symbol key - - :param value: - local symbol value - """ - if key in self._local_symbols.keys(): - raise SBMLException( - f'AMICI tried to add a local symbol {key} with value {value}, ' - f'but {key} was already instantiated with ' - f'{self._local_symbols[key]}. This means that there ' - f'are multiple SBML elements with SId {key}, which is ' - f'invalid SBML. This can be fixed by renaming ' - f'the elements with SId {key}.' - ) - if key in {'True', 'False', 'true', 'false', 'pi'}: - raise SBMLException( - f'AMICI tried to add a local symbol {key} with value {value}, ' - f'but {key} is a reserved symbol in AMICI. This can be fixed ' - f'by renaming the element with SId {key}.' - ) - self._local_symbols[key] = value - - @log_execution_time('processing SBML compartments', logger) - def _process_compartments(self) -> None: - """ - Get compartment information, stoichiometric matrix and fluxes from - SBML model. - """ - compartments = self.sbml.getListOfCompartments() - self.compartments = {} - for comp in compartments: - init = sp.Float(1.0) - - if comp.isSetVolume(): - init = self._sympy_from_sbml_math(comp.getVolume()) - - ia_sym = self._get_element_initial_assignment(comp.getId()) - if ia_sym is not None: - init = ia_sym - - self.compartments[_get_identifier_symbol(comp)] = init - - @log_execution_time('processing SBML species', logger) - def _process_species(self) -> None: - """ - Get species information from SBML model. 
- """ - if self.sbml.isSetConversionFactor(): - conversion_factor = symbol_with_assumptions( - self.sbml.getConversionFactor() - ) - else: - conversion_factor = 1 - - for s in self.sbml.getListOfSpecies(): - if self.is_assignment_rule_target(s): - continue - self.symbols[SymbolId.SPECIES][_get_identifier_symbol(s)] = { - 'name': s.getName() if s.isSetName() else s.getId(), - 'compartment': _get_species_compartment_symbol(s), - 'constant': s.getConstant() or s.getBoundaryCondition(), - 'amount': s.getHasOnlySubstanceUnits(), - 'conversion_factor': symbol_with_assumptions( - s.getConversionFactor() - ) - if s.isSetConversionFactor() - else conversion_factor, - 'index': len(self.symbols[SymbolId.SPECIES]), - } - - self._convert_event_assignment_parameter_targets_to_species() - self._process_species_initial() - self._process_rate_rules() - - @log_execution_time('processing SBML species initials', logger) - def _process_species_initial(self): - """ - Extract initial values and initial assignments from species - """ - for species_variable in self.sbml.getListOfSpecies(): - initial = get_species_initial(species_variable) - - species_id = _get_identifier_symbol(species_variable) - # If species_id is a target of an AssignmentRule, species will be - # None, but we don't have to account for the initial definition - # of the species itself and SBML doesn't permit AssignmentRule - # targets to have InitialAssignments. - species = self.symbols[SymbolId.SPECIES].get(species_id, None) - - ia_initial = self._get_element_initial_assignment( - species_variable.getId() - ) - if ia_initial is not None: - if species and species['amount'] \ - and 'compartment' in species: - ia_initial *= self.compartments.get( - species['compartment'], species['compartment'] - ) - initial = ia_initial - if species: - species['init'] = initial - - # don't assign this since they need to stay in order - sorted_species = toposort_symbols(self.symbols[SymbolId.SPECIES], - 'init') - for species in self.symbols[SymbolId.SPECIES].values(): - species['init'] = smart_subs_dict(species['init'], - sorted_species, - 'init') - - @log_execution_time('processing SBML rate rules', logger) - def _process_rate_rules(self): - """ - Process rate rules for species, compartments and parameters. - Compartments and parameters with rate rules are implemented as species. - Note that, in the case of species, rate rules may describe the change - in amount, not concentration, of a species. - """ - rules = self.sbml.getListOfRules() - # compartments with rules are replaced with constants in the relevant - # equations during the _replace_in_all_expressions call inside - # _process_rules - for rule in rules: - if rule.getTypeCode() != sbml.SBML_RATE_RULE: - continue - - variable = symbol_with_assumptions(rule.getVariable()) - formula = self._sympy_from_sbml_math(rule) - if formula is None: - continue - - # Species rules are processed first, to avoid processing - # compartments twice (as compartments with rate rules are - # implemented as species). 
- ia_init = self._get_element_initial_assignment(rule.getVariable()) - if variable in self.symbols[SymbolId.SPECIES]: - init = self.symbols[SymbolId.SPECIES][variable]['init'] - name = None - - if variable in self.compartments: - init = self.compartments[variable] - name = str(variable) - del self.compartments[variable] - - elif variable in self.symbols[SymbolId.PARAMETER]: - init = self._sympy_from_sbml_math( - self.symbols[SymbolId.PARAMETER][variable]['value'], - ) - name = self.symbols[SymbolId.PARAMETER][variable]['name'] - del self.symbols[SymbolId.PARAMETER][variable] - - # parameter with initial assignment, cannot use - # self.initial_assignments as it is not filled at this - # point - elif ia_init is not None: - init = ia_init - par = self.sbml.getElementBySId(rule.getVariable()) - name = par.getName() if par.isSetName() else par.getId() - - self.add_d_dt(formula, variable, init, name) - - def add_d_dt( - self, - d_dt: sp.Expr, - variable: sp.Symbol, - variable0: Union[float, sp.Expr], - name: str, - ) -> None: - """ - Creates or modifies species, to implement rate rules for - compartments and species, respectively. - - :param d_dt: - The rate rule (or, right-hand side of an ODE). - - :param variable: - The subject of the rate rule. - - :param variable0: - The initial value of the variable. - - :param name: - Species name, only applicable if this function generates a new - species - """ - if variable in self.symbols[SymbolId.SPECIES]: - # only update dt if species was already generated - self.symbols[SymbolId.SPECIES][variable]['dt'] = d_dt - else: - # update initial values - for species_id, species in self.symbols[SymbolId.SPECIES].items(): - variable0 = smart_subs(variable0, species_id, species['init']) - - for species in self.symbols[SymbolId.SPECIES].values(): - species['init'] = smart_subs(species['init'], - variable, variable0) - - # add compartment/parameter species - self.symbols[SymbolId.SPECIES][variable] = { - 'name': name, - 'init': variable0, - 'amount': False, - 'conversion_factor': 1.0, - 'constant': False, - 'index': len(self.symbols[SymbolId.SPECIES]), - 'dt': d_dt, - } - - @log_execution_time('processing SBML parameters', logger) - def _process_parameters(self, - constant_parameters: List[str] = None) -> None: - """ - Get parameter information from SBML model. - - :param constant_parameters: - SBML Ids identifying constant parameters - """ - - if constant_parameters is None: - constant_parameters = [] - - # Ensure specified constant parameters exist in the model - for parameter in constant_parameters: - if not self.sbml.getParameter(parameter): - raise KeyError('Cannot make %s a constant parameter: ' - 'Parameter does not exist.' % parameter) - - fixed_parameters = [ - parameter - for parameter in self.sbml.getListOfParameters() - if parameter.getId() in constant_parameters - ] - for parameter in fixed_parameters: - if self._get_element_initial_assignment(parameter.getId()) is not \ - None or self.is_assignment_rule_target(parameter) or \ - self.is_rate_rule_target(parameter): - raise SBMLException( - f'Cannot turn parameter {parameter.getId()} into a ' - 'constant/fixed parameter since it either has an ' - 'initial assignment or is the target of an assignment or ' - 'rate rule.' 
- ) - - parameters = [ - parameter for parameter - in self.sbml.getListOfParameters() - if parameter.getId() not in constant_parameters - and self._get_element_initial_assignment(parameter.getId()) is None - and not self.is_assignment_rule_target(parameter) - ] - - loop_settings = { - SymbolId.PARAMETER: {'var': parameters, 'name': 'parameter'}, - SymbolId.FIXED_PARAMETER: {'var': fixed_parameters, - 'name': 'fixed_parameter'} - } - - for partype, settings in loop_settings.items(): - for par in settings['var']: - self.symbols[partype][_get_identifier_symbol(par)] = { - 'name': par.getName() if par.isSetName() else par.getId(), - 'value': par.getValue() - } - - @log_execution_time('processing SBML reactions', logger) - def _process_reactions(self): - """ - Get reactions from SBML model. - """ - reactions = self.sbml.getListOfReactions() - # nr (number of reactions) should have a minimum length of 1. This is - # to ensure that, if there are no reactions, the stoichiometric matrix - # and flux vector multiply to a zero vector with dimensions (nx, 1). - nr = max(1, len(reactions)) - nx = len(self.symbols[SymbolId.SPECIES]) - # stoichiometric matrix - self.stoichiometric_matrix = sp.SparseMatrix(sp.zeros(nx, nr)) - self.flux_vector = sp.zeros(nr, 1) - # Use reaction IDs as IDs for flux expressions (note that prior to SBML - # level 3 version 2 the ID attribute was not mandatory and may be - # unset) - self.flux_ids = [ - f"flux_{reaction.getId()}" if reaction.isSetId() - else f"flux_r{reaction_idx}" - for reaction_idx, reaction in enumerate(reactions) - ] or ['flux_r0'] - - reaction_ids = [ - reaction.getId() for reaction in reactions - if reaction.isSetId() - ] - - for reaction_index, reaction in enumerate(reactions): - for element_list, sign in [(reaction.getListOfReactants(), -1), - (reaction.getListOfProducts(), 1)]: - for element in element_list: - stoichiometry = self._get_element_stoichiometry( - element - ) - sbml_species = self.sbml.getSpecies(element.getSpecies()) - if self.is_assignment_rule_target(sbml_species): - continue - species_id = _get_identifier_symbol(sbml_species) - species = self.symbols[SymbolId.SPECIES][species_id] - - if species['constant']: - continue - - # Division by species compartment size (to find the - # rate of change in species concentration) now occurs - # in the `dx_dt` method in "ode_export.py", which also - # accounts for possibly variable compartments. - self.stoichiometric_matrix[species['index'], - reaction_index] += \ - sign * stoichiometry * species['conversion_factor'] - if reaction.isSetId(): - sym_math = self._local_symbols[reaction.getId()] - else: - sym_math = self._sympy_from_sbml_math(reaction.getKineticLaw()) - - self.flux_vector[reaction_index] = sym_math - if any( - str(symbol) in reaction_ids - for symbol in self.flux_vector[reaction_index].free_symbols - ): - raise SBMLException( - 'Kinetic laws involving reaction ids are currently' - ' not supported!' - ) - - @log_execution_time('processing SBML rules', logger) - def _process_rules(self) -> None: - """ - Process Rules defined in the SBML model. 
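Referring back to `_process_reactions` above: for a single reaction A -> 2 B with mass-action flux k*A, the assembled structures reduce to the following toy computation (a sketch, not the importer's code path):

import sympy as sp

A, B, k = sp.symbols('A B k')
species_index = {A: 0, B: 1}

nx, nr = 2, 1
stoichiometric_matrix = sp.SparseMatrix(sp.zeros(nx, nr))
flux_vector = sp.zeros(nr, 1)

# reactants enter with sign -1, products with +1
for species, stoichiometry, sign in [(A, 1, -1), (B, 2, +1)]:
    stoichiometric_matrix[species_index[species], 0] += sign * stoichiometry

flux_vector[0] = k * A

# dx/dt = S * w:  [-k*A, 2*k*A]
print(stoichiometric_matrix * flux_vector)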
- """ - for rule in self.sbml.getListOfRules(): - # rate rules are processed in _process_species - if rule.getTypeCode() == sbml.SBML_RATE_RULE: - continue - - sbml_var = self.sbml.getElementBySId(rule.getVariable()) - sym_id = symbol_with_assumptions(rule.getVariable()) - formula = self._sympy_from_sbml_math(rule) - if formula is None: - continue - - if isinstance(sbml_var, sbml.Species): - self.species_assignment_rules[sym_id] = formula - - elif isinstance(sbml_var, sbml.Compartment): - self.compartment_assignment_rules[sym_id] = formula - self.compartments[sym_id] = formula - - elif isinstance(sbml_var, sbml.Parameter): - self.parameter_assignment_rules[sym_id] = formula - - self.symbols[SymbolId.EXPRESSION][sym_id] = { - 'name': str(sym_id), - 'value': formula - } - - self.symbols[SymbolId.EXPRESSION] = toposort_symbols( - self.symbols[SymbolId.EXPRESSION], 'value' - ) - - # expressions must not occur in definition of x0 - for species in self.symbols[SymbolId.SPECIES].values(): - species['init'] = self._make_initial( - smart_subs_dict(species['init'], - self.symbols[SymbolId.EXPRESSION], - 'value') - ) - - def _process_time(self) -> None: - """ - Convert time_symbol into cpp variable. - """ - sbml_time_symbol = symbol_with_assumptions('time') - amici_time_symbol = symbol_with_assumptions('t') - self.amici_time_symbol = amici_time_symbol - - self._replace_in_all_expressions(sbml_time_symbol, amici_time_symbol) - - def _convert_event_assignment_parameter_targets_to_species(self): - """ - Convert parameters that are targets of event assignments to species. - - This is for the convenience of only implementing event assignments for - "species". - """ - parameter_targets = \ - _collect_event_assignment_parameter_targets(self.sbml) - for parameter_target in parameter_targets: - # Parameter rate rules already exist as species. - if parameter_target in self.symbols[SymbolId.SPECIES]: - continue - if parameter_target in self.parameter_assignment_rules: - raise SBMLException( - 'AMICI does not currently support models with SBML events ' - 'that affect parameters that are also the target of ' - 'assignment rules.' - ) - parameter_def = None - for symbol_id in {SymbolId.PARAMETER, SymbolId.FIXED_PARAMETER}: - if parameter_target in self.symbols[symbol_id]: - # `parameter_target` should only exist in one of the - # `symbol_id` dictionaries. - if parameter_def is not None: - raise AssertionError( - 'Unexpected error. The parameter target of an ' - 'event assignment was processed twice.' - ) - parameter_def = \ - self.symbols[symbol_id].pop(parameter_target) - if parameter_def is None: - # this happens for parameters that have initial assignments - # or are assignment rule targets - par = self.sbml.getElementBySId(str(parameter_target)) - ia_init = self._get_element_initial_assignment( - par.getId() - ) - parameter_def = { - 'name': par.getName() if par.isSetName() else par.getId(), - 'value': par.getValue() if ia_init is None else ia_init - } - # Fixed parameters are added as species such that they can be - # targets of events. 
- self.symbols[SymbolId.SPECIES][parameter_target] = { - 'name': parameter_def['name'], - 'init': sp.Float(parameter_def['value']), - # 'compartment': None, # can ignore for amounts - 'constant': False, - 'amount': True, - # 'conversion_factor': 1.0, # can be ignored - 'index': len(self.symbols[SymbolId.SPECIES]), - 'dt': sp.Float(0), - } - - @log_execution_time('processing SBML events', logger) - def _process_events(self) -> None: - """Process SBML events.""" - events = self.sbml.getListOfEvents() - - def get_empty_bolus_value() -> sp.Float: - """ - Used in the event update vector for species that are not affected - by the event. - """ - return sp.Symbol('AMICI_EMTPY_BOLUS') - - # Used to update species concentrations when an event affects a - # compartment. - concentration_species_by_compartment = { - symbol_with_assumptions(c.getId()): [] - for c in self.sbml.getListOfCompartments() - } - for species, species_def in self.symbols[SymbolId.SPECIES].items(): - if ( - # Species is a concentration - not species_def.get('amount', True) and - # Species has a compartment - 'compartment' in species_def - ): - concentration_species_by_compartment[ - species_def['compartment'] - ].append(species) - - for ievent, event in enumerate(events): - # get the event id (which is optional unfortunately) - event_id = event.getId() - if event_id is None or event_id == '': - event_id = f'event_{ievent}' - event_sym = sp.Symbol(event_id) - - # get and parse the trigger function - trigger_sbml = event.getTrigger() - trigger_sym = self._sympy_from_sbml_math(trigger_sbml) - trigger = _parse_event_trigger(trigger_sym) - - # Currently, all event assignment targets must exist in - # self.symbols[SymbolId.SPECIES] - state_vector = list(self.symbols[SymbolId.SPECIES].keys()) - - # parse the boluses / event assignments - bolus = [get_empty_bolus_value() for _ in state_vector] - event_assignments = event.getListOfEventAssignments() - compartment_event_assignments = set() - for event_assignment in event_assignments: - variable_sym = \ - symbol_with_assumptions(event_assignment.getVariable()) - if event_assignment.getMath() is None: - # Ignore event assignments with no change in value. - continue - formula = self._sympy_from_sbml_math(event_assignment) - try: - # Try to find the species in the state vector. - index = state_vector.index(variable_sym) - bolus[index] = formula - except ValueError: - raise SBMLException( - 'Could not process event assignment for ' - f'{str(variable_sym)}. AMICI currently only allows ' - 'event assignments to species; parameters; or, ' - 'compartments with rate rules, at the moment.' - ) - try: - # Try working with the formula now to detect errors - # here instead of at multiple points downstream. - _ = formula - variable_sym - except TypeError: - raise SBMLException( - 'Could not process event assignment for ' - f'{str(variable_sym)}. AMICI only allows symbolic ' - 'expressions as event assignments.' - ) - if variable_sym in concentration_species_by_compartment: - compartment_event_assignments.add(variable_sym) - - for comp, assignment in \ - self.compartment_assignment_rules.items(): - if variable_sym not in assignment.free_symbols: - continue - compartment_event_assignments.add(comp) - - # Update the concentration of species with concentration units - # in compartments that were affected by the event assignments. 
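A worked toy example of the bolus convention used in `_process_events`: an event assignment x := 2*x is stored as the increment that the solver later adds to the current state value:

import sympy as sp

x = sp.Symbol('x')

assignment_formula = 2 * x   # the event assigns x := 2*x
bolus = assignment_formula

# the solver adds the bolus to the current value, so subtract the state:
# x_new = x + bolus  =>  bolus = 2*x - x = x
bolus -= x
print(sp.simplify(bolus))  # x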
- for compartment_sym in compartment_event_assignments: - for species_sym in concentration_species_by_compartment[ - compartment_sym - ]: - # If the species was not affected by an event assignment - # then the old value should be updated. - if ( - bolus[state_vector.index(species_sym)] - == get_empty_bolus_value() - ): - species_value = species_sym - # else the species was affected by an event assignment, - # hence the updated value should be updated further. - else: - species_value = bolus[state_vector.index(species_sym)] - # New species value is old amount / new volume. - bolus[state_vector.index(species_sym)] = ( - species_value * compartment_sym / formula - ) - - # Subtract the current species value from each species with an - # update, as the bolus will be added on to the current species - # value during simulation. - for index in range(len(bolus)): - if bolus[index] != get_empty_bolus_value(): - bolus[index] -= state_vector[index] - bolus[index] = bolus[index].subs(get_empty_bolus_value(), - sp.Float(0.0)) - - self.symbols[SymbolId.EVENT][event_sym] = { - 'name': event_id, - 'value': trigger, - 'state_update': sp.MutableDenseMatrix(bolus), - 'initial_value': - trigger_sbml.getInitialValue() if trigger_sbml is not None - else True, - } - - @log_execution_time('processing SBML observables', logger) - def _process_observables( - self, - observables: Union[Dict[str, Dict[str, str]], None], - sigmas: Dict[str, Union[str, float]], - noise_distributions: Dict[str, str] - ) -> None: - """ - Perform symbolic computations required for observable and objective - function evaluation. - - :param observables: - dictionary(observableId: {'name':observableName - (optional), 'formula':formulaString)}) - to be added to the model - - :param sigmas: - dictionary(observableId: sigma value or (existing) - parameter name) - - :param noise_distributions: - dictionary(observableId: noise type) - See :py:func:`sbml2amici`. - """ - - _validate_observables(observables, sigmas, noise_distributions, - events=False) - - # add user-provided observables or make all species, and compartments - # with assignment rules, observable - if observables: - # gather local symbols before parsing observable and sigma formulas - for obs in observables.keys(): - self.add_local_symbol(obs, symbol_with_assumptions(obs)) - - self.symbols[SymbolId.OBSERVABLE] = { - symbol_with_assumptions(obs): { - 'name': definition.get('name', f'y{iobs}'), - 'value': self._sympy_from_sbml_math( - definition['formula'] - ), - 'transformation': - noise_distribution_to_observable_transformation( - noise_distributions.get(obs, 'normal') - ) - } - for iobs, (obs, definition) in enumerate(observables.items()) - } - # check for nesting of observables (unsupported) - observable_syms = set(self.symbols[SymbolId.OBSERVABLE].keys()) - for obs in self.symbols[SymbolId.OBSERVABLE].values(): - if any(sym in observable_syms - for sym in obs['value'].free_symbols): - raise ValueError( - "Nested observables are not supported, " - f"but observable `{obs['name']} = {obs['value']}` " - "references another observable." 
) - elif observables is None: - self._generate_default_observables() - - _check_symbol_nesting(self.symbols[SymbolId.OBSERVABLE], - 'observable') - - self._process_log_likelihood(sigmas, noise_distributions) - - @log_execution_time('processing SBML event observables', logger) - def _process_event_observables( - self, - event_observables: Dict[str, Dict[str, str]], - event_sigmas: Dict[str, Union[str, float]], - event_noise_distributions: Dict[str, str] - ) -> None: - """ - Perform symbolic computations required for observable and objective - function evaluation. - - :param event_observables: - See :py:func:`sbml2amici`. - - :param event_sigmas: - See :py:func:`sbml2amici`. - - :param event_noise_distributions: - See :py:func:`sbml2amici`. - """ - if event_observables is None: - return - - _validate_observables(event_observables, event_sigmas, - event_noise_distributions, - events=True) - - # gather local symbols before parsing observable and sigma formulas - for obs, definition in event_observables.items(): - self.add_local_symbol(obs, symbol_with_assumptions(obs)) - # check corresponding event exists - if sp.Symbol(definition['event']) not in \ - self.symbols[SymbolId.EVENT]: - raise ValueError( - 'Could not find an event with the event identifier ' - f'{definition["event"]} for the event observable with name ' - f'{definition["name"]}.' - ) - - self.symbols[SymbolId.EVENT_OBSERVABLE] = { - symbol_with_assumptions(obs): { - 'name': definition.get('name', f'z{iobs}'), - 'value': self._sympy_from_sbml_math( - definition['formula'] - ), - 'event': sp.Symbol(definition.get('event')), - 'transformation': - noise_distribution_to_observable_transformation( - event_noise_distributions.get(obs, 'normal') - ) - } - for iobs, (obs, definition) in - enumerate(event_observables.items()) - } - - wrong_t = sp.Symbol('t') - for eo in self.symbols[SymbolId.EVENT_OBSERVABLE].values(): - if eo['value'].has(wrong_t): - warnings.warn(f'Event observable {eo["name"]} uses `t` in ' - 'its formula, which is not the time variable. ' - 'For the time variable, please use `time` ' - 'instead!') - - # check for nesting of observables (unsupported) - _check_symbol_nesting(self.symbols[SymbolId.EVENT_OBSERVABLE], - 'eventObservable') - - self._process_log_likelihood(event_sigmas, event_noise_distributions, - events=True) - self._process_log_likelihood(event_sigmas, event_noise_distributions, - events=True, event_reg=True) - - def _generate_default_observables(self): - """ - Generate default observables from species, compartments and - (initial) assignment rules. - """ - self.symbols[SymbolId.OBSERVABLE] = { - symbol_with_assumptions(f'y{species_id}'): { - 'name': specie['name'], - 'value': species_id - } - for species_id, specie - in self.symbols[SymbolId.SPECIES].items() - } - - for variable, formula in itt.chain( - self.parameter_assignment_rules.items(), - self.initial_assignments.items(), - self.compartment_assignment_rules.items(), - self.species_assignment_rules.items(), - self.compartments.items() - ): - symbol = symbol_with_assumptions(f'y{variable}') - # Assignment rules take precedence over compartment volume - # definitions, so they need to be evaluated first. - # Species assignment rules always overwrite.
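For reference, event observables are declared like regular observables plus an 'event' key; a hypothetical example of the `event_observables` argument processed in `_process_event_observables` above (all names are made up, and 'event_1' must match an existing event id):

event_observables = {
    'z_at_trigger': {
        'name': 'species A at event time',
        'event': 'event_1',
        'formula': 'species_a',
    }
}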
- if symbol in self.symbols[SymbolId.OBSERVABLE] \ - and variable not in self.species_assignment_rules: - continue - self.symbols[SymbolId.OBSERVABLE][symbol] = { - 'name': str(variable), 'value': formula - } - - def _process_log_likelihood(self, - sigmas: Dict[str, Union[str, float]], - noise_distributions: Dict[str, str], - events: bool = False, - event_reg: bool = False): - """ - Perform symbolic computations required for objective function - evaluation. - - :param sigmas: - See :py:func:`SBMLImporter._process_observables` - - :param noise_distributions: - See :py:func:`SBMLImporter._process_observables` - - :param events: - indicates whether the passed definitions are for observables - (False) or for event observables (True). - - :param event_reg: - indicates whether log-likelihoods definitons should be processed - for event observable regularization (Jrz). If this is activated, - measurements are substituted by 0 and the observable by the - respective regularization symbol. - """ - - if events: - if event_reg: - obs_symbol = SymbolId.EVENT_OBSERVABLE - sigma_symbol = SymbolId.SIGMAZ - llh_symbol = SymbolId.LLHRZ - else: - obs_symbol = SymbolId.EVENT_OBSERVABLE - sigma_symbol = SymbolId.SIGMAZ - llh_symbol = SymbolId.LLHZ - else: - assert not event_reg - obs_symbol = SymbolId.OBSERVABLE - sigma_symbol = SymbolId.SIGMAY - llh_symbol = SymbolId.LLHY - - for obs_id, obs in self.symbols[obs_symbol].items(): - obs['measurement_symbol'] = generate_measurement_symbol(obs_id) - if event_reg: - obs['reg_symbol'] = generate_regularization_symbol(obs_id) - - if not event_reg: - self.symbols[sigma_symbol] = { - symbol_with_assumptions(f'sigma_{obs_id}'): { - 'name': f'sigma_{obs["name"]}', - 'value': self._sympy_from_sbml_math( - sigmas.get(str(obs_id), '1.0') - ) - } - for obs_id, obs in self.symbols[obs_symbol].items() - } - - self.symbols[llh_symbol] = {} - for (obs_id, obs), (sigma_id, sigma) in zip( - self.symbols[obs_symbol].items(), - self.symbols[sigma_symbol].items() - ): - symbol = symbol_with_assumptions(f'J{obs_id}') - dist = noise_distributions.get(str(obs_id), 'normal') - cost_fun = noise_distribution_to_cost_function(dist)(obs_id) - value = sp.sympify(cost_fun, locals=dict(zip( - _get_str_symbol_identifiers(obs_id), - (obs_id, obs['measurement_symbol'], sigma_id) - ))) - if event_reg: - value = value.subs(obs['measurement_symbol'], 0.0) - value = value.subs(obs_id, obs['reg_symbol']) - self.symbols[llh_symbol][symbol] = { - 'name': f'J{obs["name"]}', - 'value': value, - 'dist': dist, - } - - @log_execution_time('processing SBML initial assignments', logger) - def _process_initial_assignments(self): - """ - Accounts for initial assignments of parameters and species - references. Initial assignments for species and compartments are - processed in :py:func:`amici.SBMLImporter._process_initial_species` and - :py:func:`amici.SBMLImporter._process_compartments` respectively. 
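For the default 'normal' noise model, the cost function assembled in `_process_log_likelihood` corresponds to the Gaussian negative log-likelihood; a sketch with illustrative symbol names (y: observable, m: measurement, sigma: standard deviation):

import sympy as sp

y, m = sp.symbols('y m')
sigma = sp.Symbol('sigma', positive=True)

# Gaussian negative log-likelihood for a single data point
J = sp.Rational(1, 2) * sp.log(2 * sp.pi * sigma**2) \
    + sp.Rational(1, 2) * ((y - m) / sigma) ** 2
print(sp.simplify(J))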
- """ - for ia in self.sbml.getListOfInitialAssignments(): - identifier = _get_identifier_symbol(ia) - if identifier in itt.chain(self.symbols[SymbolId.SPECIES], - self.compartments): - continue - - sym_math = self._get_element_initial_assignment(ia.getId()) - if sym_math is None: - continue - - sym_math = self._make_initial(smart_subs_dict( - sym_math, self.symbols[SymbolId.EXPRESSION], 'value' - )) - self.initial_assignments[_get_identifier_symbol(ia)] = sym_math - - # sort and flatten - self.initial_assignments = toposort_symbols(self.initial_assignments) - for ia_id, ia in self.initial_assignments.items(): - self.initial_assignments[ia_id] = smart_subs_dict( - ia, self.initial_assignments - ) - - for identifier, sym_math in list(self.initial_assignments.items()): - self._replace_in_all_expressions(identifier, sym_math) - - @log_execution_time('processing SBML species references', logger) - def _process_species_references(self): - """ - Replaces species references that define anything but stoichiometries. - - Species references for stoichiometries are processed in - :py:func:`amici.SBMLImporter._process_reactions`. - """ - # doesnt look like there is a better way to get hold of those lists: - species_references = _get_list_of_species_references(self.sbml) - for species_reference in species_references: - if hasattr(species_reference, 'getStoichiometryMath') and \ - species_reference.getStoichiometryMath() is not None: - raise SBMLException('StoichiometryMath is currently not ' - 'supported for species references.') - if species_reference.getId() == '': - continue - - stoich = self._get_element_stoichiometry(species_reference) - self._replace_in_all_expressions( - _get_identifier_symbol(species_reference), - self._sympy_from_sbml_math(stoich) - ) - - def _make_initial(self, sym_math: Union[sp.Expr, None, float] - ) -> Union[sp.Expr, None, float]: - """ - Transforms an expression to its value at the initial time point by - replacing species by their initial values. - - :param sym_math: - symbolic expression - :return: - transformed expression - """ - - if not isinstance(sym_math, sp.Expr): - return sym_math - - for species_id, species in self.symbols[SymbolId.SPECIES].items(): - if 'init' in species: - sym_math = smart_subs(sym_math, species_id, species['init']) - - sym_math = smart_subs(sym_math, self._local_symbols['time'], - sp.Float(0)) - - return sym_math - - def process_conservation_laws(self, ode_model) -> None: - """ - Find conservation laws in reactions and species. - - :param ode_model: - ODEModel object with basic definitions - """ - conservation_laws = [] - - # Create conservation laws for constant species - species_solver = _add_conservation_for_constant_species( - ode_model, conservation_laws - ) - # Non-constant species processed here - if "AMICI_EXPERIMENTAL_SBML_NONCONST_CLS" in os.environ \ - or "GITHUB_ACTIONS" in os.environ: - species_solver = list(set( - self._add_conservation_for_non_constant_species( - ode_model, conservation_laws)) & set(species_solver)) - - # Check, whether species_solver is empty now. 
As AMICI currently
-        # cannot handle ODEs without species, conservation laws must be
-        # switched off in this case
-        if not len(species_solver):
-            conservation_laws = []
-            species_solver = list(range(ode_model.num_states_rdata()))
-
-        # prune out species from the stoichiometric matrix
-        self.stoichiometric_matrix = \
-            self.stoichiometric_matrix[species_solver, :]
-
-        # add the found CLs to the ode_model
-        for cl in conservation_laws:
-            ode_model.add_conservation_law(**cl)
-
-    def _get_conservation_laws_demartino(
-            self,
-            ode_model: ODEModel,
-    ) -> List[Tuple[int, List[int], List[float]]]:
-        """Identify conservation laws based on the algorithm by De Martino
-        et al. (see conserved_quantities_demartino.py).
-
-        :param ode_model: Model for which to compute conserved quantities
-        :returns: List of one tuple per conservation law, each containing:
-            (0) the index of the (solver-)species to eliminate,
-            (1) (solver-)indices of all species engaged in the conserved
-            quantity (including the eliminated one)
-            (2) coefficients for the species in (1)
-        """
-        from .conserved_quantities_demartino \
-            import compute_moiety_conservation_laws
-
-        try:
-            stoichiometric_list = [
-                float(entry) for entry in self.stoichiometric_matrix.T.flat()
-            ]
-        except TypeError:
-            # Due to the numerical algorithm currently used to identify
-            # conserved quantities, we can't have symbols in the
-            # stoichiometric matrix
-            warnings.warn("Conservation laws for non-constant species in "
-                          "combination with parameterized stoichiometric "
-                          "coefficients are not currently supported "
-                          "and will be turned off.")
-            return []
-
-        if any(rule.getTypeCode() == sbml.SBML_RATE_RULE
-               for rule in self.sbml.getListOfRules()):
-            # see SBML semantic test suite, case 33 for an example
-            warnings.warn("Conservation laws for non-constant species in "
-                          "models with RateRules are not currently supported "
-                          "and will be turned off.")
-            return []
-
-        cls_state_idxs, cls_coefficients = compute_moiety_conservation_laws(
-            stoichiometric_list, *self.stoichiometric_matrix.shape,
-            rng_seed=32,
-            species_names=[str(x.get_id()) for x in ode_model._states]
-        )
-
-        # Sparsify conserved quantities
-        # ``compute_moiety_conservation_laws`` identifies conserved quantities
-        # with positive coefficients. The resulting system is, therefore,
-        # often non-sparse. This leads to circular dependencies in the
-        # state expressions of eliminated states. The identified conserved
-        # quantities are linearly independent. We can construct `A` as in
-        # `A * x0 = total_cl` and bring it to reduced row echelon form. The
-        # pivot species are the ones to be eliminated. The resulting state
-        # expressions are sparse and void of any circular dependencies.
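-        # Illustrative sketch (hypothetical 3-state system, not from the
-        # original code): for the CLs x0 + x1 + x2 = T0 and x1 + x2 = T1,
-        # A = [[1, 1, 1], [0, 1, 1]] and A.rref() yields
-        # ([[1, 0, 0], [0, 1, 1]], (0, 1)): the pivot states x0, x1 are
-        # eliminated via the sparser expressions x0 = T0 - T1 and
-        # x1 = T1 - x2, with no circular dependency between them.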
- A = sp.zeros(len(cls_coefficients), len(ode_model._states)) - for i_cl, (cl, coefficients) in enumerate(zip(cls_state_idxs, - cls_coefficients)): - for i, c in zip(cl, coefficients): - A[i_cl, i] = sp.Rational(c) - rref, pivots = A.rref() - - raw_cls = [] - for i_cl, target_state_model_idx in enumerate(pivots): - # collect values for species engaged in the current CL - state_idxs = [i for i, coeff in enumerate(rref[i_cl, :]) - if coeff] - coefficients = [coeff for coeff in rref[i_cl, :] if coeff] - raw_cls.append((target_state_model_idx, state_idxs, - coefficients),) - return raw_cls - - def _get_conservation_laws_rref( - self - ) -> List[Tuple[int, List[int], List[float]]]: - """Identify conservation laws based on left nullspace of the - stoichiometric matrix, computed through (numeric) Gaussian elimination - - :returns: List of one tuple per conservation law, each containing: - (0) the index of the (solver-)species to eliminate, - (1) (solver-)indices of all species engaged in the conserved - quantity (including the eliminated one) - (2) coefficients for the species in (1) - """ - import numpy as np - from numpy.linalg import matrix_rank - from .conserved_quantities_rref import nullspace_by_rref, rref - - try: - S = np.asarray(self.stoichiometric_matrix, dtype=float) - except TypeError: - # Due to the numerical algorithm currently used to identify - # conserved quantities, we can't have symbols in the - # stoichiometric matrix - warnings.warn("Conservation laws for non-constant species in " - "combination with parameterized stoichiometric " - "coefficients are not currently supported " - "and will be turned off.") - return [] - - if any(rule.getTypeCode() == sbml.SBML_RATE_RULE - for rule in self.sbml.getListOfRules()): - # see SBML semantic test suite, case 33 for an example - warnings.warn("Conservation laws for non-constant species in " - "models with RateRules are not currently supported " - "and will be turned off.") - return [] - - # Determine rank via SVD - rank = matrix_rank(S) if S.shape[0] else 0 - if rank == S.shape[0]: - return [] - kernel = nullspace_by_rref(S.T) - # Check dimensions - due to numerical errors, nullspace_by_rref may - # fail in certain situations - if kernel.shape[0] != S.shape[0] - rank: - raise AssertionError( - "Failed to determine all conserved quantities " - f"(found {kernel.shape[0]}, expected {S.shape[0] - rank}). " - "Try another algorithm, disable detection of conservation " - "laws, or submit a bug report along with the model." 
-            )
-        kernel = rref(kernel)
-        raw_cls = []
-        for row in kernel:
-            state_idxs = [i for i, coeff in enumerate(row) if coeff]
-            coefficients = [coeff for coeff in row if coeff]
-            raw_cls.append((state_idxs[0], state_idxs, coefficients),)
-
-        return raw_cls
-
-    def _add_conservation_for_non_constant_species(
-            self,
-            ode_model: ODEModel,
-            conservation_laws: List[ConservationLaw]
-    ) -> List[int]:
-        """Add non-constant species to conservation laws
-
-        :param ode_model:
-            ODEModel object with basic definitions
-        :param conservation_laws:
-            List of already known conservation laws
-        :returns:
-            List of species indices which later remain in the ODE solver
-        """
-        # indices of retained species
-        species_solver = list(range(ode_model.num_states_rdata()))
-
-        algorithm = os.environ.get("AMICI_EXPERIMENTAL_SBML_NONCONST_CLS", "")
-        if algorithm.lower() == "demartino":
-            raw_cls = self._get_conservation_laws_demartino(ode_model)
-        else:
-            raw_cls = self._get_conservation_laws_rref()
-
-        if not raw_cls:
-            # no conservation laws identified
-            return species_solver
-
-        species_to_be_removed = {x[0] for x in raw_cls}
-
-        # keep new conservation laws separate until we know everything worked
-        new_conservation_laws = []
-        # previously removed constant species
-        eliminated_state_ids = {cl['state'] for cl in conservation_laws}
-
-        all_state_ids = [x.get_id() for x in ode_model._states]
-        all_compartment_sizes = [
-            sp.Integer(1)
-            if self.symbols[SymbolId.SPECIES][state_id]['amount']
-            else self.compartments[
-                self.symbols[SymbolId.SPECIES][state_id]['compartment']
-            ]
-            for state_id in all_state_ids
-        ]
-
-        # iterate over the list of conservation laws and create symbolic
-        # expressions
-        for target_state_model_idx, state_idxs, coefficients in raw_cls:
-            if all_state_ids[target_state_model_idx] in eliminated_state_ids:
-                # constant state, already eliminated
-                continue
-            # collect values for species engaged in the current CL
-            state_ids = [all_state_ids[i_state] for i_state in state_idxs]
-            compartment_sizes = [all_compartment_sizes[i] for i in state_idxs]
-
-            target_state_id = all_state_ids[target_state_model_idx]
-            total_abundance = symbol_with_assumptions(f'tcl_{target_state_id}')
-
-            new_conservation_laws.append({
-                'state': target_state_id,
-                'total_abundance': total_abundance,
-                'coefficients': {
-                    state_id: coeff * compartment
-                    for state_id, coeff, compartment
-                    in zip(state_ids, coefficients, compartment_sizes)
-                },
-            })
-            species_to_be_removed.add(target_state_model_idx)
-
-        conservation_laws.extend(new_conservation_laws)
-
-        # list of species that are not determined by conservation laws
-        return [ix for ix in species_solver if ix not in species_to_be_removed]
-
-    def _replace_compartments_with_volumes(self):
-        """
-        Replaces compartment symbols in expressions with their respective
-        (possibly variable) volumes.
-        """
-        for comp, vol in self.compartments.items():
-            if comp in self.symbols[SymbolId.SPECIES]:
-                # for compartments with rate rules, the volume is only the
-                # initial value
-                for species in self.symbols[SymbolId.SPECIES].values():
-                    if isinstance(species['init'], sp.Expr):
-                        species['init'] = smart_subs(species['init'],
-                                                     comp, vol)
-                continue
-            self._replace_in_all_expressions(comp, vol)
-
-    def _replace_in_all_expressions(self,
-                                    old: sp.Symbol,
-                                    new: sp.Expr,
-                                    replace_identifiers=False) -> None:
-        """
-        Replace 'old' by 'new' in all symbolic expressions.
-
-        :param old:
-            symbolic variable to be replaced
-
-        :param new:
-            replacement symbolic expression
-
-        :param replace_identifiers:
-            whether to also replace `old` where it occurs as a dictionary key
-            or symbol identifier, not only inside expression values
-        """
-        fields = [
-            'stoichiometric_matrix', 'flux_vector',
-        ]
-        for field in fields:
-            if field in dir(self):
-                self.__setattr__(field, smart_subs(
-                    self.__getattribute__(field), old, new
-                ))
-
-        dictfields = [
-            'compartment_assignment_rules', 'parameter_assignment_rules',
-            'initial_assignments'
-        ]
-        for dictfield in dictfields:
-            d = getattr(self, dictfield)
-
-            # replace identifiers
-            if old in d and replace_identifiers:
-                d[new] = d[old]
-                del d[old]
-
-            if dictfield == 'initial_assignments':
-                tmp_new = self._make_initial(new)
-            else:
-                tmp_new = new
-
-            # replace values
-            for k in d:
-                d[k] = smart_subs(d[k], old, tmp_new)
-
-        # replace in identifiers
-        if replace_identifiers:
-            for symbol in [SymbolId.EXPRESSION, SymbolId.SPECIES]:
-                # completely recreate the dict to keep ordering consistent
-                if old not in self.symbols[symbol]:
-                    continue
-                self.symbols[symbol] = {
-                    smart_subs(k, old, new): v
-                    for k, v in self.symbols[symbol].items()
-                }
-
-            for symbol in [SymbolId.OBSERVABLE, SymbolId.LLHY,
-                           SymbolId.SIGMAY]:
-                if old not in self.symbols[symbol]:
-                    continue
-                self.symbols[symbol][new] = self.symbols[symbol][old]
-                del self.symbols[symbol][old]
-
-        # replace in values
-        for symbol in [SymbolId.OBSERVABLE, SymbolId.LLHY, SymbolId.LLHZ,
-                       SymbolId.SIGMAY, SymbolId.SIGMAZ, SymbolId.EXPRESSION,
-                       SymbolId.EVENT, SymbolId.EVENT_OBSERVABLE]:
-            if not self.symbols.get(symbol, None):
-                continue
-            for element in self.symbols[symbol].values():
-                element['value'] = smart_subs(element['value'], old, new)
-
-        # replace in event state updates (boluses)
-        if self.symbols.get(SymbolId.EVENT, False):
-            for event in self.symbols[SymbolId.EVENT].values():
-                for index in range(len(event['state_update'])):
-                    event['state_update'][index] = \
-                        smart_subs(event['state_update'][index], old, new)
-
-        if SymbolId.SPECIES in self.symbols:
-            for species in self.symbols[SymbolId.SPECIES].values():
-                species['init'] = smart_subs(species['init'],
-                                             old, self._make_initial(new))
-
-                fields = ['dt']
-                if replace_identifiers:
-                    fields.append('compartment')
-
-                for field in fields:
-                    if field in species:
-                        species[field] = smart_subs(species[field], old, new)
-
-        # Initial compartment volume may also be specified with an assignment
-        # rule (at the end of the _process_species method), hence needs to be
-        # processed here too.
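-        # E.g. (hypothetical sketch, not from the original code): if an
-        # assignment rule set the initial volume `V_c1 = 2 * p`, replacing
-        # `p` must also reach the volume expression stored in
-        # `self.compartments`, which the substitution below takes care of.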
-        self.compartments = {smart_subs(c, old, new) if replace_identifiers
-                             else c:
-                             smart_subs(v, old, self._make_initial(new))
-                             for c, v in self.compartments.items()}
-
-    def _clean_reserved_symbols(self) -> None:
-        """
-        Remove all reserved symbols from self.symbols
-        """
-        for sym in RESERVED_SYMBOLS:
-            old_symbol = symbol_with_assumptions(sym)
-            new_symbol = symbol_with_assumptions(f'amici_{sym}')
-            self._replace_in_all_expressions(old_symbol, new_symbol,
-                                             replace_identifiers=True)
-            for symbols_ids, symbols in self.symbols.items():
-                if old_symbol in symbols:
-                    # reconstitute the whole dict in order to keep the ordering
-                    self.symbols[symbols_ids] = {
-                        new_symbol if k is old_symbol else k: v
-                        for k, v in symbols.items()
-                    }
-
-    def _sympy_from_sbml_math(self, var_or_math: Union[sbml.SBase, str]
-                              ) -> Union[sp.Expr, float, None]:
-        """
-        Sympify Math of SBML variables with all sanity checks and
-        transformations
-
-        :param var_or_math:
-            SBML variable that has a getMath() function, or a math string
-        :return:
-            sympified symbolic expression
-        """
-        if isinstance(var_or_math, sbml.SBase):
-            math_string = sbml.formulaToL3StringWithSettings(
-                var_or_math.getMath(),
-                self.sbml_parser_settings
-            )
-            ele_name = var_or_math.element_name
-        else:
-            math_string = var_or_math
-            ele_name = 'string'
-        math_string = replace_logx(math_string)
-        try:
-            try:
-                formula = sp.sympify(_parse_logical_operators(
-                    math_string
-                ), locals=self._local_symbols)
-            except TypeError as err:
-                if str(err) == 'BooleanAtom not allowed in this context.':
-                    formula = sp.sympify(_parse_logical_operators(
-                        math_string
-                    ), locals={'true': sp.Float(1.0), 'false': sp.Float(0.0),
-                               **self._local_symbols})
-                else:
-                    raise
-        except (sp.SympifyError, TypeError, ZeroDivisionError) as err:
-            raise SBMLException(f'{ele_name} "{math_string}" '
-                                'contains an unsupported expression: '
-                                f'{err}.')
-
-        if isinstance(formula, sp.Expr):
-            formula = _parse_special_functions_sbml(formula)
-            _check_unsupported_functions_sbml(formula,
-                                              expression_type=ele_name)
-        return formula
-
-    def _get_element_initial_assignment(self,
-                                        element_id: str) -> Union[sp.Expr,
-                                                                  None]:
-        """
-        Extract the value of an SBML variable according to its initial
-        assignment
-
-        :param element_id:
-            SBML variable name
-        :return:
-            symbolic expression for the initial assignment, evaluated at the
-            initial time point, or ``None`` if there is no initial assignment
-        """
-        assignment = self.sbml.getInitialAssignment(
-            element_id
-        )
-        if assignment is None:
-            return None
-        sym = self._sympy_from_sbml_math(assignment)
-        # this is an initial assignment so we need to use
-        # initial conditions
-        sym = self._make_initial(sym)
-        return sym
-
-    def _get_element_stoichiometry(self,
-                                   ele: sbml.SBase) -> sp.Expr:
-        """
-        Computes the stoichiometry of a reactant or product of a reaction
-
-        :param ele:
-            reactant or product
-        :return:
-            symbolic variable that defines stoichiometry
-        """
-        if ele.isSetId():
-            sym = self._get_element_initial_assignment(ele.getId())
-            if sym is not None:
-                return sym
-
-            if self.is_assignment_rule_target(ele):
-                return _get_identifier_symbol(ele)
-
-        if ele.isSetStoichiometry():
-            stoichiometry: float = ele.getStoichiometry()
-            return sp.Integer(stoichiometry) if stoichiometry.is_integer() \
-                else sp.Float(stoichiometry)
-
-        return sp.Integer(1)
-
-    def is_assignment_rule_target(self, element: sbml.SBase) -> bool:
-        """
-        Checks if an element has a valid assignment rule in the specified
-        model.
-
-        :param element:
-            SBML variable
-
-        :return:
-            ``True`` if the element is the target of a valid assignment rule,
-            ``False`` otherwise
-        """
-        a = self.sbml.getAssignmentRuleByVariable(element.getId())
-        return a is not None and self._sympy_from_sbml_math(a) is not None
-
-    def is_rate_rule_target(self, element: sbml.SBase) -> bool:
-        """
-        Checks if an element has a valid rate rule in the specified
-        model.
-
-        :param element:
-            SBML variable
-
-        :return:
-            ``True`` if the element is the target of a valid rate rule,
-            ``False`` otherwise
-        """
-        a = self.sbml.getRateRuleByVariable(element.getId())
-        return a is not None and self._sympy_from_sbml_math(a) is not None
-
-
-def _check_lib_sbml_errors(sbml_doc: sbml.SBMLDocument,
-                           show_warnings: bool = False) -> None:
-    """
-    Checks the error log of the given SBML document.
-
-    :param sbml_doc:
-        SBML document
-
-    :param show_warnings:
-        display SBML warnings
-    """
-    num_warning = sbml_doc.getNumErrors(sbml.LIBSBML_SEV_WARNING)
-    num_error = sbml_doc.getNumErrors(sbml.LIBSBML_SEV_ERROR)
-    num_fatal = sbml_doc.getNumErrors(sbml.LIBSBML_SEV_FATAL)
-
-    if num_warning + num_error + num_fatal:
-        for i_error in range(sbml_doc.getNumErrors()):
-            error = sbml_doc.getError(i_error)
-            # we ignore any info messages for now
-            if error.getSeverity() >= sbml.LIBSBML_SEV_ERROR \
-                    or (show_warnings and
-                        error.getSeverity() >= sbml.LIBSBML_SEV_WARNING):
-                logger.error(f'libSBML {error.getCategoryAsString()} '
-                             f'({error.getSeverityAsString()}):'
-                             f' {error.getMessage()}')
-
-    if num_error + num_fatal:
-        raise SBMLException(
-            'SBML Document failed to load (see error messages above)'
-        )
-
-
-def _parse_event_trigger(trigger: sp.Expr) -> sp.Expr:
-    """
-    Recursively translates a boolean trigger function into a real-valued
-    root function
-
-    :param trigger:
-        boolean trigger expression
-    :return: real-valued root function expression
-    """
-    # Events can be defined without trigger, i.e., the event will never fire.
-    # In this case, set a dummy trigger:
-    if trigger is None:
-        return sp.Float(1.0)
-    if trigger.is_Relational:
-        root = trigger.args[0] - trigger.args[1]
-        _check_unsupported_functions_sbml(root, 'sympy.Expression')
-
-        # convert relational expressions into trigger functions
-        if isinstance(trigger, (sp.core.relational.LessThan,
-                                sp.core.relational.StrictLessThan)):
-            # y < x or y <= x
-            return -root
-        if isinstance(trigger, (sp.core.relational.GreaterThan,
-                                sp.core.relational.StrictGreaterThan)):
-            # y >= x or y > x
-            return root
-
-    # or(x,y): any of {x,y} is > 0: sp.Max(x, y)
-    if isinstance(trigger, sp.Or):
-        return sp.Max(*[_parse_event_trigger(arg) for arg in trigger.args])
-    # and(x,y): all of {x,y} are > 0: sp.Min(x, y)
-    if isinstance(trigger, sp.And):
-        return sp.Min(*[_parse_event_trigger(arg) for arg in trigger.args])
-
-    raise SBMLException(
-        'AMICI cannot parse piecewise/event trigger functions with argument '
-        f'{trigger}.'
-    )
-
-
-def _parse_logical_operators(math_str: Union[str, float, None]
-                             ) -> Union[str, float, None]:
-    """
-    Parses a math string in order to replace logical operators by a form
-    parsable for sympy
-
-    :param math_str:
-        str with mathematical expression
-    :return:
-        parsed math_str
-    """
-    if not isinstance(math_str, str):
-        return math_str
-
-    if ' xor(' in math_str or ' Xor(' in math_str:
-        raise SBMLException('Xor is currently not supported as logical '
-                            'operation.')
-
-    return (math_str.replace('&&', '&')).replace('||', '|')
-
-
-def assignmentRules2observables(sbml_model: sbml.Model,
-                                filter_function: Callable = lambda *_: True):
-    """
-    Turn assignment rules into observables.
-
-    :param sbml_model:
-        Model to operate on
-
-    :param filter_function:
-        Callback function taking assignment variable as input and returning
-        ``True``/``False`` to indicate if the respective rule should be
-        turned into an observable.
-
-    :return:
-        A dictionary ``{observableId: {'name': observableName,
-        'formula': formulaString}}``
-    """
-    observables = {}
-    for p in sbml_model.getListOfParameters():
-        parameter_id = p.getId()
-        if filter_function(p):
-            observables[parameter_id] = {
-                'name': p.getName() if p.isSetName() else p.getId(),
-                'formula': sbml_model.getAssignmentRuleByVariable(
-                    parameter_id
-                ).getFormula()
-            }
-
-    for parameter_id in observables:
-        sbml_model.removeRuleByVariable(parameter_id)
-        sbml_model.removeParameter(parameter_id)
-
-    return observables
-
-
-def _add_conservation_for_constant_species(
-        ode_model: ODEModel,
-        conservation_laws: List[ConservationLaw]
-) -> List[int]:
-    """
-    Adds constant species to conservation laws
-
-    :param ode_model:
-        ODEModel object with basic definitions
-
-    :param conservation_laws:
-        List of already known conservation laws
-
-    :returns:
-        List of species indices (``species_solver``) which remain later in
-        the ODE solver
-    """
-
-    # decide which species to keep in stoichiometry
-    species_solver = list(range(ode_model.num_states_rdata()))
-
-    # iterate over species, find constant ones
-    for ix in reversed(range(ode_model.num_states_rdata())):
-        if ode_model.state_is_constant(ix):
-            # don't use sym('x') here since conservation laws need to be
-            # added before symbols are generated
-            target_state = ode_model._states[ix].get_id()
-            total_abundance = symbol_with_assumptions(f'tcl_{target_state}')
-            conservation_laws.append({
-                'state': target_state,
-                'total_abundance': total_abundance,
-                'coefficients': {target_state: 1.0},
-            })
-            # mark species to delete from stoichiometric matrix
-            species_solver.pop(ix)
-
-    return species_solver
-
-
-def _get_species_compartment_symbol(species: sbml.Species) -> sp.Symbol:
-    """
-    Generate compartment symbol for the compartment of a specific species.
-    This function will always return the same unique python object for a
-    given compartment name.
-
-    :param species:
-        SBML species
-    :return:
-        compartment symbol
-    """
-    return symbol_with_assumptions(species.getCompartment())
-
-
-def _get_identifier_symbol(var: sbml.SBase) -> sp.Symbol:
-    """
-    Generate identifier symbol for an SBML variable.
-    This function will always return the same unique python object for a
-    given entity.
-
-    :param var:
-        SBML variable
-    :return:
-        identifier symbol
-    """
-    return symbol_with_assumptions(var.getId())
-
-
-def get_species_initial(species: sbml.Species) -> sp.Expr:
-    """
-    Extract the initial value (amount or concentration, depending on the
-    species' units) from a given species
-
-    :param species:
-        SBML species
-
-    :return:
-        initial species value
-    """
-    if species.isSetInitialConcentration():
-        conc = species.getInitialConcentration()
-        if species.getHasOnlySubstanceUnits():
-            return sp.Float(conc) * _get_species_compartment_symbol(species)
-        else:
-            return sp.Float(conc)
-
-    if species.isSetInitialAmount():
-        amt = species.getInitialAmount()
-        if math.isnan(amt):
-            return sp.Float(0.0)
-
-        if species.getHasOnlySubstanceUnits():
-            return sp.Float(amt)
-        else:
-            return sp.Float(amt) / _get_species_compartment_symbol(species)
-
-    return sp.Float(0.0)
-
-
-def _get_list_of_species_references(sbml_model: sbml.Model) \
-        -> List[sbml.SpeciesReference]:
-    """
-    Extracts list of species references as SBML doesn't provide a native
-    function for this.
- - :param sbml_model: - SBML model instance - - :return: - ListOfSpeciesReferences - """ - return [ - reference - for element in sbml_model.all_elements - if isinstance(element, sbml.ListOfSpeciesReferences) - for reference in element - ] - - -def replace_logx(math_str: Union[str, float, None]) -> Union[str, float, None]: - """ - Replace logX(.) by log(., X) since sympy cannot parse the former - - :param math_str: - string for sympification - - :return: - sympifiable string - """ - if not isinstance(math_str, str): - return math_str - - return re.sub( - r'(^|\W)log(\d+)\(', r'\g<1>1/ln(\2)*ln(', math_str - ) - - -def _collect_event_assignment_parameter_targets(sbml_model: sbml.Model): - targets = set() - sbml_parameters = sbml_model.getListOfParameters() - sbml_parameter_ids = [p.getId() for p in sbml_parameters] - for event in sbml_model.getListOfEvents(): - for event_assignment in event.getListOfEventAssignments(): - target_id = event_assignment.getVariable() - if target_id in sbml_parameter_ids: - targets.add(_get_identifier_symbol( - sbml_parameters[sbml_parameter_ids.index(target_id)] - )) - return targets - - -def _check_unsupported_functions_sbml(sym: sp.Expr, - expression_type: str, - full_sym: Optional[sp.Expr] = None): - try: - _check_unsupported_functions(sym, expression_type, full_sym) - except RuntimeError as err: - raise SBMLException(str(err)) - - -def _parse_special_functions_sbml(sym: sp.Expr, - toplevel: bool = True) -> sp.Expr: - try: - return _parse_special_functions(sym, toplevel) - except RuntimeError as err: - raise SBMLException(str(err)) - - -def _validate_observables( - observables: Union[Dict[str, Dict[str, str]], None], - sigmas: Dict[str, Union[str, float]], - noise_distributions: Dict[str, str], - events: bool = False -) -> None: - - if observables is None or not observables: - return - - # Ensure no non-existing observableIds have been specified - # (no problem here, but usually an upstream bug) - unknown_ids = set(sigmas.keys()) - set(observables.keys()) - if unknown_ids: - raise ValueError( - f"Sigma provided for unknown " - f"{'eventO' if events else 'o'}bservableIds: " - f"{unknown_ids}.") - - # Ensure no non-existing observableIds have been specified - # (no problem here, but usually an upstream bug) - unknown_ids = set(noise_distributions.keys()) - \ - set(observables.keys()) - if unknown_ids: - raise ValueError( - f"Noise distribution provided for unknown " - f"{'eventO' if events else 'o'}bservableIds: " - f"{unknown_ids}.") - - -def _check_symbol_nesting(symbols: Dict[sp.Symbol, Dict[str, sp.Expr]], - symbol_type: str): - observable_syms = set(symbols.keys()) - for obs in symbols.values(): - if any(sym in observable_syms - for sym in obs['value'].free_symbols): - raise ValueError( - "Nested observables are not supported, " - f"but {symbol_type} `{obs['name']} = {obs['value']}` " - "references another observable." 
- ) diff --git a/python/amici/setup.template.py b/python/amici/setup.template.py deleted file mode 100644 index 9e5297be62..0000000000 --- a/python/amici/setup.template.py +++ /dev/null @@ -1,178 +0,0 @@ -"""AMICI model package setup""" - - -import contextlib -import os -import sys -from typing import List - -from amici import amici_path, hdf5_enabled, compiledWithOpenMP -from amici.custom_commands import (set_compiler_specific_extension_options, - compile_parallel) -from amici.setuptools import (get_blas_config, - get_hdf5_config, - add_coverage_flags_if_required, - add_debug_flags_if_required, - add_openmp_flags, - ) -from setuptools import find_packages, setup, Extension -from setuptools.command.build_ext import build_ext - - -class ModelBuildExt(build_ext): - """Custom build_ext""" - - def build_extension(self, ext): - # Work-around for compiler-specific build options - set_compiler_specific_extension_options( - ext, self.compiler.compiler_type) - - - # Monkey-patch compiler instance method for parallel compilation - # except for Windows, where this seems to be incompatible with - # providing swig files. Not investigated further... - if sys.platform != 'win32': - import setuptools._distutils.ccompiler - self.compiler.compile = compile_parallel.__get__( - self.compiler, setuptools._distutils.ccompiler.CCompiler) - - print(f"Building model extension in {os.getcwd()}") - - build_ext.build_extension(self, ext) - - def find_swig(self) -> str: - """Find SWIG executable - - Overrides horribly outdated distutils function.""" - - from amici.swig import find_swig - return find_swig() - - -def get_model_sources() -> List[str]: - """Get list of source files for the amici base library""" - import glob - model_sources = glob.glob('*.cpp') - with contextlib.suppress(ValueError): - model_sources.remove('main.cpp') - return model_sources - - -def get_amici_libs() -> List[str]: - """ - Get list of libraries for the amici base library - """ - return ['amici', 'sundials', 'suitesparse'] - - -def get_extension() -> Extension: - """Get setuptools extension object for this AMICI model package""" - - cxx_flags = [] - linker_flags = [] - - if compiledWithOpenMP(): - # Only build model with OpenMP support if AMICI base packages was built - # that way - add_openmp_flags(cxx_flags=cxx_flags, ldflags=linker_flags) - - add_coverage_flags_if_required(cxx_flags, linker_flags) - add_debug_flags_if_required(cxx_flags, linker_flags) - - h5pkgcfg = get_hdf5_config() - - blaspkgcfg = get_blas_config() - linker_flags.extend(blaspkgcfg.get('extra_link_args', [])) - - libraries = [*get_amici_libs(), *blaspkgcfg['libraries']] - if hdf5_enabled: - libraries.extend(['hdf5_hl_cpp', 'hdf5_hl', 'hdf5_cpp', 'hdf5']) - - sources = [os.path.join("swig", "TPL_MODELNAME.i"), *get_model_sources()] - - # compiler and linker flags for libamici - if 'AMICI_CXXFLAGS' in os.environ: - cxx_flags.extend(os.environ['AMICI_CXXFLAGS'].split(' ')) - if 'AMICI_LDFLAGS' in os.environ: - linker_flags.extend(os.environ['AMICI_LDFLAGS'].split(' ')) - - ext_include_dirs = [ - os.getcwd(), - os.path.join(amici_path, 'include'), - os.path.join(amici_path, "ThirdParty", "gsl"), - os.path.join(amici_path, "ThirdParty", "sundials", "include"), - os.path.join(amici_path, "ThirdParty", "SuiteSparse", "include"), - *h5pkgcfg['include_dirs'], - *blaspkgcfg['include_dirs'] - ] - - ext_library_dirs = [ - *h5pkgcfg['library_dirs'], - *blaspkgcfg['library_dirs'], - os.path.join(amici_path, 'libs') - ] - - # Build shared object - ext = Extension( - 
'TPL_MODELNAME._TPL_MODELNAME', - sources=sources, - include_dirs=ext_include_dirs, - libraries=libraries, - library_dirs=ext_library_dirs, - swig_opts=[ - '-c++', '-modern', '-outdir', 'TPL_MODELNAME', - '-I%s' % os.path.join(amici_path, 'swig'), - '-I%s' % os.path.join(amici_path, 'include'), - ], - extra_compile_args=cxx_flags, - extra_link_args=linker_flags - ) - - # see `set_compiler_specific_extension_options` - ext.extra_compile_args_mingw32 = ['-std=c++14'] - ext.extra_compile_args_unix = ['-std=c++14'] - ext.extra_compile_args_msvc = ['/std:c++14'] - - return ext - - -# Change working directory to setup.py location -os.chdir(os.path.dirname(os.path.abspath(__file__))) - -MODEL_EXT = get_extension() - -CLASSIFIERS = [ - 'Development Status :: 3 - Alpha', - 'Intended Audience :: Science/Research', - 'Operating System :: POSIX :: Linux', - 'Operating System :: MacOS :: MacOS X', - 'Programming Language :: Python', - 'Programming Language :: C++', - 'Topic :: Scientific/Engineering :: Bio-Informatics', -] - -CMDCLASS = { - # For parallel compilation and custom swig finder - 'build_ext': ModelBuildExt, -} - -# Install -setup( - name='TPL_MODELNAME', - cmdclass=CMDCLASS, - version='TPL_PACKAGE_VERSION', - description='AMICI-generated module for model TPL_MODELNAME', - url='https://github.com/AMICI-dev/AMICI', - author='model-author-todo', - author_email='model-author-todo', - # license = 'BSD', - ext_modules=[MODEL_EXT], - packages=find_packages(), - install_requires=['amici==TPL_AMICI_VERSION'], - extras_require={'wurlitzer': ['wurlitzer']}, - python_requires='>=3.8', - package_data={}, - zip_safe=False, - include_package_data=True, - classifiers=CLASSIFIERS, -) diff --git a/python/amici/setuptools.py b/python/amici/setuptools.py deleted file mode 100644 index 7cfcf61fe1..0000000000 --- a/python/amici/setuptools.py +++ /dev/null @@ -1,284 +0,0 @@ -""" -setuptools ----------- -Helper functions for AMICI core and module package preparation -""" - -import os -import sys -import shlex -import subprocess - -from .swig import find_swig, get_swig_version - -try: - import pkgconfig # optional - - # pkgconfig python module might be installed without pkg-config binary - # being available - pkgconfig.exists('somePackageName') -except (ModuleNotFoundError, EnvironmentError): - pkgconfig = None - -from typing import Dict, List, Union, Tuple, Any - -PackageInfo = Dict[str, List[Union[str, Tuple[str, Any]]]] - - -def get_blas_config() -> PackageInfo: - """ - Find CBLAS-compatible BLAS - - :return: - blas related package information - """ - - blaspkgcfg = {'include_dirs': [], - 'library_dirs': [], - 'libraries': [], - 'define_macros': [], - 'extra_compile_args': [], - 'extra_link_args': [] - } - - # Check environment variables - if 'BLAS_CFLAGS' in os.environ: - blaspkgcfg['extra_compile_args'].extend( - shlex.split(os.environ['BLAS_CFLAGS']) - ) - - if 'BLAS_LIBS' in os.environ: - blaspkgcfg['extra_link_args'].extend( - shlex.split(os.environ['BLAS_LIBS']) - ) - - if 'BLAS_CFLAGS' in os.environ or 'BLAS_LIBS' in os.environ: - # If options have been provided by the user, we don't try to detect - # anything by ourselves - return blaspkgcfg - - # Try environment modules - # MKL - if 'MKLROOT' in os.environ: - if 'MKL_INC' in os.environ: - blaspkgcfg['extra_compile_args'].extend( - shlex.split(os.environ['MKL_INC']) - ) - if 'MKL_LIB' in os.environ: - blaspkgcfg['extra_link_args'].extend( - shlex.split(os.environ['MKL_LIB']) - ) - blaspkgcfg['define_macros'].append(('AMICI_BLAS_MKL', None), ) - 
return blaspkgcfg - - # Try pkgconfig - if pkgconfig: - for blas_name in ['cblas', 'openblas']: - if pkgconfig.exists(blas_name): - blaspkgcfg = pkgconfig.parse(blas_name) - blaspkgcfg['extra_compile_args'] = [ - pkgconfig.cflags(blas_name) - ] - blaspkgcfg['extra_link_args'] = [ - pkgconfig.libs(blas_name) - ] - - return blaspkgcfg - - # If none of the previous worked, fall back to libcblas in default paths - blaspkgcfg['libraries'] = ['cblas'] - - return blaspkgcfg - - -def get_hdf5_config() -> PackageInfo: - """ - Find HDF5 include dir and libs - - :return: - hdf5 related package information - """ - - h5pkgcfg = {'include_dirs': [], - 'library_dirs': [], - 'libraries': [], - 'define_macros': [] - } - hdf5_include_dir_found = False - hdf5_library_dir_found = False - - # try for hdf5 in standard locations - hdf5_include_dir_hints = [ - '/usr/include/hdf5/serial', - '/usr/local/include', - '/usr/include', # travis ubuntu xenial, centos - '/usr/local/Cellar/hdf5/1.10.2_1/include' # travis macOS - ] - hdf5_library_dir_hints = [ - '/usr/lib/x86_64-linux-gnu/', # travis ubuntu xenial - '/usr/lib/x86_64-linux-gnu/hdf5/serial', - '/usr/local/lib', - '/usr/lib64/', # CentOS - '/usr/local/Cellar/hdf5/1.10.2_1/lib' # travis macOS - ] - - # special treatment for conda environments - # as the conda library dir is provided first, we should also check for - # conda header files first - if 'CONDA_DIR' in os.environ: - hdf5_include_dir_hints.insert( - 0, os.path.join(os.environ['CONDA_DIR'], 'include')) - hdf5_library_dir_hints.insert( - 0, os.path.join(os.environ['CONDA_DIR'], 'lib')) - - # Check for Environment Modules variables - if 'HDF5_BASE' in os.environ: - hdf5_include_dir_hints.insert( - 0, os.path.join(os.environ['HDF5_BASE'], 'include')) - hdf5_library_dir_hints.insert( - 0, os.path.join(os.environ['HDF5_BASE'], 'lib')) - - for hdf5_include_dir_hint in hdf5_include_dir_hints: - hdf5_include_dir_found = os.path.isfile( - os.path.join(hdf5_include_dir_hint, 'hdf5.h')) - if hdf5_include_dir_found: - print(f"hdf5.h found in {hdf5_include_dir_hint}") - h5pkgcfg['include_dirs'] = [hdf5_include_dir_hint] - break - - for hdf5_library_dir_hint in hdf5_library_dir_hints: - # check for static or shared library - for lib_filename in ['libhdf5.a', 'libhdf5.so']: - hdf5_library_dir_found = os.path.isfile( - os.path.join(hdf5_library_dir_hint, lib_filename)) - if hdf5_library_dir_found: - print(f'{lib_filename} found in {hdf5_library_dir_hint}') - h5pkgcfg['library_dirs'] = [hdf5_library_dir_hint] - break - if hdf5_library_dir_found: - # break to not override hdf5_library_dir_found - break - - h5pkgcfg['found'] = hdf5_include_dir_found and hdf5_library_dir_found - if h5pkgcfg['found']: - return h5pkgcfg - - if pkgconfig: - try: - h5pkgcfg = pkgconfig.parse('hdf5') - except pkgconfig.PackageNotFoundError: - pass - # NOTE: Cannot use pkgconfig.exists('hdf5f'), since this is true - # although no libraries or include dirs are available - h5pkgcfg['found'] = 'include_dirs' in h5pkgcfg \ - and h5pkgcfg['include_dirs'] and \ - 'library_dirs' in h5pkgcfg \ - and h5pkgcfg['library_dirs'] - - return h5pkgcfg - - -def add_coverage_flags_if_required(cxx_flags: List[str], - linker_flags: List[str]) -> None: - """ - Add compiler and linker flags if gcov coverage requested - - :param cxx_flags: - list of existing cxx flags - - :param linker_flags: - list of existing linker flags - """ - if 'ENABLE_GCOV_COVERAGE' in os.environ and \ - os.environ['ENABLE_GCOV_COVERAGE'].upper() == 'TRUE': - print("ENABLE_GCOV_COVERAGE was 
set to TRUE." - " Building AMICI with coverage symbols.") - cxx_flags.extend(['-g', '-O0', '--coverage']) - linker_flags.extend(['--coverage', '-g']) - - -def add_debug_flags_if_required(cxx_flags: List[str], - linker_flags: List[str]) -> None: - """ - Add compiler and linker debug flags if requested - - Arguments: - :param cxx_flags: - list of existing cxx flags - - :param linker_flags: - list of existing linker flags - """ - if 'ENABLE_AMICI_DEBUGGING' in os.environ \ - and os.environ['ENABLE_AMICI_DEBUGGING'] == 'TRUE': - print("ENABLE_AMICI_DEBUGGING was set to TRUE." - " Building AMICI with debug symbols.") - cxx_flags.extend(['-g', '-O0', '-UNDEBUG']) - linker_flags.extend(['-g']) - - -def generate_swig_interface_files(swig_outdir: str = None, - with_hdf5: bool = None) -> None: - """ - Compile the swig python interface to amici - """ - - swig_exe = find_swig() - swig_version = get_swig_version(swig_exe) - - swig_args = [ - '-c++', - '-python', - '-py3', - '-threads', - '-Wall', - f'-Iamici{os.sep}swig', - f'-Iamici{os.sep}include', - ] - - print(f"Found SWIG version {swig_version}") - - # Are HDF5 includes available to generate the wrapper? - if with_hdf5 is None: - with_hdf5 = get_hdf5_config()['found'] - - if not with_hdf5: - swig_args.append('-DAMICI_SWIG_WITHOUT_HDF5') - - if swig_outdir is not None: - swig_args.extend(['-outdir', swig_outdir]) - - # Do we have -doxygen? - if swig_version >= (4, 0, 0): - swig_args.append('-doxygen') - - swig_cmd = [swig_exe, - *swig_args, - '-o', os.path.join("amici", "amici_wrap.cxx"), - os.path.join("amici", "swig", "amici.i")] - - print(f"Running SWIG: {' '.join(swig_cmd)}") - sp = subprocess.run(swig_cmd, stdout=subprocess.PIPE, - stderr=sys.stdout.buffer) - if not sp.returncode == 0: - raise AssertionError('Swigging AMICI failed:\n' - + sp.stdout.decode('utf-8')) - - -def add_openmp_flags(cxx_flags: List, ldflags: List) -> None: - """Add OpenMP flags to lists for compiler/linker flags (in-place)""" - - # Enable OpenMP support for Linux / OSX: - if sys.platform == 'linux': - print("Adding OpenMP flags...") - cxx_flags.insert(0, "-fopenmp") - ldflags.insert(0, "-fopenmp") - elif sys.platform == 'darwin': - if os.path.exists('/usr/local/lib/libomp.a'): - print("Adding OpenMP flags...") - cxx_flags[0:0] = ["-Xpreprocessor", "-fopenmp"] - ldflags[0:0] = ["-Xpreprocessor", "-fopenmp", "-lomp"] - else: - print("Not adding OpenMP flags, because /usr/local/lib/libomp.a" - " does not exist. To enable, run `brew install libomp` " - "or add flags manually.") diff --git a/python/amici/swig.py b/python/amici/swig.py deleted file mode 100644 index b2ab7c090c..0000000000 --- a/python/amici/swig.py +++ /dev/null @@ -1,183 +0,0 @@ -"""Functions for downloading/building/finding SWIG""" -import ast -import contextlib -import os -import re -import subprocess -from typing import Tuple - - -def find_swig() -> str: - """Get name and version of SWIG executable - - We need version >=3.0. Probably we should try some default paths and names, - but this should do the trick for now. - - Debian/Ubuntu systems have swig3.0 ('swig' is older versions), - OSX has swig 3.0 as 'swig'. - """ - - candidates = ['swig4.0', 'swig3.0', 'swig'] - # Environment variable has priority - if 'SWIG' in os.environ: - candidates.insert(0, os.environ['SWIG']) - - for candidate in candidates: - if swig_works(candidate): - return candidate - - raise RuntimeError( - "Unable to find SWIG executable with default names. " - "Ensure you have SWIG installed, e.g. 
by " - "`sudo apt install swig` or `brew install swig`. " - "As non-root user, you can install SWIG using " - "https://github.com/AMICI-dev/AMICI/blob/master/scripts/" - "downloadAndBuildSwig.sh, or by following the " - "instructions at http://www.swig.org/Doc4.0/" - "SWIGDocumentation.html#Preface_installation. " - "If was not found despite being installed, set the SWIG" - " environment variable to the full path of the correct " - "executable." - ) - - -def swig_works(swig: str, verbose: bool = True) -> bool: - """Test if `swig` looks like a working SWIG executable.""" - - try: - # For python3.6 compatibility no `capture_output=True` - result = subprocess.run([swig, '-version'], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - except (FileNotFoundError, PermissionError): - if verbose: - print(f'Testing SWIG executable {swig}... FAILED.') - return False - - if verbose: - if result.returncode == 0: - print(f'Testing SWIG executable {swig}... SUCCEEDED.') - else: - print(f'Testing SWIG executable {swig}... FAILED.') - - return result.returncode == 0 - - -def get_swig_version(swig_exe: str) -> Tuple: - """Determine version of the given SWIG executable - - Returns: - Version tuple - """ - result = subprocess.run([swig_exe, '-version'], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - assert result.returncode == 0 - - version = re.sub(r'(?s).*Version\s+([\S]+).*', r'\1', - result.stdout.decode('utf-8')) - - return tuple(int(x) for x in version.split('.')) - - -class TypeHintFixer(ast.NodeTransformer): - """Replaces SWIG-generated C++ typehints by corresponding Python types""" - - mapping = { - 'void': None, - 'double': ast.Name('float'), - 'int': ast.Name('int'), - 'long': ast.Name('int'), - 'ptrdiff_t': ast.Name('int'), - 'size_t': ast.Name('int'), - 'bool': ast.Name('bool'), - 'std::unique_ptr< amici::Solver >': ast.Constant('Solver'), - 'amici::InternalSensitivityMethod': - ast.Constant('InternalSensitivityMethod'), - 'amici::InterpolationType': ast.Constant('InterpolationType'), - 'amici::LinearMultistepMethod': ast.Constant('LinearMultistepMethod'), - 'amici::LinearSolver': ast.Constant('LinearSolver'), - 'amici::Model *': ast.Constant('Model'), - 'amici::Model const *': ast.Constant('Model'), - 'amici::NewtonDampingFactorMode': - ast.Constant('NewtonDampingFactorMode'), - 'amici::NonlinearSolverIteration': - ast.Constant('NonlinearSolverIteration'), - 'amici::ObservableScaling': ast.Constant('ObservableScaling'), - 'amici::ParameterScaling': ast.Constant('ParameterScaling'), - 'amici::RDataReporting': ast.Constant('RDataReporting'), - 'amici::SensitivityMethod': ast.Constant('SensitivityMethod'), - 'amici::SensitivityOrder': ast.Constant('SensitivityOrder'), - 'amici::Solver *': ast.Constant('Solver'), - 'amici::SteadyStateSensitivityMode': - ast.Constant('SteadyStateSensitivityMode'), - 'amici::realtype': ast.Name('float'), - 'DoubleVector': ast.Constant('Sequence[float]'), - 'IntVector': ast.Name('Sequence[int]'), - 'std::string': ast.Name('str'), - 'std::string const &': ast.Name('str'), - 'std::unique_ptr< amici::ExpData >': ast.Constant('ExpData'), - 'std::unique_ptr< amici::ReturnData >': ast.Constant('ReturnData'), - 'std::vector< amici::ParameterScaling,' - 'std::allocator< amici::ParameterScaling > > const &': - ast.Constant('ParameterScalingVector') - } - - def visit_FunctionDef(self, node): - # Has a return type annotation? - if node.returns: - node.returns = self._new_annot(node.returns.value) - - # Has arguments? 
- if node.args.args: - for arg in node.args.args: - if not arg.annotation: - continue - arg.annotation = self._new_annot(arg.annotation.value) - return node - - def _new_annot(self, old_annot: str): - with contextlib.suppress(KeyError): - return self.mapping[old_annot] - - # std::vector size type - if re.match(r"std::vector< .* >::(?:size|difference)_type", old_annot): - return ast.Name("int") - - # std::vector value type - if (value_type := re.sub( - r'std::vector< (.*) >::value_type(?: const &)?', - r'\1', old_annot)) in self.mapping: - return self.mapping[value_type] - - # std::vector - if (value_type := re.sub( - r'std::vector< (.*),std::allocator< \1 > >(?: const &)?', - r'\1', old_annot)) in self.mapping: - value_type_annot = self.mapping[value_type] - if isinstance(value_type_annot, ast.Constant): - return ast.Name(f"Tuple['{value_type_annot.value}']") - if isinstance(value_type_annot, ast.Name): - return ast.Name(f"Tuple[{value_type_annot.id}]") - - return ast.Constant(old_annot) - - -def fix_typehints(infilename, outfilename): - """Change SWIG-generated C++ typehints to Python typehints""" - # Only available from Python3.9 - if not getattr(ast, 'unparse', None): - return - - # file -> AST - with open(infilename, 'r') as f: - source = f.read() - parsed_source = ast.parse(source) - - # Change AST - fixer = TypeHintFixer() - parsed_source = fixer.visit(parsed_source) - - # AST -> file - with open(outfilename, 'w') as f: - f.write(ast.unparse(parsed_source)) diff --git a/python/amici/swig_wrappers.py b/python/amici/swig_wrappers.py deleted file mode 100644 index 394f91aea8..0000000000 --- a/python/amici/swig_wrappers.py +++ /dev/null @@ -1,237 +0,0 @@ -"""Convenience wrappers for the swig interface""" -import sys -from contextlib import contextmanager, suppress -from typing import List, Optional, Union, Sequence, Dict, Any -import amici.amici as amici_swig -from . import numpy - -__all__ = [ - 'runAmiciSimulation', 'runAmiciSimulations', 'ExpData', - 'readSolverSettingsFromHDF5', 'writeSolverSettingsToHDF5', - 'set_model_settings', 'get_model_settings', - 'AmiciModel', 'AmiciSolver', 'AmiciExpData', 'AmiciReturnData', - 'AmiciExpDataVector' -] - -AmiciModel = Union['amici.Model', 'amici.ModelPtr'] -AmiciSolver = Union['amici.Solver', 'amici.SolverPtr'] -AmiciExpData = Union['amici.ExpData', 'amici.ExpDataPtr'] -AmiciReturnData = Union['amici.ReturnData', 'amici.ReturnDataPtr'] -AmiciExpDataVector = Union['amici.ExpDataPtrVector', Sequence[AmiciExpData]] - - -try: - from wurlitzer import sys_pipes -except ModuleNotFoundError: - sys_pipes = suppress - - -@contextmanager -def _capture_cstdout(): - """Redirect C/C++ stdout to python stdout if python stdout is redirected, - e.g. 
in ipython notebook"""
-    if sys.stdout == sys.__stdout__:
-        yield
-    else:
-        with sys_pipes():
-            yield
-
-
-def _get_ptr(
-        obj: Union[AmiciModel, AmiciExpData, AmiciSolver, AmiciReturnData]
-) -> Union['amici_swig.Model', 'amici_swig.ExpData',
-           'amici_swig.Solver', 'amici_swig.ReturnData']:
-    """
-    Convenience wrapper that returns the smart pointer pointee, if applicable
-
-    :param obj:
-        Potential smart pointer
-
-    :returns:
-        Non-smart pointer
-    """
-    if isinstance(obj, (amici_swig.ModelPtr, amici_swig.ExpDataPtr,
-                        amici_swig.SolverPtr, amici_swig.ReturnDataPtr)):
-        return obj.get()
-    return obj
-
-
-def runAmiciSimulation(
-    model: AmiciModel,
-    solver: AmiciSolver,
-    edata: Optional[AmiciExpData] = None
-) -> 'numpy.ReturnDataView':
-    """
-    Convenience wrapper around :py:func:`amici.amici.runAmiciSimulation`
-    (generated by swig)
-
-    :param model:
-        Model instance
-
-    :param solver:
-        Solver instance, must be generated from
-        :py:meth:`amici.amici.Model.getSolver`
-
-    :param edata:
-        ExpData instance (optional)
-
-    :returns:
-        ReturnData object with simulation results
-    """
-    with _capture_cstdout():
-        rdata = amici_swig.runAmiciSimulation(
-            _get_ptr(solver), _get_ptr(edata), _get_ptr(model))
-    return numpy.ReturnDataView(rdata)
-
-
-def ExpData(*args) -> 'amici_swig.ExpData':
-    """
-    Convenience wrapper for :py:class:`amici.amici.ExpData` constructors
-
-    :param args: arguments
-
-    :returns: ExpData instance
-    """
-    if isinstance(args[0], numpy.ReturnDataView):
-        return amici_swig.ExpData(_get_ptr(args[0]['ptr']), *args[1:])
-    elif isinstance(args[0], (amici_swig.ExpData, amici_swig.ExpDataPtr)):
-        # the *args[1:] should be empty, but by the time you read this,
-        # the constructor signature may have changed, and you are glad this
-        # wrapper did not break.
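-        # hypothetical usage sketch (not from the original code):
-        # `ExpData(existing_edata)` acts as a plain copy here; any extra
-        # positional arguments are forwarded to the constructor unchanged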
- return amici_swig.ExpData(_get_ptr(args[0]), *args[1:]) - elif isinstance(args[0], (amici_swig.Model, amici_swig.ModelPtr)): - return amici_swig.ExpData(_get_ptr(args[0])) - else: - return amici_swig.ExpData(*args) - - -def runAmiciSimulations( - model: AmiciModel, - solver: AmiciSolver, - edata_list: AmiciExpDataVector, - failfast: bool = True, - num_threads: int = 1, -) -> List['numpy.ReturnDataView']: - """ - Convenience wrapper for loops of amici.runAmiciSimulation - - :param model: Model instance - :param solver: Solver instance, must be generated from Model.getSolver() - :param edata_list: list of ExpData instances - :param failfast: returns as soon as an integration failure is encountered - :param num_threads: number of threads to use (only used if compiled - with openmp) - - :returns: list of simulation results - """ - with _capture_cstdout(): - edata_ptr_vector = amici_swig.ExpDataPtrVector(edata_list) - rdata_ptr_list = amici_swig.runAmiciSimulations( - _get_ptr(solver), - edata_ptr_vector, - _get_ptr(model), - failfast, - num_threads - ) - return [numpy.ReturnDataView(r) for r in rdata_ptr_list] - - -def readSolverSettingsFromHDF5( - file: str, - solver: AmiciSolver, - location: Optional[str] = 'solverSettings' -) -> None: - """ - Convenience wrapper for :py:func:`amici.readSolverSettingsFromHDF5` - - :param file: hdf5 filename - :param solver: Solver instance to which settings will be transferred - :param location: location of solver settings in hdf5 file - """ - amici_swig.readSolverSettingsFromHDF5(file, _get_ptr(solver), location) - - -def writeSolverSettingsToHDF5( - solver: AmiciSolver, - file: Union[str, object], - location: Optional[str] = 'solverSettings' -) -> None: - """ - Convenience wrapper for :py:func:`amici.amici.writeSolverSettingsToHDF5` - - :param file: hdf5 filename, can also be an object created by - :py:func:`amici.amici.createOrOpenForWriting` - :param solver: Solver instance from which settings will be stored - :param location: location of solver settings in hdf5 file - """ - amici_swig.writeSolverSettingsToHDF5(_get_ptr(solver), file, location) - - -# Values are suffixes of `get[...]` and `set[...]` `amici.Model` methods. -# If either the getter or setter is not named with this pattern, then the value -# is a tuple where the first and second elements are the getter and setter -# methods, respectively. -model_instance_settings = [ - # `setParameter{List,Scale}` will clear initial state sensitivities, so - # `setParameter{List,Scale}` has to be called first. - 'ParameterList', - 'ParameterScale', # getter returns a SWIG object - 'AddSigmaResiduals', - 'AlwaysCheckFinite', - 'FixedParameters', - 'InitialStates', - ('getInitialStateSensitivities', 'setUnscaledInitialStateSensitivities'), - 'MinimumSigmaResiduals', - ('nMaxEvent', 'setNMaxEvent'), - 'Parameters', - 'ReinitializationStateIdxs', - 'ReinitializeFixedParameterInitialStates', - 'StateIsNonNegative', - 'SteadyStateSensitivityMode', - ('t0', 'setT0'), - 'Timepoints', -] - - -def get_model_settings( - model: AmiciModel, -) -> Dict[str, Any]: - """Get model settings that are set independently of the compiled model. - - :param model: The AMICI model instance. - - :returns: Keys are AMICI model attributes, values are attribute values. 
- """ - settings = {} - for setting in model_instance_settings: - getter = setting[0] if isinstance(setting, tuple) else f'get{setting}' - - if getter == 'getInitialStates' and not model.hasCustomInitialStates(): - settings[setting] = [] - continue - if getter == 'getInitialStateSensitivities' \ - and not model.hasCustomInitialStateSensitivities(): - settings[setting] = [] - continue - - settings[setting] = getattr(model, getter)() - # TODO `amici.Model.getParameterScale` returns a SWIG object instead - # of a Python list/tuple. - if setting == 'ParameterScale': - settings[setting] = tuple(settings[setting]) - return settings - - -def set_model_settings( - model: AmiciModel, - settings: Dict[str, Any], -) -> None: - """Set model settings. - - :param model: The AMICI model instance. - :param settings: Keys are callable attributes (setters) of an AMICI model, - values are provided to the setters. - """ - for setting, value in settings.items(): - setter = setting[1] if isinstance(setting, tuple) else f'set{setting}' - getattr(model, setter)(value) diff --git a/python/amici/testing.py b/python/amici/testing.py deleted file mode 100644 index de1f69a1cc..0000000000 --- a/python/amici/testing.py +++ /dev/null @@ -1,41 +0,0 @@ -"""Test support functions""" -import os -import sys -from tempfile import TemporaryDirectory - -import pytest - -# Indicates whether we are currently running under valgrind -# see also https://stackoverflow.com/a/62364698 -ON_VALGRIND = any( - needle in haystack - for needle in ('valgrind', 'vgpreload') - for haystack in (os.getenv("LD_PRELOAD", ""), - os.getenv("DYLD_INSERT_LIBRARIES", "")) -) - -# Decorator to skip certain tests when we are under valgrind -# (those that are independent of the AMICI C++ parts, or that take too long, -# or that test performance) -skip_on_valgrind = pytest.mark.skipif( - ON_VALGRIND, reason="Takes too long or is meaningless under valgrind") - - -class TemporaryDirectoryWinSafe(TemporaryDirectory): - """TemporaryDirectory that will not raise if cleanup fails. - - If any extension was loaded from the temporary directory, cleanup would - otherwise fail on Windows with a ``PermissionError``. This class ignores - such failures. 
- """ - def cleanup(self): - try: - super().cleanup() - except PermissionError as e: - if sys.platform not in {'win32', 'cygwin'}: - raise e - except NotADirectoryError: - # Ignore exception on Windows for pyd files: - # NotADirectoryError: [WinError 267] The directory name is - # invalid: '....pyd' - pass diff --git a/python/sdist/amici/MANIFEST.template.in b/python/sdist/amici/MANIFEST.template.in deleted file mode 120000 index 7615b2cce2..0000000000 --- a/python/sdist/amici/MANIFEST.template.in +++ /dev/null @@ -1 +0,0 @@ -../../amici/MANIFEST.template.in \ No newline at end of file diff --git a/python/sdist/amici/MANIFEST.template.in b/python/sdist/amici/MANIFEST.template.in new file mode 100644 index 0000000000..eb3b1b450f --- /dev/null +++ b/python/sdist/amici/MANIFEST.template.in @@ -0,0 +1 @@ +include *.cpp *.h diff --git a/python/sdist/amici/__init__.py b/python/sdist/amici/__init__.py deleted file mode 120000 index ce45fc83fa..0000000000 --- a/python/sdist/amici/__init__.py +++ /dev/null @@ -1 +0,0 @@ -../../amici/__init__.py \ No newline at end of file diff --git a/python/sdist/amici/__init__.py b/python/sdist/amici/__init__.py new file mode 100644 index 0000000000..46eac0cfb1 --- /dev/null +++ b/python/sdist/amici/__init__.py @@ -0,0 +1,193 @@ +""" +AMICI +----- + +The AMICI Python module provides functionality for importing SBML or PySB +models and turning them into C++ Python extensions. + +:var amici_path: + absolute root path of the amici repository or Python package +:var amiciSwigPath: + absolute path of the amici swig directory +:var amiciSrcPath: + absolute path of the amici source directory +:var amiciModulePath: + absolute root path of the amici module +:var hdf5_enabled: + boolean indicating if amici was compiled with hdf5 support +:var has_clibs: + boolean indicating if this is the full package with swig interface or + the raw package without +""" + + +import contextlib +import importlib +import os +import re +import sys +from pathlib import Path +from types import ModuleType as ModelModule +from typing import Optional, Union + + +def _get_amici_path(): + """ + Determine package installation path, or, if used directly from git + repository, get repository root + """ + basedir = os.path.dirname(os.path.dirname(os.path.dirname(__file__))) + if os.path.exists(os.path.join(basedir, '.git')): + return os.path.abspath(basedir) + return os.path.dirname(__file__) + + +def _get_commit_hash(): + """Get commit hash from file""" + basedir = os.path.dirname(os.path.dirname(os.path.dirname(amici_path))) + commitfile = next( + ( + file for file in [ + os.path.join(basedir, '.git', 'FETCH_HEAD'), + os.path.join(basedir, '.git', 'ORIG_HEAD'), ] + if os.path.isfile(file) + ), + None + ) + + if commitfile: + with open(commitfile) as f: + return str(re.search(r'^([\w]*)', f.read().strip()).group()) + return 'unknown' + + +def _imported_from_setup() -> bool: + """Check whether this module is imported from `setup.py`""" + + from inspect import getouterframes, currentframe + from os import sep + + # in case we are imported from setup.py, this will be the AMICI package + # root directory (otherwise it is most likely the Python library directory, + # we are not interested in) + package_root = os.path.realpath(os.path.dirname(os.path.dirname(__file__))) + + for frame in getouterframes(currentframe(), context=0): + # Need to compare the full path, in case a user tries to import AMICI + # from a module `*setup.py`. 
Will still cause trouble if some package + # requires the AMICI extension during its installation, but seems + # unlikely... + frame_path = os.path.realpath(os.path.expanduser(frame.filename)) + if (frame_path == os.path.join(package_root, 'setup.py') + or frame_path.endswith(f"{sep}setuptools{sep}build_meta.py") + ): + return True + + return False + + +# Initialize AMICI paths +amici_path = _get_amici_path() +amiciSwigPath = os.path.join(amici_path, 'swig') +amiciSrcPath = os.path.join(amici_path, 'src') +amiciModulePath = os.path.dirname(__file__) + +has_clibs = any(os.path.isfile(os.path.join(amici_path, wrapper)) + for wrapper in ['amici.py', 'amici_without_hdf5.py']) +hdf5_enabled = False + +# Get version number from file +with open(os.path.join(amici_path, 'version.txt')) as f: + __version__ = f.read().strip() + +__commit__ = _get_commit_hash() + +# Import SWIG module and swig-dependent submodules if required and available +if not _imported_from_setup(): + if has_clibs: + from . import amici + from .amici import * + # has to be done before importing readSolverSettingsFromHDF5 + # from .swig_wrappers + hdf5_enabled = 'readSolverSettingsFromHDF5' in dir() + from .swig_wrappers import * + + # These modules require the swig interface and other dependencies + from .numpy import ReturnDataView, ExpDataView + from .pandas import * + + # These modules don't require the swig interface + from .sbml_import import SbmlImporter, assignmentRules2observables + from .ode_export import ODEModel, ODEExporter + + from typing import Protocol + + + class ModelModule(Protocol): + """Enable Python static type checking for AMICI-generated model + modules""" + def getModel(self) -> amici.Model: + pass + + +class add_path: + """Context manager for temporarily changing PYTHONPATH""" + + def __init__(self, path: Union[str, Path]): + self.path: str = str(path) + + def __enter__(self): + if self.path: + sys.path.insert(0, self.path) + + def __exit__(self, exc_type, exc_value, traceback): + with contextlib.suppress(ValueError): + sys.path.remove(self.path) + + +def import_model_module( + module_name: str, + module_path: Optional[Union[Path, str]] = None +) -> ModelModule: + """ + Import Python module of an AMICI model + + :param module_name: + Name of the python package of the model + :param module_path: + Absolute or relative path of the package directory + :return: + The model module + """ + module_path = str(module_path) + + # ensure we will find the newly created module + importlib.invalidate_caches() + + if not os.path.isdir(module_path): + raise ValueError(f"module_path '{module_path}' is not a directory.") + + module_path = os.path.abspath(module_path) + + # module already loaded? + if module_name in sys.modules: + # if a module with that name is already in sys.modules, we remove it, + # along with all other modules from that package. otherwise, there + # will be trouble if two different models with the same name are to + # be imported. 
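        # (e.g. two separately generated model packages that are both
        # named "my_model" -- an illustrative name -- would otherwise
        # shadow each other)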
+ del sys.modules[module_name] + # collect first, don't delete while iterating + to_unload = {loaded_module_name for loaded_module_name in + sys.modules.keys() if + loaded_module_name.startswith(f"{module_name}.")} + for m in to_unload: + del sys.modules[m] + + with add_path(module_path): + return importlib.import_module(module_name) + + +class AmiciVersionError(RuntimeError): + """Error thrown if an AMICI model is loaded that is incompatible with + the installed AMICI base package""" + pass diff --git a/python/sdist/amici/__init__.template.py b/python/sdist/amici/__init__.template.py deleted file mode 120000 index 165798fd5f..0000000000 --- a/python/sdist/amici/__init__.template.py +++ /dev/null @@ -1 +0,0 @@ -../../amici/__init__.template.py \ No newline at end of file diff --git a/python/sdist/amici/__init__.template.py b/python/sdist/amici/__init__.template.py new file mode 100644 index 0000000000..9fbab85003 --- /dev/null +++ b/python/sdist/amici/__init__.template.py @@ -0,0 +1,19 @@ +"""AMICI-generated module for model TPL_MODELNAME""" + +import amici +from pathlib import Path + +# Ensure we are binary-compatible, see #556 +if 'TPL_AMICI_VERSION' != amici.__version__: + raise amici.AmiciVersionError( + f'Cannot use model `TPL_MODELNAME` in {Path(__file__).parent}, ' + 'generated with amici==TPL_AMICI_VERSION, ' + f'together with amici=={amici.__version__} ' + 'which is currently installed. To use this model, install ' + 'amici==TPL_AMICI_VERSION or re-import the model with the amici ' + 'version currently installed.' + ) + +from TPL_MODELNAME._TPL_MODELNAME import * + +__version__ = 'TPL_PACKAGE_VERSION' diff --git a/python/sdist/amici/__main__.py b/python/sdist/amici/__main__.py deleted file mode 120000 index cfa13d34da..0000000000 --- a/python/sdist/amici/__main__.py +++ /dev/null @@ -1 +0,0 @@ -../../amici/__main__.py \ No newline at end of file diff --git a/python/sdist/amici/__main__.py b/python/sdist/amici/__main__.py new file mode 100644 index 0000000000..dac5230270 --- /dev/null +++ b/python/sdist/amici/__main__.py @@ -0,0 +1,25 @@ +"""Package-level entrypoint""" + +from . import __version__, compiledWithOpenMP, has_clibs, hdf5_enabled +import os +import sys + +def print_info(): + """Displays information on the current AMICI installation. + + Useful for verifying package installation of submitting bug reports""" + features = [] + + if has_clibs: + features.append("extensions") + + if compiledWithOpenMP(): + features.append("OpenMP") + + if hdf5_enabled: + features.append("HDF5") + + print(f"AMICI ({sys.platform}) version {__version__} ({','.join(features)})") + +if __name__ == '__main__': + print_info() diff --git a/python/sdist/amici/bngl_import.py b/python/sdist/amici/bngl_import.py deleted file mode 120000 index 5aa807079a..0000000000 --- a/python/sdist/amici/bngl_import.py +++ /dev/null @@ -1 +0,0 @@ -../../amici/bngl_import.py \ No newline at end of file diff --git a/python/sdist/amici/bngl_import.py b/python/sdist/amici/bngl_import.py new file mode 100644 index 0000000000..840e4a4229 --- /dev/null +++ b/python/sdist/amici/bngl_import.py @@ -0,0 +1,32 @@ +""" +BNGL Import +------------ +This module provides all necessary functionality to import a model specified +in the :term:`BNGL` format. +""" + + +from pysb.importers.bngl import model_from_bngl + +from .pysb_import import pysb2amici + + +def bngl2amici(bngl_model: str, *args, **kwargs) -> None: + r""" + Generate AMICI C++ files for the provided model. 
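+
+    The BNGL model is first loaded into a :class:`pysb.core.Model` via
+    :func:`pysb.importers.bngl.model_from_bngl` and then handed over to
+    :func:`amici.pysb_import.pysb2amici`.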
+ + :param bngl_model: + bngl model file, model name will determine the name of the generated + module + + :param args: + see :func:`amici.pysb_import.pysb2amici` for additional arguments + + :param kwargs: + see :func:`amici.pysb_import.pysb2amici` for additional arguments + + """ + if 'model' in kwargs: + raise ValueError('model argument not allowed') + pysb_model = model_from_bngl(bngl_model) + pysb2amici(pysb_model, *args, **kwargs) diff --git a/python/sdist/amici/conserved_quantities_demartino.py b/python/sdist/amici/conserved_quantities_demartino.py deleted file mode 120000 index 270caa6f65..0000000000 --- a/python/sdist/amici/conserved_quantities_demartino.py +++ /dev/null @@ -1 +0,0 @@ -../../amici/conserved_quantities_demartino.py \ No newline at end of file diff --git a/python/sdist/amici/conserved_quantities_demartino.py b/python/sdist/amici/conserved_quantities_demartino.py new file mode 100644 index 0000000000..28fe3f9e77 --- /dev/null +++ b/python/sdist/amici/conserved_quantities_demartino.py @@ -0,0 +1,911 @@ +import logging +import math +import random +import sys +from typing import List, MutableSequence, Sequence, Tuple, Union, Optional + +from .logging import get_logger + +logger = get_logger(__name__, logging.ERROR) + +# increase recursion limit for recursive quicksort +sys.setrecursionlimit(3000) + +_MIN = 1e-9 +_MAX = 1e9 + + +def compute_moiety_conservation_laws( + stoichiometric_list: Sequence[float], + num_species: int, + num_reactions: int, + max_num_monte_carlo: int = 20, + rng_seed: Union[None, bool, int] = False, + species_names: Optional[Sequence[str]] = None, +) -> Tuple[List[List[int]], List[List[float]]]: + """Compute moiety conservation laws. + + According to the algorithm proposed by De Martino et al. (2014) + https://doi.org/10.1371/journal.pone.0100750 + + :param stoichiometric_list: + the stoichiometric matrix as a list (species x reactions, + column-major ordering) + :param num_species: + total number of species in the reaction network + :param num_reactions: + total number of reactions in the reaction network + :param max_num_monte_carlo: + maximum number of MonteCarlo steps before changing to relaxation + :param rng_seed: + Seed for the random number generator. If `False`, the RNG will not be + re-initialized. Other values will be passed to :func:`random.seed`. + :param species_names: + Species names. Optional and only used for logging. + :returns: + Integer MCLs as list of lists of indices of involved species and + list of lists of corresponding coefficients. 
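+
+    A minimal usage sketch (hypothetical input: a single reaction
+    ``A -> B``, for which ``A + B`` is conserved; the stoichiometric
+    matrix is passed as a flat, column-major list)::
+
+        species_idxs, coefficients = compute_moiety_conservation_laws(
+            stoichiometric_list=[-1.0, 1.0],
+            num_species=2,
+            num_reactions=1,
+        )
+        # expected result: species_idxs == [[0, 1]],
+        # coefficients == [[1.0, 1.0]]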
+ """ + # compute semi-positive conservation laws + (kernel_dim, engaged_species, int_kernel_dim, conserved_moieties, + cls_species_idxs, cls_coefficients) = _kernel( + stoichiometric_list, num_species, num_reactions) + # if the number of integer MCLs equals total MCLS no MC relaxation + done = (int_kernel_dim == kernel_dim) + + if not done: + # construct interaction matrix + J, J2, fields = _fill(stoichiometric_list, engaged_species, + num_species) + + # seed random number generator + if rng_seed is not False: + random.seed(rng_seed) + + timer = 0 + # maximum number of montecarlo search before starting relaxation + while not done: + yes, int_kernel_dim, conserved_moieties = _monte_carlo( + engaged_species, J, J2, fields, conserved_moieties, + int_kernel_dim, cls_species_idxs, cls_coefficients, + num_species, max_iter=max_num_monte_carlo + ) + # if the number of integer MCLs equals total MCLS then MC done + done = (int_kernel_dim == kernel_dim) + timer = 0 if yes else timer + 1 + + if timer == max_num_monte_carlo: + done = _relax(stoichiometric_list, conserved_moieties, + num_reactions, num_species) + timer = 0 + _reduce(int_kernel_dim, cls_species_idxs, cls_coefficients, num_species) + _output(int_kernel_dim, kernel_dim, engaged_species, cls_species_idxs, + cls_coefficients, species_names, verbose=True) + + return cls_species_idxs[:int_kernel_dim], cls_coefficients[:int_kernel_dim] + + +def _output( + int_kernel_dim: int, + kernel_dim: int, + int_matched: List[int], + species_indices: List[List[int]], + species_coefficients: List[List[float]], + species_names: Optional[Sequence[str]] = None, + verbose: bool = False, + log_level: int = logging.DEBUG +): + """Log infos on identified conservation laws""" + def log(*args, **kwargs): + logger.log(log_level, *args, **kwargs) + + log(f"There are {int_kernel_dim} linearly independent conserved " + f"moieties, engaging {len(int_matched)} state variables.") + if int_kernel_dim == kernel_dim: + log("They generate all the conservation laws") + else: + log(f"They don't generate all the conservation laws, " + f"{kernel_dim - int_kernel_dim} of them are not reducible to " + "moieties") + # print all conserved quantities + if verbose: + for i, (coefficients, engaged_species_idxs) \ + in enumerate(zip(species_coefficients, species_indices)): + if not engaged_species_idxs: + continue + log(f"Moiety number {i + 1} engages {len(engaged_species_idxs)} " + "species:") + for species_idx, coefficient \ + in zip(engaged_species_idxs, coefficients): + name = species_names[species_idx] if species_names \ + else species_idx + log(f"\t{name}\t{coefficient}") + + +def _qsort( + k: int, + km: int, + order: MutableSequence[int], + pivots: Sequence[int] +) -> None: + """Quicksort + + Recursive implementation of the quicksort algorithm + + :param k: + number of elements to sort + :param km: + current center element + :param order: + ordering of the elements + :param pivots: + corresponding pivot elements from scaled partial pivoting strategy + """ + # TODO: Rewrite into an iterative algorithm with pivoting strategy + + if k - km < 1: + # nothing to sort + return + + pivot = km + int((k - km) / 2) + l = 0 + p = k - km - 1 + new_order = [0] * (k - km) + for i in range(km, k): + if i != pivot: + if pivots[order[i]] < pivots[order[pivot]]: + new_order[l] = order[i] + l += 1 + else: + new_order[p] = order[i] + p -= 1 + new_order[p] = order[pivot] + order[km:k] = new_order + + # calculate center, then recursive calls on left and right intervals + centre = p + km + _qsort(k, 
centre + 1, order, pivots) + _qsort(centre, km, order, pivots) + + +def _kernel( + stoichiometric_list: Sequence[float], + num_species: int, + num_reactions: int +) -> Tuple[int, List[int], int, List[int], + List[List[int]], List[List[float]]]: + """ + Kernel (left nullspace of :math:`S`) calculation by Gaussian elimination + + To compute the left nullspace of the stoichiometric matrix :math:`S`, + a Gaussian elimination method with partial scaled pivoting is used to deal + effectively with a possibly ill-conditioned stoichiometric matrix + :math:`S`. + + Note that this is the Python reimplementation of the algorithm proposed by + `De Martino et al. (2014) `_ + and thus a direct adaption of the original implementation in C++. + + :param stoichiometric_list: + the stoichiometric matrix as a list (species x reactions, + col-major ordering) + :param num_species: + total number of species in the reaction network + :param num_reactions: + total number of reactions in the reaction network + :returns: + kernel dimension, MCLs, integer kernel dimension, integer MCLs and + indices to species and reactions in the preceding order as a tuple + """ + matrix: List[List[int]] = [[] for _ in range(num_species)] + matrix2: List[List[float]] = [[] for _ in range(num_species)] + i_reaction = 0 + i_species = 0 + for val in stoichiometric_list: + if val != 0: + matrix[i_species].append(i_reaction) + matrix2[i_species].append(val) + i_species += 1 + if i_species == num_species: + i_species = 0 + i_reaction += 1 + for i in range(num_species): + matrix[i].append(num_reactions + i) + matrix2[i].append(1) + + order: List[int] = list(range(num_species)) + pivots = [matrix[i][0] if len(matrix[i]) else _MAX + for i in range(num_species)] + + done = False + while not done: + _qsort(num_species, 0, order, pivots) + for j in range(num_species - 1): + if pivots[order[j + 1]] == pivots[order[j]] != _MAX: + min1 = _MAX + if len(matrix[order[j]]) > 1: + for i in range(len(matrix[order[j]])): + min1 = min(min1, abs(matrix2[order[j]][0] + / matrix2[order[j]][i])) + + min2 = _MAX + if len(matrix[order[j + 1]]) > 1: + for i in range(len(matrix[order[j + 1]])): + min2 = min(min2, abs(matrix2[order[j + 1]][0] + / matrix2[order[j + 1]][i])) + + if min2 > min1: + # swap + k2 = order[j + 1] + order[j + 1] = order[j] + order[j] = k2 + done = True + + for j in range(num_species - 1): + if pivots[order[j + 1]] == pivots[order[j]] != _MAX: + k1 = order[j + 1] + k2 = order[j] + column: List[float] = [0] * (num_species + num_reactions) + g = matrix2[k2][0] / matrix2[k1][0] + for i in range(1, len(matrix[k1])): + column[matrix[k1][i]] = matrix2[k1][i] * g + + for i in range(1, len(matrix[k2])): + column[matrix[k2][i]] -= matrix2[k2][i] + + matrix[k1] = [] + matrix2[k1] = [] + for col_idx, col_val in enumerate(column): + if abs(col_val) > _MIN: + matrix[k1].append(col_idx) + matrix2[k1].append(col_val) + + done = False + if len(matrix[order[j + 1]]): + pivots[order[j + 1]] = matrix[order[j + 1]][0] + else: + pivots[order[j + 1]] = _MAX + + RSolutions = [[] for _ in range(num_species)] + RSolutions2 = [[] for _ in range(num_species)] + kernel_dim = 0 + + for i in range(num_species): + done = all(matrix[i][j] >= num_reactions + for j in range(len(matrix[i]))) + if done and len(matrix[i]): + for j in range(len(matrix[i])): + RSolutions[kernel_dim].append(matrix[i][j] - num_reactions) + RSolutions2[kernel_dim].append(matrix2[i][j]) + kernel_dim += 1 + del matrix, matrix2 + + matched = [] + int_matched = [] + cls_species_idxs = [[] for _ in 
range(num_species)] + cls_coefficients = [[] for _ in range(num_species)] + + i2 = 0 + for i in range(kernel_dim): + ok2 = True + for j in range(len(RSolutions[i])): + if RSolutions2[i][j] * RSolutions2[i][0] < 0: + ok2 = False + if not matched or all( + cur_matched != RSolutions[i][j] for cur_matched in + matched + ): + matched.append(RSolutions[i][j]) + if ok2 and len(RSolutions[i]): + min_value = _MAX + for j in range(len(RSolutions[i])): + cls_species_idxs[i2].append(RSolutions[i][j]) + cls_coefficients[i2].append(abs(RSolutions2[i][j])) + min_value = min(min_value, abs(RSolutions2[i][j])) + if not int_matched or all( + cur_int_matched != cls_species_idxs[i2][j] + for cur_int_matched in int_matched + ): + int_matched.append(cls_species_idxs[i2][j]) + for j in range(len(cls_species_idxs[i2])): + cls_coefficients[i2][j] /= min_value + i2 += 1 + int_kernel_dim = i2 + + assert int_kernel_dim <= kernel_dim + assert len(cls_species_idxs) == len(cls_coefficients), \ + "Inconsistent number of conserved quantities in coefficients and " \ + "species" + return (kernel_dim, matched, int_kernel_dim, int_matched, cls_species_idxs, + cls_coefficients) + + +def _fill( + stoichiometric_list: Sequence[float], + matched: Sequence[int], + num_species: int +) -> Tuple[List[List[int]], List[List[int]], List[int]]: + """Construct interaction matrix + + Construct the interaction matrix out of the given stoichiometric matrix + :math:`S`. + + :param stoichiometric_list: + the stoichiometric matrix given as a flat list + :param matched: + found and independent moiety conservation laws (MCL) + :param num_species: + number of rows in :math:`S` + :returns: + interactions of metabolites and reactions, and matrix of interaction + """ + dim = len(matched) + + # for each entry in the stoichiometric matrix save interaction + i_reaction = 0 + i_species = 0 + matrix = [[] for _ in range(dim)] + matrix2 = [[] for _ in range(dim)] + for val in stoichiometric_list: + if val != 0: + take = dim + for matched_idx, matched_val in enumerate(matched): + if i_species == matched_val: + take = matched_idx + if take < dim: + matrix[take].append(i_reaction) + matrix2[take].append(val) + i_species += 1 + if i_species == num_species: + i_species = 0 + i_reaction += 1 + + J = [[] for _ in range(num_species)] + J2 = [[] for _ in range(num_species)] + fields = [0] * num_species + for i in range(dim): + for j in range(i, dim): + interactions = 0 + for po in range(len(matrix[i])): + for pu in range(len(matrix[j])): + if matrix[i][po] == matrix[j][pu]: + interactions += matrix2[i][po] * matrix2[j][pu] + if j == i: + fields[i] = interactions + elif abs(interactions) > _MIN: + J[i].append(j) + J2[i].append(interactions) + J[j].append(i) + J2[j].append(interactions) + return J, J2, fields + + +def _is_linearly_dependent( + vector: Sequence[float], + int_kernel_dim: int, + cls_species_idxs: Sequence[Sequence[int]], + cls_coefficients: Sequence[Sequence[float]], + matched: Sequence[int], + num_species: int +) -> bool: + """Check for linear dependence between MCLs + + Check if the solutions found with Monte Carlo are linearly independent + with respect to the previous found solution for all MCLs involved + + :param vector: + found basis + :param int_kernel_dim: + number of integer conservative laws + :param cls_species_idxs: + NSolutions contains the species involved in the MCL + :param cls_coefficients: + NSolutions2 contains the corresponding coefficients in the MCL + :param matched: + actual found MCLs + :param num_species: + number of rows in 
:math:`S` + :returns: + boolean indicating linear dependence (true) or not (false) + """ + K = int_kernel_dim + 1 + matrix: List[List[int]] = [[] for _ in range(K)] + matrix2: List[List[float]] = [[] for _ in range(K)] + # Populate matrices with species ids and coefficients for CLs + for i in range(K - 1): + for j in range(len(cls_species_idxs[i])): + matrix[i].append(cls_species_idxs[i][j]) + matrix2[i].append(cls_coefficients[i][j]) + + order2 = list(range(len(matched))) + pivots2 = matched[:] + _qsort(len(matched), 0, order2, pivots2) + + # ensure positivity + for i in range(len(matched)): + if vector[order2[i]] > _MIN: + matrix[K - 1].append(matched[order2[i]]) + matrix2[K - 1].append(vector[order2[i]]) + + order = list(range(K)) + pivots = [matrix[i][0] if len(matrix[i]) else _MAX for i in range(K)] + + # check for linear independence of the solution + ok = False + while not ok: + _qsort(K, 0, order, pivots) + for j in range(K - 1): + if pivots[order[j + 1]] == pivots[order[j]] != _MAX: + min1 = _MAX + if len(matrix[order[j]]) > 1: + for i in range(len(matrix[order[j]])): + min1 = min(min1, abs(matrix2[order[j]][0] + / matrix2[order[j]][i])) + min2 = _MAX + if len(matrix[order[j + 1]]) > 1: + for i in range(len(matrix[order[j + 1]])): + min2 = min(min2, abs(matrix2[order[j + 1]][0] + / matrix2[order[j + 1]][i])) + if min2 > min1: + # swap + k2 = order[j + 1] + order[j + 1] = order[j] + order[j] = k2 + ok = True + for j in range(K - 1): + if pivots[order[j + 1]] == pivots[order[j]] != _MAX: + k1 = order[j + 1] + k2 = order[j] + column: List[float] = [0] * num_species + g = matrix2[k2][0] / matrix2[k1][0] + for i in range(1, len(matrix[k1])): + column[matrix[k1][i]] = matrix2[k1][i] * g + for i in range(1, len(matrix[k2])): + column[matrix[k2][i]] -= matrix2[k2][i] + + matrix[k1] = [] + matrix2[k1] = [] + for i in range(num_species): + if abs(column[i]) > _MIN: + matrix[k1].append(i) + matrix2[k1].append(column[i]) + ok = False + pivots[k1] = matrix[k1][0] if len(matrix[k1]) else _MAX + K1 = sum(len(matrix[i]) > 0 for i in range(K)) + return K == K1 + + +def _monte_carlo( + matched: Sequence[int], + J: Sequence[Sequence[int]], + J2: Sequence[Sequence[float]], + fields: Sequence[float], + int_matched: MutableSequence[int], + int_kernel_dim: int, + cls_species_idxs: MutableSequence[MutableSequence[int]], + cls_coefficients: MutableSequence[MutableSequence[float]], + num_species: int, + initial_temperature: float = 1, + cool_rate: float = 1e-3, + max_iter: int = 10 +) -> Tuple[bool, int, Sequence[int]]: + """MonteCarlo simulated annealing for finding integer MCLs + + Finding integer solutions for the MCLs by Monte Carlo, see step (b) in + the De Martino (2014) paper and Eqs. 11-13 in the publication + + :param matched: + number of found MCLs + :param J: + index of metabolites involved in a MCL + :param J2: + coefficients of metabolites involved in a MCL + :param fields: + actual number of metabolites involved in a MCL + :param int_matched: + actual matched MCLs + :param int_kernel_dim: + number of MCLs found in :math:`S` + :param cls_species_idxs: + Modified in-place. + :param cls_coefficients: + Modified in-place. 
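+    :param num_species:
+        number of species / rows in :math:`S`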
+    :param initial_temperature:
+        initial temperature
+    :param cool_rate:
+        cooling rate of simulated annealing
+    :param max_iter:
+        maximum number of Monte Carlo steps before changing to relaxation
+    :returns:
+        tuple of: status of the MC iteration, updated number of integer
+        MCLs, and updated indices of the species engaged in integer MCLs
+
+        status indicates whether the Monte Carlo process found a new
+        linearly independent moiety (``True``) or not (``False``); in the
+        latter case the caller retries the Monte Carlo search, switching
+        to relaxation after too many unsuccessful attempts
+    """
+    dim = len(matched)
+    num = [int(2 * random.uniform(0, 1)) if len(J[i]) else 0
+           for i in range(dim)]
+    numtot = sum(num)
+
+    def compute_h():
+        H = 0
+        for i in range(dim):
+            H += fields[i] * num[i] ** 2
+            for j in range(len(J[i])):
+                H += J2[i][j] * num[i] * num[J[i][j]]
+        return H
+
+    H = compute_h()
+
+    count = 0
+    howmany = 0
+    T1 = initial_temperature
+    e = math.exp(-1 / T1)
+    while True:
+        en = int(random.uniform(0, 1) * dim)
+        while not len(J[en]):
+            en = int(random.uniform(0, 1) * dim)
+
+        p = -1 if num[en] > 0 and random.uniform(0, 1) < 0.5 else 1
+        delta = fields[en] * num[en]
+        for i in range(len(J[en])):
+            delta += J2[en][i] * num[J[en][i]]
+        delta = 2 * p * delta + fields[en]
+
+        if delta < 0 or random.uniform(0, 1) < math.pow(e, delta):
+            num[en] += p
+            numtot += p
+            H += delta
+
+        count += 1
+        if count % dim == 0:
+            T1 -= cool_rate
+            if T1 <= 0:
+                T1 = cool_rate
+            e = math.exp(-1 / T1)
+
+        if count == dim // cool_rate:
+            count = 0
+            T1 = initial_temperature
+            e = math.exp(-1 / T1)
+            en = int(random.uniform(0, 1) * dim)
+            while not len(J[en]):
+                en = int(random.uniform(0, 1) * dim)
+            num = [0] * dim
+            num[en] = 1
+            numtot = 1
+
+            H = compute_h()
+            howmany += 1
+
+        if (H < _MIN and numtot > 0) or (howmany == 10 * max_iter):
+            break
+
+    if howmany >= 10 * max_iter:
+        return False, int_kernel_dim, int_matched
+
+    # found a candidate moiety? need to check for linear independence
+    if len(int_matched) and not _is_linearly_dependent(
+            num, int_kernel_dim, cls_species_idxs,
+            cls_coefficients, matched, num_species):
+        logger.debug(
+            "Found a moiety but it is linearly dependent... next.")
+        return False, int_kernel_dim, int_matched
+
+    # reduce by the MC procedure
+    order2 = list(range(len(matched)))
+    pivots2 = matched[:]
+    _qsort(len(matched), 0, order2, pivots2)
+    for i in range(len(matched)):
+        if num[order2[i]] > 0:
+            cls_species_idxs[int_kernel_dim].append(matched[order2[i]])
+            cls_coefficients[int_kernel_dim].append(num[order2[i]])
+    int_kernel_dim += 1
+    _reduce(int_kernel_dim, cls_species_idxs, cls_coefficients, num_species)
+    min_value = 1000
+    for i in range(len(cls_species_idxs[int_kernel_dim - 1])):
+        if not len(int_matched) \
+                or all(cur_int_matched
+                       != cls_species_idxs[int_kernel_dim - 1][i]
+                       for cur_int_matched in int_matched):
+            int_matched.append(cls_species_idxs[int_kernel_dim - 1][i])
+
+        min_value = min(min_value, cls_coefficients[int_kernel_dim - 1][i])
+    for i in range(len(cls_species_idxs[int_kernel_dim - 1])):
+        cls_coefficients[int_kernel_dim - 1][i] /= min_value
+
+    logger.debug(
+        f"Found linearly independent moiety, now there are "
+        f"{int_kernel_dim} engaging {len(int_matched)} species")
+
+    return True, int_kernel_dim, int_matched
+
+
+def _relax(
+        stoichiometric_list: Sequence[float],
+        int_matched: Sequence[int],
+        num_reactions: int,
+        num_species: int,
+        relaxation_max: float = 1e6,
+        relaxation_step: float = 1.9
+) -> bool:
+    """Relaxation scheme for the Monte Carlo final solution
+
+    Check for completeness using Motzkin's theorem; see step (c) and
+    Eqs. 14-16 in De Martino (2014).
+
+    :param stoichiometric_list:
+        stoichiometric matrix :math:`S` as a flat list (column-major ordering)
+    :param int_matched:
+        indices of the species engaged in the integer MCLs found so far
+    :param num_reactions:
+        number of reactions in the reaction network
+    :param num_species:
+        number of species in the reaction network
+    :param relaxation_max:
+        maximum relaxation step
+    :param relaxation_step:
+        relaxation step width
+    :returns:
+        boolean indicating if relaxation has succeeded (``True``) or not
+        (``False``)
+    """
+    K = len(int_matched)
+    matrix: List[List[int]] = [[] for _ in range(K)]
+    matrix2: List[List[float]] = [[] for _ in range(K)]
+    i_reaction = 0
+    i_species = 0
+    for val in stoichiometric_list:
+        if val != 0:
+            take = K
+            if K > 0:
+                for i in range(K):
+                    if i_species == int_matched[i]:
+                        take = i
+            if take < K:
+                matrix[take].append(i_reaction)
+                matrix2[take].append(val)
+        i_species += 1
+        if i_species == num_species:
+            i_species = 0
+            i_reaction += 1
+
+    # reduce the stoichiometric matrix of conserved moieties to row echelon
+    # form by Gaussian elimination
+    order = list(range(K))
+    pivots = [matrix[i][0] if len(matrix[i]) else _MAX for i in range(K)]
+    done = False
+    while not done:
+        _qsort(K, 0, order, pivots)
+        for j in range(K - 1):
+            if pivots[order[j + 1]] == pivots[order[j]] != _MAX:
+                min1 = _MAX
+                if len(matrix[order[j]]) > 1:
+                    for i in range(len(matrix[order[j]])):
+                        min1 = min(min1, abs(matrix2[order[j]][0]
+                                             / matrix2[order[j]][i]))
+                min2 = _MAX
+                if len(matrix[order[j + 1]]) > 1:
+                    for i in range(len(matrix[order[j + 1]])):
+                        min2 = min(min2, abs(matrix2[order[j + 1]][0]
+                                             / matrix2[order[j + 1]][i]))
+                if min2 > min1:
+                    # swap
+                    k2 = order[j + 1]
+                    order[j + 1] = order[j]
+                    order[j] = k2
+                    done = True
+        for j in range(K - 1):
+            if pivots[order[j + 1]] == pivots[order[j]] != _MAX:
+                k1 = order[j + 1]
+                k2 = order[j]
+                column: List[float] = [0] * num_reactions
+                g = matrix2[k2][0] / matrix2[k1][0]
+                for i in range(1, len(matrix[k1])):
+                    column[matrix[k1][i]] = matrix2[k1][i] * g
+                for i in range(1, len(matrix[k2])):
+                    
column[matrix[k2][i]] -= matrix2[k2][i] + + matrix[k1] = [] + matrix2[k1] = [] + for col_idx, col_val in enumerate(column): + if abs(col_val) > _MIN: + matrix[k1].append(col_idx) + matrix2[k1].append(col_val) + done = False + if len(matrix[order[j + 1]]): + pivots[order[j + 1]] = matrix[order[j + 1]][0] + else: + pivots[order[j + 1]] = _MAX + + # normalize + for matrix2_i in matrix2: + if len(matrix2_i): + norm = matrix2_i[0] + for j in range(len(matrix2_i)): + matrix2_i[j] /= norm + + for k1 in reversed(range(K - 1)): + k = order[k1] + if len(matrix[k]) <= 1: + continue + + i = 0 + while i < len(matrix[k]): + for i_species in range(k1 + 1, K): + j = order[i_species] + if not len(matrix[j]) or matrix[j][0] != matrix[k][i]: + continue + + # subtract rows + # matrix2[k] = matrix2[k] - matrix2[j] * matrix2[k][i] + row_k: List[float] = [0] * num_reactions + for a in range(len(matrix[k])): + row_k[matrix[k][a]] = matrix2[k][a] + for a in range(len(matrix[j])): + row_k[matrix[j][a]] -= matrix2[j][a] * matrix2[k][i] + # filter + matrix[k] = [row_idx for row_idx, row_val in enumerate(row_k) + if row_val != 0] + matrix2[k] = [row_val for row_val in row_k if row_val != 0] + + if len(matrix[k]) <= i: + break + i += 1 + + indip = [K + 1] * num_reactions + for i in range(K): + if len(matrix[i]): + indip[matrix[i][0]] = i + M1 = 0 + for i in range(num_reactions): + if indip[i] == K + 1: + indip[i] = K + M1 + M1 += 1 + + matrixAus = [[] for _ in range(M1)] + matrixAus2 = [[] for _ in range(M1)] + i_reaction = 0 + for i in range(num_reactions): + if indip[i] >= K: + matrixAus[i_reaction].append(i) + matrixAus2[i_reaction].append(1) + i_reaction += 1 + else: + t = indip[i] + if len(matrix[t]) > 1: + for k in range(1, len(matrix[t])): + idx = indip[matrix[t][k]] - K + matrixAus[idx].append(i) + matrixAus2[idx].append(-matrix2[t][k]) + del matrix + + N1 = num_species - K + matrix_aus = [[] for _ in range(N1)] + matrix_aus2 = [[] for _ in range(N1)] + k1 = 0 + i_reaction = 0 + i_species = 0 + for val in stoichiometric_list: + take = 1 + for i in range(len(int_matched)): + if i_species == int_matched[i]: + take -= 1 + if val != 0 and take == 1: + matrix_aus[k1].append(i_reaction) + matrix_aus2[k1].append(val) + i_species += 1 + k1 += take + if i_species == num_species: + i_species = 0 + k1 = 0 + i_reaction += 1 + + matrixb = [[] for _ in range(N1)] + matrixb2 = [[] for _ in range(N1)] + for i in range(M1): + for j in range(N1): + if len(matrix_aus[j]) * len(matrixAus[i]): + prod = 0 + for ib in range(len(matrixAus[i])): + for jb in range(len(matrix_aus[j])): + if matrixAus[i][ib] == matrix_aus[j][jb]: + prod += matrixAus2[i][ib] * matrix_aus2[j][jb] + if abs(prod) > _MIN: + matrixb[j].append(i) + matrixb2[j].append(prod) + del matrixAus, matrixAus2, matrix_aus, matrix_aus2 + + var = [_MIN] * M1 + time = 0 + cmin_idx = 0 + while True: + cmin = 1000 + for j in range(N1): + constr = 0 + if len(matrixb[j]): + for i in range(len(matrixb[j])): + constr += matrixb2[j][i] * var[matrixb[j][i]] + if constr < cmin: + cmin_idx = j + cmin = constr + if cmin >= 0: + # constraints satisfied + break + + # Motzkin relaxation + alpha = -relaxation_step * cmin + fact = sum(val ** 2 for val in matrixb2[cmin_idx]) + alpha /= fact + alpha = max(1e-9 * _MIN, alpha) + for j in range(len(matrixb[cmin_idx])): + var[matrixb[cmin_idx][j]] += alpha * matrixb2[cmin_idx][j] + + time += 1 + if time >= relaxation_max: + # timeout + break + + return done + + +def _reduce( + int_kernel_dim: int, + cls_species_idxs: 
MutableSequence[MutableSequence[int]], + cls_coefficients: MutableSequence[MutableSequence[float]], + num_species: int +) -> None: + """Reducing the solution which has been found by the Monte Carlo process + + In case of superpositions of independent MCLs one can reduce by + iteratively subtracting the other independent MCLs, taking care + to maintain then non-negativity constraint, see Eq. 13 in De Martino (2014) + + :param int_kernel_dim: + number of found MCLs + :param cls_species_idxs: + Species indices involved in each of the conservation laws. + Modified in-place. + :param cls_coefficients: + Coefficients for each of the species involved in each of the + conservation laws. Modified in-place. + :param num_species: + number of species / rows in :math:`S` + """ + K = int_kernel_dim + order = list(range(K)) + pivots = [-len(cls_species_idxs[i]) for i in range(K)] + + done = False + while not done: + _qsort(K, 0, order, pivots) + done = True + for i in range(K - 1): + k1 = order[i] + for j in range(i + 1, K): + k2 = order[j] + column: List[float] = [0] * num_species + for species_idx, coefficient \ + in zip(cls_species_idxs[k1], cls_coefficients[k1]): + column[species_idx] = coefficient + ok1 = True + for species_idx, coefficient \ + in zip(cls_species_idxs[k2], cls_coefficients[k2]): + column[species_idx] -= coefficient + if column[species_idx] < -_MIN: + ok1 = False + break + if not ok1: + continue + + done = False + cls_species_idxs[k1] = [] + cls_coefficients[k1] = [] + for col_idx, col_val in enumerate(column): + if abs(col_val) > _MIN: + cls_species_idxs[k1].append(col_idx) + cls_coefficients[k1].append(col_val) + pivots[k1] = -len(cls_species_idxs[k1]) diff --git a/python/sdist/amici/conserved_quantities_rref.py b/python/sdist/amici/conserved_quantities_rref.py deleted file mode 120000 index e84ceab727..0000000000 --- a/python/sdist/amici/conserved_quantities_rref.py +++ /dev/null @@ -1 +0,0 @@ -../../amici/conserved_quantities_rref.py \ No newline at end of file diff --git a/python/sdist/amici/conserved_quantities_rref.py b/python/sdist/amici/conserved_quantities_rref.py new file mode 100644 index 0000000000..4c401293cf --- /dev/null +++ b/python/sdist/amici/conserved_quantities_rref.py @@ -0,0 +1,99 @@ +"""Find conserved quantities deterministically""" + +from typing import List, Literal, Optional, Union + +import numpy as np + + +def rref( + mat: np.array, + round_ndigits: Optional[Union[Literal[False], int]] = None +) -> np.array: + """ + Bring matrix ``mat`` to reduced row echelon form + + see https://en.wikipedia.org/wiki/Row_echelon_form + + :param mat: Numpy float matrix to operate on (will be copied) + :param round_ndigits: Number of digits to round intermediary results to, + or ``False`` to disable rounding completely. + Helps to avoid numerical artifacts. + :returns: ``mat`` in rref form. 
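+
+    A doctest-style sanity check (with the default rounding)::
+
+        >>> import numpy as np
+        >>> rref(np.array([[1., 2.], [3., 4.]]))
+        array([[1., 0.],
+               [0., 1.]])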
+ """ + # Rounding function + if round_ndigits is False: + # no-op + def _round(mat): + return mat + else: + if round_ndigits is None: + # drop the least significant digit (more or less) + round_ndigits = - int(np.ceil(np.log10(np.spacing(1)))) + + def _round(mat): + mat = np.round(mat, round_ndigits) + mat[np.abs(mat) <= 10**(-round_ndigits)] = 0 + return mat + + # create a copy that will be modified + mat = mat.copy() + + lead = 0 + n_rows, n_columns = mat.shape + for r in range(n_rows): + if n_columns <= lead: + return mat + + i = r + while mat[i, lead] == 0: + i += 1 + if n_rows == i: + i = r + lead += 1 + if n_columns == lead: + return mat + + if i != r: + # Swap rows + mat[[i, r]] = mat[[r, i]] + # Divide row + mat[r] /= mat[r, lead] + for i in range(n_rows): + if i != r: + # Subtract multiple + mat[i] -= mat[i, lead] * mat[r] + mat = _round(mat) + lead += 1 + return mat + + +def pivots(mat: np.array) -> List[int]: + """Get indices of pivot columns in ``mat``, assumed to be in reduced row + echelon form""" + pivot_cols = [] + last_pivot_col = -1 + for i in range(mat.shape[0]): + for j in range(last_pivot_col + 1, mat.shape[1]): + if mat[i, j] != 0: + pivot_cols.append(j) + last_pivot_col = j + break + return pivot_cols + + +def nullspace_by_rref(mat: np.array) -> np.array: + """Compute basis of the nullspace of ``mat`` based on the reduced row + echelon form""" + rref_mat = rref(mat) + pivot_cols = pivots(rref_mat) + rows, cols = mat.shape + + basis = [] + for i in range(cols): + if i in pivot_cols: + continue + vec = [1.0 if i == j else 0.0 for j in range(cols)] + for pivot_row, pivot_col in enumerate(pivot_cols): + vec[pivot_col] -= rref_mat[pivot_row][i] + basis.append(vec) + return np.array(basis) diff --git a/python/sdist/amici/constants.py b/python/sdist/amici/constants.py deleted file mode 120000 index 5612f52bb9..0000000000 --- a/python/sdist/amici/constants.py +++ /dev/null @@ -1 +0,0 @@ -../../amici/constants.py \ No newline at end of file diff --git a/python/sdist/amici/constants.py b/python/sdist/amici/constants.py new file mode 100644 index 0000000000..fabd34b3be --- /dev/null +++ b/python/sdist/amici/constants.py @@ -0,0 +1,33 @@ +""" +Constants +----------- +This module provides a central place to define native python enums and +constants that are used in multiple other modules +""" + +import enum + + +class SymbolId(str, enum.Enum): + """ + Defines the different fields in the symbol dict to which sbml entities + get parsed to. + + .. note:: This class inherits from str enabling direct comparison to + strings, which means that the species symbols can be accessed as + symbols['species'], which is convenient for debugging and symbols[ + SymbolId.SPECIES], which is how the field should be accessed + programmatically. 
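+
+    For example, given a symbol dict ``symbols`` (an illustrative name)::
+
+        symbols[SymbolId.SPECIES] is symbols['species']  # same entry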
+ """ + SPECIES = 'species' + PARAMETER = 'parameter' + FIXED_PARAMETER = 'fixed_parameter' + OBSERVABLE = 'observable' + EXPRESSION = 'expression' + SIGMAY = 'sigmay' + LLHY = 'llhy' + EVENT = 'event' + EVENT_OBSERVABLE = 'event_observable' + SIGMAZ = 'sigmaz' + LLHZ = 'llhz' + LLHRZ = 'llhrz' diff --git a/python/sdist/amici/custom_commands.py b/python/sdist/amici/custom_commands.py deleted file mode 120000 index 5310358842..0000000000 --- a/python/sdist/amici/custom_commands.py +++ /dev/null @@ -1 +0,0 @@ -../../amici/custom_commands.py \ No newline at end of file diff --git a/python/sdist/amici/custom_commands.py b/python/sdist/amici/custom_commands.py new file mode 100644 index 0000000000..d4c25a348a --- /dev/null +++ b/python/sdist/amici/custom_commands.py @@ -0,0 +1,328 @@ +"""Custom setuptools commands for AMICI installation""" + +import glob +import os +import subprocess +import sys +from shutil import copyfile +from typing import Dict, List, Tuple + +from amici.swig import fix_typehints +from amici.setuptools import generate_swig_interface_files +from setuptools.command.build_clib import build_clib +from setuptools.command.build_ext import build_ext +from setuptools.command.develop import develop +from setuptools.command.install import install +from setuptools.command.install_lib import install_lib +from setuptools.command.sdist import sdist + +# typehints +Library = Tuple[str, Dict[str, List[str]]] + + +class AmiciInstall(install): + """Custom install to handle extra arguments""" + + print("running AmiciInstall") + + # Passing --no-clibs allows to install the Python-only part of AMICI + user_options = install.user_options + [ + ('no-clibs', None, "Don't build AMICI C++ extension"), + ] + + def initialize_options(self): + install.initialize_options(self) + self.no_clibs = False + + def finalize_options(self): + if self.no_clibs: + self.no_clibs = True + install.finalize_options(self) + + +def compile_parallel(self, sources, output_dir=None, macros=None, + include_dirs=None, debug=0, extra_preargs=None, + extra_postargs=None, depends=None): + """Parallelized version of distutils.ccompiler.compile""" + + macros, objects, extra_postargs, pp_opts, build = \ + self._setup_compile(output_dir, macros, include_dirs, sources, + depends, extra_postargs) + cc_args = self._get_cc_args(pp_opts, debug, extra_preargs) + + # parallel compilation + num_threads = 1 + if 'AMICI_PARALLEL_COMPILE' in os.environ: + max_threads = int(os.environ['AMICI_PARALLEL_COMPILE']) + num_threads = min(len(objects), max_threads) + num_threads = max(1, num_threads) + + def _single_compile(obj): + try: + src, ext = build[obj] + except KeyError: + return + self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts) + + if num_threads > 1: + import multiprocessing.pool + # convert to list, imap is evaluated on-demand + list(multiprocessing.pool.ThreadPool(num_threads).imap( + _single_compile, objects)) + else: + for obj in objects: + _single_compile(obj) + + return objects + + +class AmiciBuildCLib(build_clib): + """Custom build_clib""" + + def run(self): + print("running AmiciBuildCLib") + + # Always force recompilation. The way setuptools/distutils check for + # whether sources require recompilation is not reliable and may lead + # to crashes or wrong results. We rather compile once too often... 
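+        # (`force` is a standard distutils build option; setting it skips
+        # the mtime-based up-to-date checks and recompiles unconditionally)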
+ self.force = True + + build_clib.run(self) + + def build_libraries(self, libraries: List[Library]): + print("running AmiciBuildCLib.build_libraries") + + no_clibs = 'develop' in self.distribution.command_obj \ + and self.get_finalized_command('develop').no_clibs + no_clibs |= 'install' in self.distribution.command_obj \ + and self.get_finalized_command('install').no_clibs + + if no_clibs: + return + + # Override for parallel compilation + import distutils.ccompiler + distutils.ccompiler.CCompiler.compile = compile_parallel + + # Work-around for compiler-specific build options + set_compiler_specific_library_options( + libraries, self.compiler.compiler_type) + + # Monkey-patch setuptools, to force recompilation of library sources + # --force does not work as expected + + # need full import here, not module-level imported build_clib + import setuptools.command.build_clib + # the patched function may return anything but `([], [])` to trigger + # recompilation + setuptools.command.build_clib.newer_pairwise_group = lambda *_: None + + build_clib.build_libraries(self, libraries) + + +class AmiciDevelop(develop): + """Custom develop to build clibs""" + + # Passing --no-clibs allows to install the Python-only part of AMICI + user_options = develop.user_options + [ + ('no-clibs', None, "Don't build AMICI C++ extension"), + ] + + def initialize_options(self): + develop.initialize_options(self) + self.no_clibs = False + + def finalize_options(self): + if self.no_clibs: + self.no_clibs = True + develop.finalize_options(self) + + def run(self): + print("running AmiciDevelop") + + if not self.no_clibs: + self.get_finalized_command('build_clib').run() + + develop.run(self) + + +class AmiciInstallLib(install_lib): + """Custom install to allow preserving of debug symbols""" + + def run(self): + """strip debug symbols + + Returns: + + """ + print("running AmiciInstallLib") + + if 'ENABLE_AMICI_DEBUGGING' in os.environ \ + and os.environ['ENABLE_AMICI_DEBUGGING'] == 'TRUE' \ + and sys.platform == 'darwin': + search_dir = os.path.join(os.getcwd(), self.build_dir, 'amici') + for file in os.listdir(search_dir): + if file.endswith('.so'): + subprocess.run(['dsymutil', os.path.join(search_dir, file), + '-o', + os.path.join(search_dir, file + '.dSYM')]) + + # Continue with the actual installation + install_lib.run(self) + + +class AmiciBuildExt(build_ext): + """Custom build_ext to allow keeping otherwise temporary static libs""" + + def build_extension(self, ext): + # Work-around for compiler-specific build options + set_compiler_specific_extension_options( + ext, self.compiler.compiler_type) + + build_ext.build_extension(self, ext) + + def run(self): + """Copy the generated clibs to the extensions folder to be included in + the wheel + """ + + print("running AmiciBuildExt") + + no_clibs = 'develop' in self.distribution.command_obj \ + and self.get_finalized_command('develop').no_clibs + no_clibs |= 'install' in self.distribution.command_obj \ + and self.get_finalized_command('install').no_clibs + + if no_clibs: + # Nothing to build + return + + if not self.dry_run and self.distribution.has_c_libraries(): + # get the previously built static libraries + build_clib = self.get_finalized_command('build_clib') + libraries = build_clib.get_library_names() or [] + + # Module build directory where we want to copy the generated + # libs to + if self.inplace == 0: + build_dir = self.build_lib + else: + build_dir = os.getcwd() + target_dir = os.path.join(build_dir, 'amici', 'libs') + self.mkpath(target_dir) + + # Copy the 
generated libs + for lib in libraries: + libfilenames = glob.glob( + f"{build_clib.build_clib}{os.sep}*{lib}.*") + assert len(libfilenames) == 1, \ + f"Found unexpected number of files: {libfilenames}" + src = libfilenames[0] + dest = os.path.join(target_dir, os.path.basename(src)) + print(f"copying {src} -> {dest}") + copyfile(src, dest) + + swig_outdir = os.path.join(os.path.abspath(build_dir), "amici") + generate_swig_interface_files(swig_outdir=swig_outdir) + swig_py_module_path = os.path.join(swig_outdir, 'amici.py') + print("updating typehints") + fix_typehints(swig_py_module_path, swig_py_module_path) + + # Always force recompilation. The way setuptools/distutils check for + # whether sources require recompilation is not reliable and may lead + # to crashes or wrong results. We rather compile once too often... + self.force = True + + # Continue with the actual extension building + build_ext.run(self) + + +class AmiciSDist(sdist): + """Customized creation of source distribution""" + + def run(self): + """Setuptools entry-point""" + + print("running AmiciSDist") + + save_git_version() + + sdist.run(self) + + +def save_git_version(): + """Create file with extended version string + + This requires git. We assume that whoever creates the sdist will work + inside a valid git repository. + + Returns: + + """ + with open(os.path.join("amici", "git_version.txt"), "w") as f: + try: + cmd = ['git', 'describe', '--abbrev=4', '--dirty=-dirty', + '--always', '--tags'] + subprocess.run(cmd, stdout=f) + except Exception as e: + print(e) + + +def set_compiler_specific_library_options( + libraries: List[Library], + compiler_type: str) -> None: + """Set compiler-specific library options. + + C/C++-libraries for setuptools/distutils are provided as dict containing + entries for 'sources', 'macros', 'cflags', etc. + As we don't know the compiler type at the stage of calling + ``setuptools.setup`` and as there is no other apparent way to set + compiler-specific options, we elsewhere extend the dict with additional + fields ${original_field}_${compiler_class}, and add the additional + compiler-specific options here, at a stage when the compiler has been + determined by distutils. + + Arguments: + libraries: + List of libraries as passed as ``libraries`` argument to + ``setuptools.setup`` and ``setuptools.build_ext.build_extension``. + This is modified in place. + compiler_type: + Compiler type, as defined in + ``distutils.ccompiler.compiler.compiler_class``, (e.g. 'unix', + 'msvc', 'mingw32'). + """ + + for lib in libraries: + for field in ['cflags', 'sources', 'macros']: + try: + lib[1][field] += lib[1][f'{field}_{compiler_type}'] + print(f"Changed {field} for {lib[0]} with {compiler_type} " + f"to {lib[1][field]}") + except KeyError: + # No compiler-specific options set + pass + + +def set_compiler_specific_extension_options( + ext: 'setuptools.Extension', + compiler_type: str) -> None: + """Set compiler-specific extension build options. + + Same game as in ``set_compiler_specific_library_options``, except that + here we look for compiler-specific class attributes. 
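+
+    For example, a hypothetical extension attribute
+    ``extra_compile_args_mingw32`` would be appended to
+    ``extra_compile_args`` whenever the 'mingw32' compiler type is in use.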
+ + Arguments: + ext: setuptools/distutils extension object + compiler_type: Compiler type + """ + for attr in ['extra_compile_args', 'extra_link_args']: + try: + new_value = getattr(ext, attr) + \ + getattr(ext, f'{attr}_{compiler_type}') + setattr(ext, attr, new_value) + print(f"Changed {attr} for {compiler_type} to {new_value}") + except AttributeError: + # No compiler-specific options set + pass + diff --git a/python/sdist/amici/cxxcodeprinter.py b/python/sdist/amici/cxxcodeprinter.py deleted file mode 120000 index 4a7a4c588b..0000000000 --- a/python/sdist/amici/cxxcodeprinter.py +++ /dev/null @@ -1 +0,0 @@ -../../amici/cxxcodeprinter.py \ No newline at end of file diff --git a/python/sdist/amici/cxxcodeprinter.py b/python/sdist/amici/cxxcodeprinter.py new file mode 100644 index 0000000000..15f440a0fb --- /dev/null +++ b/python/sdist/amici/cxxcodeprinter.py @@ -0,0 +1,290 @@ +"""C++ code generation""" +import itertools +import os +import re +from typing import Dict, List, Optional, Tuple + +import sympy as sp +from sympy.printing.cxx import CXX11CodePrinter +from sympy.utilities.iterables import numbered_symbols +from toposort import toposort + + +class AmiciCxxCodePrinter(CXX11CodePrinter): + """C++ code printer""" + + def __init__(self): + super().__init__() + + # extract common subexpressions in matrix functions? + self.extract_cse = (os.getenv("AMICI_EXTRACT_CSE", "0").lower() + in ('1', 'on', 'true')) + + def doprint(self, expr: sp.Expr, assign_to: Optional[str] = None) -> str: + try: + code = super().doprint(expr, assign_to) + code = re.sub(r'(^|\W)M_PI(\W|$)', r'\1amici::pi\2', code) + + return code + except TypeError as e: + raise ValueError( + f'Encountered unsupported function in expression "{expr}"' + ) from e + + def _print_min_max(self, expr, cpp_fun: str, sympy_fun): + # C++ doesn't like mixing int and double for arguments for min/max, + # therefore, we just always convert to float + arg0 = sp.Float(expr.args[0]) if expr.args[0].is_number \ + else expr.args[0] + if len(expr.args) == 1: + return self._print(arg0) + return "%s%s(%s, %s)" % (self._ns, cpp_fun, self._print(arg0), + self._print(sympy_fun(*expr.args[1:]))) + + def _print_Min(self, expr): + from sympy.functions.elementary.miscellaneous import Min + return self._print_min_max(expr, "min", Min) + + def _print_Max(self, expr): + from sympy.functions.elementary.miscellaneous import Max + return self._print_min_max(expr, "max", Max) + + def _get_sym_lines_array( + self, + equations: sp.Matrix, + variable: str, + indent_level: int + ) -> List[str]: + """ + Generate C++ code for assigning symbolic terms in symbols to C++ array + `variable`. 
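+
+        Each non-zero entry yields one assignment line, e.g. (illustrative)
+        ``variable[0] = -1.0*k1*x0;``; zero entries are skipped.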
+ + :param equations: + vectors of symbolic expressions + + :param variable: + name of the C++ array to assign to + + :param indent_level: + indentation level (number of leading blanks) + + :return: + C++ code as list of lines + """ + return [ + ' ' * indent_level + f'{variable}[{index}] = ' + f'{self.doprint(math)};' + for index, math in enumerate(equations) + if math not in [0, 0.0] + ] + + def _get_sym_lines_symbols( + self, symbols: sp.Matrix, + equations: sp.Matrix, + variable: str, + indent_level: int + ) -> List[str]: + """ + Generate C++ code for where array elements are directly replaced with + their corresponding macro symbol + + :param symbols: + vectors of symbols that equations are assigned to + + :param equations: + vectors of expressions + + :param variable: + name of the C++ array to assign to, only used in comments + + :param indent_level: + indentation level (number of leading blanks) + + :return: + C++ code as list of lines + """ + indent = " " * indent_level + + def format_regular_line(symbol, math, index): + return ( + f'{indent}{self.doprint(symbol)} = {self.doprint(math)};' + f' // {variable}[{index}]'.replace('\n', '\n' + indent) + ) + + if self.extract_cse: + # Extract common subexpressions + cse_sym_prefix = "__amici_cse_" + symbol_generator = numbered_symbols( + cls=sp.Symbol, prefix=cse_sym_prefix) + replacements, reduced_exprs = sp.cse( + equations, + symbols=symbol_generator, + order='none', + list=False, + ) + if replacements: + # we need toposort to handle the dependencies of extracted + # subexpressions + expr_dict = dict(itertools.chain(zip(symbols, reduced_exprs), + replacements)) + sorted_symbols = toposort({ + identifier: { + s for s in definition.free_symbols + if s in expr_dict + } + for (identifier, definition) in expr_dict.items() + }) + symbol_to_idx = {sym: idx for idx, sym in enumerate(symbols)} + + def format_line(symbol: sp.Symbol): + math = expr_dict[symbol] + if str(symbol).startswith(cse_sym_prefix): + return f'{indent}const realtype ' \ + f'{self.doprint(symbol)} ' \ + f'= {self.doprint(math)};' + elif math not in [0, 0.0]: + return format_regular_line( + symbol, math, symbol_to_idx[symbol]) + return [ + line + for symbol_group in sorted_symbols + for symbol in sorted(symbol_group, key=str) + if (line := format_line(symbol)) + ] + + return [ + format_regular_line(sym, math, index) + for index, (sym, math) in enumerate(zip(symbols, equations)) + if math not in [0, 0.0] + ] + + def csc_matrix( + self, + matrix: sp.Matrix, + rownames: List[sp.Symbol], + colnames: List[sp.Symbol], + identifier: Optional[int] = 0, + pattern_only: Optional[bool] = False + ) -> Tuple[ + List[int], List[int], sp.Matrix, List[str], sp.Matrix + ]: + """ + Generates the sparse symbolic identifiers, symbolic identifiers, + sparse matrix, column pointers and row values for a symbolic + variable + + :param matrix: + dense matrix to be sparsified + + :param rownames: + ids of the variable of which the derivative is computed (assuming + matrix is the jacobian) + + :param colnames: + ids of the variable with respect to which the derivative is computed + (assuming matrix is the jacobian) + + :param identifier: + additional identifier that gets appended to symbol names to + ensure their uniqueness in outer loops + + :param pattern_only: + flag for computing sparsity pattern without whole matrix + + :return: + symbol_col_ptrs, symbol_row_vals, sparse_list, symbol_list, + sparse_matrix + """ + idx = 0 + + nrows, ncols = matrix.shape + + if not pattern_only: + sparse_matrix = 
sp.zeros(nrows, ncols) + symbol_list = [] + sparse_list = [] + symbol_col_ptrs = [] + symbol_row_vals = [] + + for col in range(ncols): + symbol_col_ptrs.append(idx) + for row in range(nrows): + if matrix[row, col] == 0: + continue + + symbol_row_vals.append(row) + idx += 1 + symbol_name = f'd{self.doprint(rownames[row])}' \ + f'_d{self.doprint(colnames[col])}' + if identifier: + symbol_name += f'_{identifier}' + symbol_list.append(symbol_name) + if pattern_only: + continue + + sparse_matrix[row, col] = sp.Symbol(symbol_name, real=True) + sparse_list.append(matrix[row, col]) + + if idx == 0: + symbol_col_ptrs = [] # avoid bad memory access for empty matrices + else: + symbol_col_ptrs.append(idx) + + if pattern_only: + sparse_matrix = None + else: + sparse_list = sp.Matrix(sparse_list) + + return symbol_col_ptrs, symbol_row_vals, sparse_list, symbol_list, \ + sparse_matrix + + @staticmethod + def print_bool(expr) -> str: + """Print the boolean value of the given expression""" + return "true" if bool(expr) else "false" + + +def get_switch_statement(condition: str, cases: Dict[int, List[str]], + indentation_level: Optional[int] = 0, + indentation_step: Optional[str] = ' ' * 4): + """ + Generate code for switch statement + + :param condition: + Condition for switch + + :param cases: + Cases as dict with expressions as keys and statement as + list of strings + + :param indentation_level: + indentation level + + :param indentation_step: + indentation whitespace per level + + :return: + Code for switch expression as list of strings + + """ + lines = [] + + if not cases: + return lines + + indent0 = indentation_level * indentation_step + indent1 = (indentation_level + 1) * indentation_step + indent2 = (indentation_level + 2) * indentation_step + for expression, statements in cases.items(): + if statements: + lines.extend([ + f'{indent1}case {expression}:', + *(f"{indent2}{statement}" for statement in statements), + f'{indent2}break;' + ]) + + if lines: + lines.insert(0, f'{indent0}switch({condition}) {{') + lines.append(indent0 + '}') + + return lines diff --git a/python/sdist/amici/gradient_check.py b/python/sdist/amici/gradient_check.py deleted file mode 120000 index 3402ef0822..0000000000 --- a/python/sdist/amici/gradient_check.py +++ /dev/null @@ -1 +0,0 @@ -../../amici/gradient_check.py \ No newline at end of file diff --git a/python/sdist/amici/gradient_check.py b/python/sdist/amici/gradient_check.py new file mode 100644 index 0000000000..76a17817c2 --- /dev/null +++ b/python/sdist/amici/gradient_check.py @@ -0,0 +1,307 @@ +""" +Finite Difference Check +----------------------- +This module provides functions to automatically check correctness of amici +computed sensitivities using finite difference approximations +""" + +from . import ( + runAmiciSimulation, SensitivityOrder, AMICI_SUCCESS, SensitivityMethod, + Model, Solver, ExpData, ReturnData, ParameterScaling) +import numpy as np +import copy + +from typing import Callable, Optional, List, Sequence + + +def check_finite_difference( + x0: Sequence[float], + model: Model, + solver: Solver, + edata: ExpData, + ip: int, + fields: List[str], + atol: Optional[float] = 1e-4, + rtol: Optional[float] = 1e-4, + epsilon: Optional[float] = 1e-3 +) -> None: + """ + Checks the computed sensitivity based derivatives against a finite + difference approximation. 
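+
+    A central difference is used: the single parameter ``ip`` is perturbed
+    by roughly ±``epsilon``/2 (multiplicatively, for non-zero unscaled
+    parameters) and the reported sensitivities are compared against
+    ``(f(p_forward) - f(p_backward)) / (p_forward - p_backward)``.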
+
+    :param x0:
+        parameter value at which to check finite difference approximation
+
+    :param model:
+        amici model
+
+    :param solver:
+        amici solver
+
+    :param edata:
+        experimental data
+
+    :param ip:
+        parameter index
+
+    :param fields:
+        rdata fields for which to check the gradient
+
+    :param atol:
+        absolute tolerance for comparison
+
+    :param rtol:
+        relative tolerance for comparison
+
+    :param epsilon:
+        finite difference step-size
+
+    """
+    og_sensitivity_order = solver.getSensitivityOrder()
+    og_parameters = model.getParameters()
+    og_plist = model.getParameterList()
+    if edata:
+        og_eplist = edata.plist
+
+    # sensitivity
+    p = copy.deepcopy(x0)
+    plist = [ip]
+
+    model.setParameters(p)
+    model.setParameterList(plist)
+    if edata:
+        edata.plist = plist
+
+    # simulation with gradient
+    if int(og_sensitivity_order) < int(SensitivityOrder.first):
+        solver.setSensitivityOrder(SensitivityOrder.first)
+    rdata = runAmiciSimulation(model, solver, edata)
+    if rdata['status'] != AMICI_SUCCESS:
+        raise AssertionError(f"Simulation failed (status {rdata['status']})")
+
+    # finite difference
+    solver.setSensitivityOrder(SensitivityOrder.none)
+
+    pf = copy.deepcopy(x0)
+    pb = copy.deepcopy(x0)
+    pscale = model.getParameterScale()[ip]
+    if x0[ip] == 0 or pscale != int(ParameterScaling.none):
+        pf[ip] += epsilon / 2
+        pb[ip] -= epsilon / 2
+    else:
+        pf[ip] *= 1 + epsilon / 2
+        pb[ip] /= 1 + epsilon / 2
+
+    # forward:
+    model.setParameters(pf)
+    rdataf = runAmiciSimulation(model, solver, edata)
+    if rdataf['status'] != AMICI_SUCCESS:
+        raise AssertionError(f"Simulation failed (status {rdataf['status']})")
+
+    # backward:
+    model.setParameters(pb)
+    rdatab = runAmiciSimulation(model, solver, edata)
+    if rdatab['status'] != AMICI_SUCCESS:
+        raise AssertionError(f"Simulation failed (status {rdatab['status']})")
+
+    for field in fields:
+        sensi_raw = rdata[f's{field}']
+        fd = (rdataf[field] - rdatab[field]) / (pf[ip] - pb[ip])
+        if len(sensi_raw.shape) == 1:
+            sensi = sensi_raw[0]
+        elif len(sensi_raw.shape) == 2:
+            sensi = sensi_raw[:, 0]
+        elif len(sensi_raw.shape) == 3:
+            sensi = sensi_raw[:, 0, :]
+        else:
+            raise NotImplementedError()
+
+        _check_close(sensi, fd, atol=atol, rtol=rtol, field=field, ip=ip)
+
+    solver.setSensitivityOrder(og_sensitivity_order)
+    model.setParameters(og_parameters)
+    model.setParameterList(og_plist)
+    if edata:
+        edata.plist = og_eplist
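+
+
+# For illustration only (hypothetical usage, not part of this module's API):
+# after building and importing a model, one would typically enable forward
+# sensitivities on the solver and then compare all gradients against finite
+# differences via
+#
+#     solver = model.getSolver()
+#     solver.setSensitivityMethod(SensitivityMethod.forward)
+#     solver.setSensitivityOrder(SensitivityOrder.first)
+#     check_derivatives(model, solver, edata=None, epsilon=1e-4)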
+
+
+def check_derivatives(
+        model: Model,
+        solver: Solver,
+        edata: Optional[ExpData] = None,
+        atol: Optional[float] = 1e-4,
+        rtol: Optional[float] = 1e-4,
+        epsilon: Optional[float] = 1e-3,
+        check_least_squares: bool = True,
+        skip_zero_pars: bool = False
+) -> None:
+    """
+    Finite differences check for likelihood gradient.
+
+    :param model:
+        amici model
+
+    :param solver:
+        amici solver
+
+    :param edata:
+        experimental data
+
+    :param atol:
+        absolute tolerance for comparison
+
+    :param rtol:
+        relative tolerance for comparison
+
+    :param epsilon:
+        finite difference step-size
+
+    :param check_least_squares:
+        whether to check least squares related values.
+
+    :param skip_zero_pars:
+        whether to skip FD checks for parameters that are zero
+
+    """
+    p = np.array(model.getParameters())
+
+    og_sens_order = solver.getSensitivityOrder()
+
+    if int(og_sens_order) < int(SensitivityOrder.first):
+        solver.setSensitivityOrder(SensitivityOrder.first)
+    rdata = runAmiciSimulation(model, solver, edata)
+    solver.setSensitivityOrder(og_sens_order)
+
+    if rdata['status'] != AMICI_SUCCESS:
+        raise AssertionError(f"Simulation failed (status {rdata['status']})")
+
+    fields = []
+
+    if solver.getSensitivityMethod() == SensitivityMethod.forward and \
+            solver.getSensitivityOrder() <= SensitivityOrder.first:
+        fields.append('x')
+
+    leastsquares_applicable = \
+        solver.getSensitivityMethod() == SensitivityMethod.forward \
+        and edata is not None
+
+    if 'ssigmay' in rdata.keys() \
+            and rdata['ssigmay'] is not None \
+            and rdata['ssigmay'].any() and not model.getAddSigmaResiduals():
+        leastsquares_applicable = False
+
+    if check_least_squares and leastsquares_applicable:
+        fields += ['res', 'y']
+
+        _check_results(rdata, 'FIM', np.dot(rdata['sres'].T, rdata['sres']),
+                       atol=1e-8, rtol=1e-4)
+        _check_results(rdata, 'sllh', -np.dot(rdata['res'].T, rdata['sres']),
+                       atol=1e-8, rtol=1e-4)
+
+    if edata is not None:
+        fields.append('llh')
+
+    for ip, pval in enumerate(p):
+        if pval == 0.0 and skip_zero_pars:
+            continue
+        check_finite_difference(p, model, solver, edata, ip, fields,
+                                atol=atol, rtol=rtol, epsilon=epsilon)
+
+
+def _check_close(
+        result: np.ndarray,
+        expected: np.ndarray,
+        atol: float,
+        rtol: float,
+        field: str,
+        ip: Optional[int] = None,
+        verbose: Optional[bool] = True,
+) -> None:
+    """
+    Compares computed values against expected values and provides rich
+    output information.
+
+    :param result:
+        computed values
+
+    :param expected:
+        expected values
+
+    :param field:
+        rdata field for which the gradient is checked, only for error
+        reporting
+
+    :param atol:
+        absolute tolerance for comparison
+
+    :param rtol:
+        relative tolerance for comparison
+
+    :param ip:
+        parameter index, for more informative output
+
+    :param verbose:
+        produce a more verbose error message in case of unmatched
+        expectations
+    """
+    close = np.isclose(result, expected, atol=atol, rtol=rtol, equal_nan=True)
+    if close.all():
+        return
+
+    if ip is None:
+        index_str = ''
+        check_type = 'Regression check'
+    else:
+        index_str = f'at index ip={ip} '
+        check_type = 'FD check'
+
+    lines = [f'{check_type} failed for {field} {index_str}for '
+             f'{close.size - close.sum()} indices:']
+    if verbose:
+        for idx in np.argwhere(~close):
+            idx = tuple(idx)
+            if result.shape:
+                rr = result[idx]
+            else:
+                rr = result
+            lines.append(
+                f"\tat {idx}: Expected {expected[idx]}, got {rr}")
+    adev = np.abs(result - expected)
+    rdev = np.abs((result - expected) / (expected + atol))
+    lines.append(f'max(adev): {adev.max()}, max(rdev): {rdev.max()}')
+
+    raise AssertionError("\n".join(lines))
+
+
+def _check_results(
+        rdata: ReturnData,
+        field: str,
+        expected: np.ndarray,
+        atol: float,
+        rtol: float
+) -> None:
+    """
+    Checks whether rdata[field] agrees with expected according to provided
+    tolerances.
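+
+    For example (illustrative), ``_check_results(rdata, 'x', x_expected,
+    atol=1e-8, rtol=1e-4)`` raises an ``AssertionError`` if any entry of
+    ``rdata['x']`` deviates from ``x_expected`` beyond the given tolerances.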
+
+    :param rdata:
+        simulation results as returned by
+        :meth:`amici.amici.runAmiciSimulation`
+
+    :param field:
+        name of the field to check
+
+    :param expected:
+        expected values
+
+    :param atol:
+        absolute tolerance for comparison
+
+    :param rtol:
+        relative tolerance for comparison
+    """
+
+    result = rdata[field]
+    if type(result) is float:
+        result = np.array(result)
+
+    _check_close(result=result, expected=expected,
+                 atol=atol, rtol=rtol, field=field)
diff --git a/python/sdist/amici/import_utils.py b/python/sdist/amici/import_utils.py
deleted file mode 120000
index ca30c75bf9..0000000000
--- a/python/sdist/amici/import_utils.py
+++ /dev/null
@@ -1 +0,0 @@
-../../amici/import_utils.py
\ No newline at end of file
diff --git a/python/sdist/amici/import_utils.py b/python/sdist/amici/import_utils.py
new file mode 100644
index 0000000000..65d7285a65
--- /dev/null
+++ b/python/sdist/amici/import_utils.py
@@ -0,0 +1,682 @@
+"""Miscellaneous functions related to model import, independent of any specific
+ model format"""
+import enum
+import itertools as itt
+import numbers
+import sys
+from typing import (Any, Callable, Dict, Iterable, Optional, Sequence,
+                    SupportsFloat, Tuple, Union)
+
+import sympy as sp
+from sympy.functions.elementary.piecewise import ExprCondPair
+from sympy.logic.boolalg import BooleanAtom
+from toposort import toposort
+
+RESERVED_SYMBOLS = ['x', 'k', 'p', 'y', 'w', 'h', 't', 'AMICI_EMPTY_BOLUS']
+
+try:
+    import pysb
+except ImportError:
+    pysb = None
+
+SymbolDef = Dict[sp.Symbol, Union[Dict[str, sp.Expr], sp.Expr]]
+
+
+# Monkey-patch toposort CircularDependencyError to handle non-sortable objects,
+# such as sympy objects
+class CircularDependencyError(ValueError):
+    def __init__(self, data):
+        # Sort the data just to make the output consistent, for use in
+        # error messages. That's convenient for doctests.
+        s = "Circular dependencies exist among these items: {{{}}}".format(
+            ", ".join(
+                "{!r}:{!r}".format(key, value) for key, value in sorted(
+                    {str(k): v for k, v in data.items()}.items())
+            )
+        )
+        super(CircularDependencyError, self).__init__(s)
+        self.data = data
+
+
+setattr(sys.modules["toposort"], "CircularDependencyError",
+        CircularDependencyError)
+
+
+class ObservableTransformation(str, enum.Enum):
+    """
+    Different modes of observable transformation.
+    """
+    LOG10 = 'log10'
+    LOG = 'log'
+    LIN = 'lin'
+
+
+def noise_distribution_to_observable_transformation(
+        noise_distribution: Union[str, Callable]
+) -> ObservableTransformation:
+    """
+    Parse noise distribution string and extract observable transformation
+
+    :param noise_distribution:
+        see :func:`noise_distribution_to_cost_function`
+
+    :return:
+        observable transformation
+    """
+    if isinstance(noise_distribution, str):
+        if noise_distribution.startswith('log-'):
+            return ObservableTransformation.LOG
+        if noise_distribution.startswith('log10-'):
+            return ObservableTransformation.LOG10
+
+    return ObservableTransformation.LIN
+
+
+def noise_distribution_to_cost_function(
+        noise_distribution: Union[str, Callable]
+) -> Callable[[str], str]:
+    """
+    Parse noise distribution string to a cost function definition amici can
+    work with.
+
+    The noise distributions listed in the following are supported. :math:`m`
+    denotes the measurement, :math:`y` the simulation, and :math:`\\sigma` a
+    distribution scale parameter
+    (currently, AMICI only supports a single distribution parameter).
+
+    - `'normal'`, `'lin-normal'`: A normal distribution:
+
+    .. math::
+        \\pi(m|y,\\sigma) = \\frac{1}{\\sqrt{2\\pi}\\sigma}\\
+        exp\\left(-\\frac{(m-y)^2}{2\\sigma^2}\\right)
+
+    - `'log-normal'`: A log-normal distribution (i.e. log(m) is
+      normally distributed):
+
+    .. math::
+        \\pi(m|y,\\sigma) = \\frac{1}{\\sqrt{2\\pi}\\sigma m}\\
+        exp\\left(-\\frac{(\\log m - \\log y)^2}{2\\sigma^2}\\right)
+
+    - `'log10-normal'`: A log10-normal distribution (i.e. log10(m) is
+      normally distributed):
+
+    .. math::
+        \\pi(m|y,\\sigma) = \\frac{1}{\\sqrt{2\\pi}\\sigma m \\log(10)}\\
+        exp\\left(-\\frac{(\\log_{10} m - \\log_{10} y)^2}{2\\sigma^2}\\right)
+
+    - `'laplace'`, `'lin-laplace'`: A Laplace distribution:
+
+    .. math::
+        \\pi(m|y,\\sigma) = \\frac{1}{2\\sigma}
+        \\exp\\left(-\\frac{|m-y|}{\\sigma}\\right)
+
+    - `'log-laplace'`: A log-Laplace distribution (i.e. log(m) is Laplace
+      distributed):
+
+    .. math::
+        \\pi(m|y,\\sigma) = \\frac{1}{2\\sigma m}
+        \\exp\\left(-\\frac{|\\log m - \\log y|}{\\sigma}\\right)
+
+    - `'log10-laplace'`: A log10-Laplace distribution (i.e. log10(m) is
+      Laplace distributed):
+
+    .. math::
+        \\pi(m|y,\\sigma) = \\frac{1}{2\\sigma m \\log(10)}
+        \\exp\\left(-\\frac{|\\log_{10} m - \\log_{10} y|}{\\sigma}\\right)
+
+    - `'binomial'`, `'lin-binomial'`: A (continuation of a discrete) binomial
+      distribution, parameterized via the success probability
+      :math:`p=\\sigma`:
+
+    .. math::
+        \\pi(m|y,\\sigma) = \\operatorname{Heaviside}(y-m) \\cdot
+        \\frac{\\Gamma(y+1)}{\\Gamma(m+1) \\Gamma(y-m+1)}
+        \\sigma^m (1-\\sigma)^{(y-m)}
+
+    - `'negative-binomial'`, `'lin-negative-binomial'`: A (continuation of a
+      discrete) negative binomial distribution, with `mean = y`,
+      parameterized via success probability `p`:
+
+    .. math::
+
+        \\pi(m|y,\\sigma) = \\frac{\\Gamma(m+r)}{\\Gamma(m+1) \\Gamma(r)}
+        (1-\\sigma)^m \\sigma^r
+
+    where
+
+    .. math::
+        r = \\frac{1-\\sigma}{\\sigma} y
+
+    The distributions above are for a single data point.
+    For a collection :math:`D=\\{m_i\\}_i` of data points and corresponding
+    simulations :math:`Y=\\{y_i\\}_i` and noise parameters
+    :math:`\\Sigma=\\{\\sigma_i\\}_i`, AMICI assumes independence,
+    i.e. the full distribution is
+
+    .. math::
+        \\pi(D|Y,\\Sigma) = \\prod_i\\pi(m_i|y_i,\\sigma_i)
+
+    AMICI uses the logarithm :math:`\\log(\\pi(m|y,\\sigma))`.
+
+    In addition to the above-mentioned distributions, it is also possible to
+    pass a function taking a symbol string and returning a log-distribution
+    string with variables '{str_symbol}', 'm{str_symbol}', 'sigma{str_symbol}'
+    for y, m, sigma, respectively.
+
+    :param noise_distribution: An identifier specifying a noise model.
+        Possible values are
+
+        {`'normal'`, `'lin-normal'`, `'log-normal'`, `'log10-normal'`,
+        `'laplace'`, `'lin-laplace'`, `'log-laplace'`, `'log10-laplace'`,
+        `'binomial'`, `'lin-binomial'`, `'negative-binomial'`,
+        `'lin-negative-binomial'`}
+
+        For the meaning of the values see above.
+
+    :return: A function that takes a symbol name and then creates a cost
+        function string (negative log-likelihood) from it, which can be
+        sympified.
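+
+        For example (illustrative),
+        ``noise_distribution_to_cost_function('normal')('y1')`` evaluates to
+        ``'0.5*log(2*pi*sigmay1**2) + 0.5*((y1 - my1) / sigmay1)**2'``.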
+ """ + + if isinstance(noise_distribution, Callable): + return noise_distribution + + if noise_distribution in ['normal', 'lin-normal']: + y_string = '0.5*log(2*pi*{sigma}**2) + 0.5*(({y} - {m}) / {sigma})**2' + elif noise_distribution == 'log-normal': + y_string = '0.5*log(2*pi*{sigma}**2*{m}**2) ' \ + '+ 0.5*((log({y}) - log({m})) / {sigma})**2' + elif noise_distribution == 'log10-normal': + y_string = '0.5*log(2*pi*{sigma}**2*{m}**2*log(10)**2) ' \ + '+ 0.5*((log({y}, 10) - log({m}, 10)) / {sigma})**2' + elif noise_distribution in ['laplace', 'lin-laplace']: + y_string = 'log(2*{sigma}) + Abs({y} - {m}) / {sigma}' + elif noise_distribution == 'log-laplace': + y_string = 'log(2*{sigma}*{m}) + Abs(log({y}) - log({m})) / {sigma}' + elif noise_distribution == 'log10-laplace': + y_string = 'log(2*{sigma}*{m}*log(10)) ' \ + '+ Abs(log({y}, 10) - log({m}, 10)) / {sigma}' + elif noise_distribution in ['binomial', 'lin-binomial']: + # Binomial noise model parameterized via success probability p + y_string = '- log(Heaviside({y} - {m})) - loggamma({y}+1) ' \ + '+ loggamma({m}+1) + loggamma({y}-{m}+1) ' \ + '- {m} * log({sigma}) - ({y} - {m}) * log(1-{sigma})' + elif noise_distribution in ['negative-binomial', 'lin-negative-binomial']: + # Negative binomial noise model of the number of successes m + # (data) before r=(1-sigma)/sigma * y failures occur, + # with mean number of successes y (simulation), + # parameterized via success probability p = sigma. + r = '{y} * (1-{sigma}) / {sigma}' + y_string = f'- loggamma({{m}}+{r}) + loggamma({{m}}+1) ' \ + f'+ loggamma({r}) - {r} * log(1-{{sigma}}) ' \ + f'- {{m}} * log({{sigma}})' + else: + raise ValueError( + f"Cost identifier {noise_distribution} not recognized.") + + def nllh_y_string(str_symbol): + y, m, sigma = _get_str_symbol_identifiers(str_symbol) + return y_string.format(y=y, m=m, sigma=sigma) + + return nllh_y_string + + +def _get_str_symbol_identifiers(str_symbol: str) -> tuple: + """Get identifiers for simulation, measurement, and sigma.""" + y, m, sigma = f"{str_symbol}", f"m{str_symbol}", f"sigma{str_symbol}" + return y, m, sigma + + +def smart_subs_dict(sym: sp.Expr, + subs: SymbolDef, + field: Optional[str] = None, + reverse: bool = True) -> sp.Expr: + """ + Substitutes expressions completely flattening them out. Requires + sorting of expressions with toposort. + + :param sym: + Symbolic expression in which expressions will be substituted + + :param subs: + Substitutions + + :param field: + Field of substitution expressions in subs.values(), if applicable + + :param reverse: + Whether ordering in subs should be reversed. Note that substitution + requires the reverse order of what is required for evaluation. 
+
+    :return:
+        Substituted symbolic expression
+    """
+    s = [
+        (eid, expr[field] if field is not None else expr)
+        for eid, expr in subs.items()
+    ]
+    if reverse:
+        s.reverse()
+    for substitution in s:
+        # note that substitution may change free symbols, so we have to do
+        # this recursively
+        if sym.has(substitution[0]):
+            sym = sym.subs(*substitution)
+    return sym
+
+
+def smart_subs(element: sp.Expr, old: sp.Symbol, new: sp.Expr) -> sp.Expr:
+    """
+    Optimized substitution that checks whether anything needs to be done first
+
+    :param element:
+        substitution target
+
+    :param old:
+        to be substituted
+
+    :param new:
+        substitution value
+
+    :return:
+        substituted expression
+    """
+    return element.subs(old, new) if element.has(old) else element
+
+
+def toposort_symbols(symbols: SymbolDef,
+                     field: Optional[str] = None) -> SymbolDef:
+    """
+    Topologically sort symbol definitions according to their interdependency
+
+    :param symbols:
+        symbol definitions
+
+    :param field:
+        field of definition.values() that is used to compute interdependency
+
+    :return:
+        ordered symbol definitions
+    """
+    sorted_symbols = toposort({
+        identifier: {
+            s for s in (
+                definition[field] if field is not None else definition
+            ).free_symbols
+            if s in symbols
+        }
+        for identifier, definition
+        in symbols.items()
+    })
+    return {
+        s: symbols[s]
+        for symbol_group in sorted_symbols
+        for s in sorted(symbol_group, key=str)
+    }
+
+
+def _parse_special_functions(sym: sp.Expr, toplevel: bool = True) -> sp.Expr:
+    """
+    Recursively checks the symbolic expression for functions which have to be
+    parsed in a special way, such as piecewise functions
+
+    :param sym:
+        symbolic expressions
+
+    :param toplevel:
+        as this is called recursively, are we in the top level expression?
+    """
+    args = tuple(arg if arg.__class__.__name__ == 'piecewise'
+                 and sym.__class__.__name__ == 'piecewise'
+                 else _parse_special_functions(arg, False)
+                 for arg in sym.args)
+
+    fun_mappings = {
+        'times': sp.Mul,
+        'xor': sp.Xor,
+        'abs': sp.Abs,
+        'min': sp.Min,
+        'max': sp.Max,
+        'ceil': sp.functions.ceiling,
+        'floor': sp.functions.floor,
+        'factorial': sp.functions.factorial,
+        'arcsin': sp.functions.asin,
+        'arccos': sp.functions.acos,
+        'arctan': sp.functions.atan,
+        'arccot': sp.functions.acot,
+        'arcsec': sp.functions.asec,
+        'arccsc': sp.functions.acsc,
+        'arcsinh': sp.functions.asinh,
+        'arccosh': sp.functions.acosh,
+        'arctanh': sp.functions.atanh,
+        'arccoth': sp.functions.acoth,
+        'arcsech': sp.functions.asech,
+        'arccsch': sp.functions.acsch,
+    }
+
+    if sym.__class__.__name__ in fun_mappings:
+        return fun_mappings[sym.__class__.__name__](*args)
+
+    elif sym.__class__.__name__ == 'piecewise' \
+            or isinstance(sym, sp.Piecewise):
+        if isinstance(sym, sp.Piecewise):
+            # this is sympy piecewise, can't be nested
+            denested_args = args
+        else:
+            # this is sbml piecewise, can be nested
+            denested_args = _denest_piecewise(args)
+        return _parse_piecewise_to_heaviside(denested_args)
+
+    if sym.__class__.__name__ == 'plus' and not sym.args:
+        return sp.Float(0.0)
+
+    if isinstance(sym, (sp.Function, sp.Mul, sp.Add, sp.Pow)):
+        sym._args = args
+
+    elif toplevel and isinstance(sym, BooleanAtom):
+        # Replace boolean constants by numbers so they can be differentiated.
+        # We must not replace inside Piecewise functions; therefore, we only
+        # replace if the complete expression consists only of a Boolean value.
+        sym = sp.Float(int(bool(sym)))
+
+    return sym
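+
+
+# For illustration: given sympy's Piecewise((x, x < 0), (2, True)),
+# `_parse_special_functions` delegates to `_parse_piecewise_to_heaviside`
+# below, which yields (up to float coefficients)
+#     x*(1 - Heaviside(x)) + 2*Heaviside(x)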
+
+
+def _denest_piecewise(
+        args: Sequence[Union[sp.Expr, sp.logic.boolalg.Boolean, bool]]
+) -> Tuple[Union[sp.Expr, sp.logic.boolalg.Boolean, bool], ...]:
+    """
+    Denest piecewise functions that contain piecewise functions as conditions
+
+    :param args:
+        Arguments to the piecewise function
+
+    :return:
+        Arguments where conditions no longer contain piecewise functions and
+        the conditional dependency is flattened out
+    """
+    args_out = []
+    for coeff, cond in grouper(args, 2, True):
+        # handling of this case is explicitly disabled in
+        # _parse_special_functions as keeping track of coeff/cond
+        # arguments is tricky. Simpler to just parse them out here
+        if coeff.__class__.__name__ == 'piecewise':
+            coeff = _parse_special_functions(coeff, False)
+
+        # we can have conditions that are piecewise functions
+        # returning True or False
+        if cond.__class__.__name__ == 'piecewise':
+            # this keeps track of the conditional that the previous
+            # piece was picked
+            previous_was_picked = sp.false
+            # recursively denest those first
+            for sub_coeff, sub_cond in grouper(
+                    _denest_piecewise(cond.args), 2, True
+            ):
+                # flatten the individual pieces
+                pick_this = sp.And(
+                    sp.Not(previous_was_picked), sub_cond
+                )
+                if sub_coeff == sp.true:
+                    args_out.extend([coeff, pick_this])
+                previous_was_picked = pick_this
+
+        else:
+            args_out.extend([coeff, cond])
+    # cut off last condition as that's the default
+    return tuple(args_out[:-1])
+
+
+def _parse_piecewise_to_heaviside(args: Iterable[sp.Expr]) -> sp.Expr:
+    """
+    Piecewise functions cannot be transformed into C++ right away, but AMICI
+    has a special interface for Heaviside functions, so we transform them.
+
+    :param args:
+        symbolic expressions for arguments of the piecewise function
+    """
+    # iteratively build the formula; `not_condition` tracks that none of the
+    # previously processed conditions was active
+    formula = sp.Float(0.0)
+    not_condition = sp.Float(1.0)
+
+    if all(isinstance(arg, ExprCondPair) for arg in args):
+        # sympy piecewise
+        grouped_args = args
+    else:
+        # sbml piecewise
+        grouped_args = grouper(args, 2, True)
+
+    for coeff, trigger in grouped_args:
+        if isinstance(coeff, BooleanAtom):
+            coeff = sp.Float(int(bool(coeff)))
+
+        if trigger == sp.true:
+            return formula + coeff * not_condition
+
+        if trigger == sp.false:
+            continue
+
+        tmp = _parse_heaviside_trigger(trigger)
+        formula += coeff * sp.simplify(not_condition * tmp)
+        not_condition *= (1-tmp)
+
+    return formula
+
+
+def _parse_heaviside_trigger(trigger: sp.Expr) -> sp.Expr:
+    """
+    Recursively translates a boolean trigger function into a real valued
+    root function
+
+    :param trigger:
+    :return: real valued root function expression
+    """
+    if trigger.is_Relational:
+        root = trigger.args[0] - trigger.args[1]
+        _check_unsupported_functions(root, 'sympy.Expression')
+
+        # normalize such that we always implement <,
+        # this ensures that we can correctly evaluate the condition if
+        # simulation starts at H(0). This is achieved by translating
+        # conditionals into Heaviside functions H, which are implemented as
+        # unit steps with H(0) = 1
+        if isinstance(trigger, sp.core.relational.StrictLessThan):
+            # x < y => x - y < 0 => r < 0
+            return 1 - sp.Heaviside(root)
+        if isinstance(trigger, sp.core.relational.LessThan):
+            # x <= y => not(y < x) => not(y - x < 0) => not -r < 0
+            return sp.Heaviside(-root)
+        if isinstance(trigger, sp.core.relational.StrictGreaterThan):
+            # y > x => x - y < 0 => -r < 0
+            return 1 - sp.Heaviside(-root)
+        if isinstance(trigger, sp.core.relational.GreaterThan):
+            # y >= x => not(y < x) => not(y - x < 0) => not r < 0
+            return sp.Heaviside(root)
+
+    # or(x, y) = not(and(not(x), not(y)))
+    if isinstance(trigger, sp.Or):
+        return 1 - sp.Mul(*[1 - _parse_heaviside_trigger(arg)
+                            for arg in trigger.args])
+
+    if isinstance(trigger, sp.And):
+        return sp.Mul(*[_parse_heaviside_trigger(arg)
+                        for arg in trigger.args])
+
+    raise RuntimeError(
+        'AMICI cannot parse piecewise/event trigger functions with argument '
+        f'{trigger}.'
+    )
+
+
+def grouper(iterable: Iterable, n: int,
+            fillvalue: Any = None) -> Iterable[Tuple[Any, ...]]:
+    """
+    Collect data into fixed-length chunks or blocks
+
+    grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx
+
+    :param iterable:
+        any iterable
+
+    :param n:
+        chunk length
+
+    :param fillvalue:
+        padding for last chunk if length < n
+
+    :return: itertools.zip_longest of requested chunks
+    """
+    args = [iter(iterable)] * n
+    return itt.zip_longest(*args, fillvalue=fillvalue)
+
+
+def _check_unsupported_functions(sym: sp.Expr,
+                                 expression_type: str,
+                                 full_sym: Optional[sp.Expr] = None):
+    """
+    Recursively checks the symbolic expression for unsupported symbolic
+    functions
+
+    :param sym:
+        symbolic expressions
+
+    :param expression_type:
+        type of expression, only used when throwing errors
+
+    :param full_sym:
+        outermost symbolic expression in recursive checks, only used for
+        errors
+    """
+    if full_sym is None:
+        full_sym = sym
+
+    # note that sp.functions.factorial, sp.functions.ceiling,
+    # sp.functions.floor applied to numbers should be simplified out and
+    # thus pass this test
+    unsupported_functions = (
+        sp.functions.factorial, sp.functions.ceiling, sp.functions.floor,
+        sp.functions.sec, sp.functions.csc, sp.functions.cot,
+        sp.functions.asec, sp.functions.acsc, sp.functions.acot,
+        sp.functions.acsch, sp.functions.acoth,
+        sp.Mod, sp.core.function.UndefinedFunction
+    )
+
+    if isinstance(sym.func, unsupported_functions) \
+            or isinstance(sym, unsupported_functions):
+        raise RuntimeError(f'Encountered unsupported expression '
+                           f'"{sym.func}" of type '
+                           f'"{type(sym.func)}" as part of a '
+                           f'{expression_type}: "{full_sym}"!')
+    for arg in list(sym.args):
+        _check_unsupported_functions(arg, expression_type)
+
+
+def cast_to_sym(value: Union[SupportsFloat, sp.Expr, BooleanAtom],
+                input_name: str) -> sp.Expr:
+    """
+    Typecasts the value to :py:class:`sympy.Float` if possible, and ensures
+    the value is a symbolic expression.
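+
+    For example (illustrative), ``cast_to_sym(2, 'x0')`` returns
+    ``sympy.Float(2.0)`` and ``cast_to_sym(sympy.true, 'flag')`` returns
+    ``sympy.Float(1.0)``.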
+ + :param value: + value to be cast + + :param input_name: + name of input variable + + :return: + typecast value + """ + if isinstance(value, (sp.RealNumber, numbers.Number)): + value = sp.Float(float(value)) + elif isinstance(value, BooleanAtom): + value = sp.Float(float(bool(value))) + + if not isinstance(value, sp.Expr): + raise TypeError(f"Couldn't cast {input_name} to sympy.Expr, was " + f"{type(value)}") + + return value + + +def generate_measurement_symbol(observable_id: Union[str, sp.Symbol]): + """ + Generates the appropriate measurement symbol for the provided observable + + :param observable_id: + symbol (or string representation) of the observable + + :return: + symbol for the corresponding measurement + """ + if not isinstance(observable_id, str): + observable_id = strip_pysb(observable_id) + return symbol_with_assumptions(f'm{observable_id}') + + +def generate_regularization_symbol(observable_id: Union[str, sp.Symbol]): + """ + Generates the appropriate regularization symbol for the provided observable + + :param observable_id: + symbol (or string representation) of the observable + + :return: + symbol for the corresponding regularization + """ + if not isinstance(observable_id, str): + observable_id = strip_pysb(observable_id) + return symbol_with_assumptions(f'r{observable_id}') + + +def generate_flux_symbol( + reaction_index: int, + name: Optional[str] = None +) -> sp.Symbol: + """ + Generate identifier symbol for a reaction flux. + This function will always return the same unique python object for a + given entity. + + :param reaction_index: + index of the reaction to which the flux corresponds + :param name: + an optional identifier of the reaction to which the flux corresponds + :return: + identifier symbol + """ + if name is not None: + return symbol_with_assumptions(name) + + return symbol_with_assumptions(f'flux_r{reaction_index}') + + +def symbol_with_assumptions(name: str): + """ + Central function to create symbols with consistent, canonical assumptions + + :param name: + name of the symbol + + :return: + symbol with canonical assumptions + """ + return sp.Symbol(name, real=True) + + +def strip_pysb(symbol: sp.Basic) -> sp.Basic: + """ + Strips pysb info from a :class:`pysb.Component` object + + :param symbol: + symbolic expression + + :return: + stripped expression + """ + # strip pysb type and transform into a flat sympy.Symbol. 
+ # this ensures that the pysb type specific __repr__ is used when converting + # to string + if pysb and isinstance(symbol, pysb.Component): + return sp.Symbol(symbol.name, real=True) + else: + # in this case we will use sympy specific transform anyways + return symbol diff --git a/python/sdist/amici/logging.py b/python/sdist/amici/logging.py deleted file mode 120000 index b8d5e1b32e..0000000000 --- a/python/sdist/amici/logging.py +++ /dev/null @@ -1 +0,0 @@ -../../amici/logging.py \ No newline at end of file diff --git a/python/sdist/amici/logging.py b/python/sdist/amici/logging.py new file mode 100644 index 0000000000..eae753b29e --- /dev/null +++ b/python/sdist/amici/logging.py @@ -0,0 +1,202 @@ +""" +Logging +------- +This module provides custom logging functionality for other amici modules +""" + +import logging +import platform +import socket +import amici +import os +import warnings +import time +import functools + +from inspect import getouterframes, currentframe + +LOG_LEVEL_ENV_VAR = 'AMICI_LOG' +BASE_LOGGER_NAME = 'amici' +# Supported values for LOG_LEVEL_ENV_VAR +NAMED_LOG_LEVELS = {'NOTSET': logging.NOTSET, + 'DEBUG': logging.DEBUG, + 'INFO': logging.INFO, + 'WARNING': logging.WARNING, + 'ERROR': logging.ERROR, + 'CRITICAL': logging.CRITICAL} + +from typing import Optional, Callable, Union + + +def _setup_logger(level: Optional[int] = logging.WARNING, + console_output: Optional[bool] = True, + file_output: Optional[bool] = False, + capture_warnings: Optional[bool] = True) -> logging.Logger: + """ + Set up a new logging.Logger for AMICI logging + + :param level: + Logging level, typically using a constant like logging.INFO or + logging.DEBUG + + :param console_output: + Set up a default console log handler if True (default) + + :param file_output: + Supply a filename to copy all log output to that file, or + set to False to disable (default) + + :param capture_warnings: + Capture warnings from Python's warnings module if True (default) + + :return: + A :class:`logging.Logger` object for AMICI logging. Note that other + AMICI modules + should use a logger specific to their namespace instead by calling + :func:`get_logger`. + """ + log = logging.getLogger(BASE_LOGGER_NAME) + + # Logging level can be overridden with environment variable + if LOG_LEVEL_ENV_VAR in os.environ: + try: + level = int(os.environ[LOG_LEVEL_ENV_VAR]) + except ValueError: + # Try parsing as a name + level_name = os.environ[LOG_LEVEL_ENV_VAR] + if level_name in NAMED_LOG_LEVELS.keys(): + level = NAMED_LOG_LEVELS[level_name] + else: + raise ValueError(f'Environment variable {LOG_LEVEL_ENV_VAR} ' + f'contains an invalid value "{level_name}".' 
+                                 f' If set, its value must be one of '
+                                 f'{", ".join(NAMED_LOG_LEVELS.keys())}'
+                                 f' (case-sensitive) or an integer log level.')
+
+    log.setLevel(level)
+
+    # Remove default logging handler
+    log.handlers = []
+
+    log_fmt = logging.Formatter('%(asctime)s.%(msecs).3d - %(name)s - '
+                                '%(levelname)s - %(message)s',
+                                datefmt='%Y-%m-%d %H:%M:%S')
+
+    if console_output:
+        stream_handler = logging.StreamHandler()
+        stream_handler.setFormatter(log_fmt)
+        log.addHandler(stream_handler)
+
+    if file_output:
+        file_handler = logging.FileHandler(file_output)
+        file_handler.setFormatter(log_fmt)
+        log.addHandler(file_handler)
+
+    log.info('Logging started on AMICI version %s', amici.__version__)
+
+    log.debug('OS Platform: %s', platform.platform())
+    log.debug('Python version: %s', platform.python_version())
+    log.debug('Hostname: %s', socket.getfqdn())
+
+    logging.captureWarnings(capture_warnings)
+
+    return log
+
+
+def set_log_level(logger: logging.Logger, log_level: Union[int, bool]) -> None:
+    """Set the logging level of the given logger; ``True`` is mapped to
+    ``logging.DEBUG``."""
+    if log_level is not None and log_level is not False:
+        if isinstance(log_level, bool):
+            log_level = logging.DEBUG
+        elif not isinstance(log_level, int):
+            raise ValueError('log_level must be a boolean, integer or None')
+
+        if logger.getEffectiveLevel() != log_level:
+            logger.debug('Changing log_level from %d to %d' % (
+                logger.getEffectiveLevel(), log_level))
+            logger.setLevel(log_level)
+
+
+def get_logger(logger_name: Optional[str] = BASE_LOGGER_NAME,
+               log_level: Optional[int] = None,
+               **kwargs) -> logging.Logger:
+    """
+    Returns (if existent) or creates an AMICI logger
+
+    If the AMICI base logger has already been set up, this method will
+    return it or any of its descendant loggers without overriding the
+    settings - i.e. any values supplied as kwargs will be ignored.
+
+    :param logger_name:
+        Get a logger for a specific namespace, typically __name__
+        for code outside of classes or self.__module__ inside a class
+
+    :param log_level:
+        Override the default or preset log level for the requested logger.
+        None or False uses the default or preset value. True evaluates to
+        logging.DEBUG. Any integer is used directly.
+
+    :param console_output:
+        Set up a default console log handler if True (default). Only used when
+        the AMICI logger hasn't been set up yet.
+
+    :param file_output:
+        Supply a filename to copy all log output to that file, or set to
+        False to disable (default). Only used when the AMICI logger hasn't
+        been set up yet.
+
+    :param capture_warnings:
+        Capture warnings from Python's warnings module if True (default).
+        Only used when the AMICI logger hasn't been set up yet.
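+
+    Example (illustrative)::
+
+        logger = get_logger(__name__, logging.DEBUG)
+        logger.debug('Model import started')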
+ + :return: + A logging.Logger object with the requested name + """ + if BASE_LOGGER_NAME not in logging.Logger.manager.loggerDict.keys(): + _setup_logger(**kwargs) + elif kwargs: + warnings.warn('AMICI logger already exists, ignoring keyword ' + 'arguments to setup_logger') + + logger = logging.getLogger(logger_name) + + set_log_level(logger, log_level) + + return logger + + +def log_execution_time(description: str, logger: logging.Logger) -> Callable: + """ + Parameterized function decorator that enables automatic execution time + tracking + + :param description: + Description of what the decorated function does + + :param logger: + Logger to which execution timing will be printed + """ + def decorator_timer(func): + @functools.wraps(func) + def wrapper_timer(*args, **kwargs): + + # append pluses to indicate recursion level + recursion_level = sum( + frame.function == 'wrapper_timer' + and frame.filename == __file__ + for frame in getouterframes(currentframe(), context=0) + ) + + recursion = '' + if recursion_level > 1: + recursion = '+' * (recursion_level - 1) + + tstart = time.perf_counter() + rval = func(*args, **kwargs) + tend = time.perf_counter() + spacers = ' ' * max(54 - len(description) - len(logger.name) - + len(recursion), 0) + logger.info(f'Finished {description}{spacers}' + f'{recursion} ({(tend - tstart):.2E}s)') + return rval + return wrapper_timer + return decorator_timer diff --git a/python/sdist/amici/numpy.py b/python/sdist/amici/numpy.py deleted file mode 120000 index 54495e0290..0000000000 --- a/python/sdist/amici/numpy.py +++ /dev/null @@ -1 +0,0 @@ -../../amici/numpy.py \ No newline at end of file diff --git a/python/sdist/amici/numpy.py b/python/sdist/amici/numpy.py new file mode 100644 index 0000000000..52ea05e6fe --- /dev/null +++ b/python/sdist/amici/numpy.py @@ -0,0 +1,309 @@ +""" +C++ object views +---------------- +This module provides views on C++ objects for efficient access. +""" + +import numpy as np +import copy +import collections + +from . import ExpDataPtr, ReturnDataPtr, ExpData, ReturnData +from typing import Union, List, Dict, Iterator + + +class SwigPtrView(collections.abc.Mapping): + """ + Interface class to expose std::vector and scalar members of + swig wrapped C++ objects as numpy array attributes and fields. This + class is memory efficient as copies of the underlying C++ objects is + only created when respective fields are accessed for the first time. + Cached copies are used for all subsequent calls. + + :ivar _swigptr: pointer to the c++ object + :ivar _field_names: names of members that will be exposed as numpy arrays + :ivar _field_dimensions: dimensions of numpy arrays + :ivar _cache: dictionary with cached values + """ + + _swigptr = None + _field_names: List[str] = [] + _field_dimensions: Dict[str, List[int]] = dict() + + def __getitem__(self, item: str) -> Union[np.ndarray, float]: + """ + Access to field names, copies data from C++ object into numpy + array, reshapes according to field dimensions and stores values in + cache. 
+ + :param item: field name + :return: value + """ + if self._swigptr is None: + raise NotImplementedError('Cannot get items from abstract class.') + + if item == 'ptr': + return self._swigptr + + if item in self._cache: + return self._cache[item] + + if item == 'id': + return getattr(self._swigptr, item) + + if item not in self._field_names: + self.__missing__(item) + + value = field_as_numpy( + self._field_dimensions, item, self._swigptr + ) + self._cache[item] = value + return value + + def __missing__(self, key: str) -> None: + """ + Default behaviour for missing keys + + :param key: field name + """ + raise KeyError(f'Unknown field name {key}.') + + def __getattr__(self, item) -> Union[np.ndarray, float]: + """ + Attribute accessor for field names + + :param item: field name + + :returns: value + """ + return self.__getitem__(item) + + def __init__(self, swigptr): + """ + Constructor + + :param swigptr: pointer to the C++ object + """ + self._swigptr = swigptr + self._cache = dict() + super(SwigPtrView, self).__init__() + + def __len__(self) -> int: + """ + Returns the number of available keys/fields + + :returns: length of _field_names + """ + return len(self._field_names) + + def __iter__(self) -> Iterator: + """ + Create an iterator of the keys/fields + + :returns: iterator over _field_names + """ + return iter(self._field_names) + + def __copy__(self): + """ + Create a shallow copy + + :return: SwigPtrView shallow copy + """ + other = SwigPtrView(self._swigptr) + other._field_names = self._field_names + other._field_dimensions = self._field_dimensions + other._cache = self._cache + return other + + def __contains__(self, item) -> bool: + """ + Faster implementation of __contains__ that avoids copy of the field + + :param item: item to check for + + :returns: whether item is available as key + """ + return item in self._field_names + + def __deepcopy__(self, memo): + """ + Create a deep copy + + :param memo: dict with id-to-object mapping + + :returns: SwigPtrView deep copy + """ + other = SwigPtrView(self._swigptr) + other._field_names = copy.deepcopy(self._field_names) + other._field_dimensions = copy.deepcopy(self._field_dimensions) + other._cache = copy.deepcopy(self._cache) + return other + + +class ReturnDataView(SwigPtrView): + """ + Interface class for C++ Return Data objects that avoids possibly costly + copies of member data. 
+    """
+
+    _field_names = [
+        'ts', 'x', 'x0', 'x_ss', 'sx', 'sx0', 'sx_ss', 'y', 'sigmay',
+        'sy', 'ssigmay', 'z', 'rz', 'sigmaz', 'sz', 'srz',
+        'ssigmaz', 'sllh', 's2llh', 'J', 'xdot', 'status', 'llh',
+        'chi2', 'res', 'sres', 'FIM', 'w', 'preeq_wrms', 'preeq_t',
+        'preeq_numsteps', 'preeq_numstepsB', 'preeq_status', 'preeq_cpu_time',
+        'preeq_cpu_timeB', 'posteq_wrms', 'posteq_t', 'posteq_numsteps',
+        'posteq_numstepsB', 'posteq_status', 'posteq_cpu_time',
+        'posteq_cpu_timeB', 'numsteps', 'numrhsevals',
+        'numerrtestfails', 'numnonlinsolvconvfails', 'order', 'cpu_time',
+        'numstepsB', 'numrhsevalsB', 'numerrtestfailsB',
+        'numnonlinsolvconvfailsB', 'cpu_timeB', 'cpu_time_total'
+    ]
+
+    def __init__(self, rdata: Union[ReturnDataPtr, ReturnData]):
+        """
+        Constructor
+
+        :param rdata: pointer to the ReturnData instance
+        """
+        if not isinstance(rdata, (ReturnDataPtr, ReturnData)):
+            raise TypeError(f'Unsupported pointer {type(rdata)}, must be '
+                            f'amici.ReturnDataPtr!')
+        self._field_dimensions = {
+            'ts': [rdata.nt],
+            'x': [rdata.nt, rdata.nx],
+            'x0': [rdata.nx],
+            'x_ss': [rdata.nx],
+            'sx': [rdata.nt, rdata.nplist, rdata.nx],
+            'sx0': [rdata.nplist, rdata.nx],
+            'sx_ss': [rdata.nplist, rdata.nx],
+
+            # observables
+            'y': [rdata.nt, rdata.ny],
+            'sigmay': [rdata.nt, rdata.ny],
+            'sy': [rdata.nt, rdata.nplist, rdata.ny],
+            'ssigmay': [rdata.nt, rdata.nplist, rdata.ny],
+
+            # event observables
+            'z': [rdata.nmaxevent, rdata.nz],
+            'rz': [rdata.nmaxevent, rdata.nz],
+            'sigmaz': [rdata.nmaxevent, rdata.nz],
+            'sz': [rdata.nmaxevent, rdata.nplist, rdata.nz],
+            'srz': [rdata.nmaxevent, rdata.nplist, rdata.nz],
+            'ssigmaz': [rdata.nmaxevent, rdata.nplist, rdata.nz],
+
+            # objective function
+            'sllh': [rdata.nplist],
+            's2llh': [rdata.np, rdata.nplist],
+
+            'res': [rdata.nt * rdata.nytrue *
+                    (2 if rdata.sigma_res else 1)],
+            'sres': [rdata.nt * rdata.nytrue *
+                     (2 if rdata.sigma_res else 1), rdata.nplist],
+            'FIM': [rdata.nplist, rdata.nplist],
+
+            # diagnosis
+            'J': [rdata.nx_solver, rdata.nx_solver],
+            'w': [rdata.nt, rdata.nw],
+            'xdot': [rdata.nx_solver],
+            'preeq_numlinsteps': [rdata.newton_maxsteps, 2],
+            'preeq_numsteps': [1, 3],
+            'preeq_status': [1, 3],
+            'posteq_numlinsteps': [rdata.newton_maxsteps, 2],
+            'posteq_numsteps': [1, 3],
+            'posteq_status': [1, 3],
+            'numsteps': [rdata.nt],
+            'numrhsevals': [rdata.nt],
+            'numerrtestfails': [rdata.nt],
+            'numnonlinsolvconvfails': [rdata.nt],
+            'order': [rdata.nt],
+            'numstepsB': [rdata.nt],
+            'numrhsevalsB': [rdata.nt],
+            'numerrtestfailsB': [rdata.nt],
+            'numnonlinsolvconvfailsB': [rdata.nt],
+        }
+        super(ReturnDataView, self).__init__(rdata)
+
+    def __getitem__(self, item: str) -> Union[np.ndarray, ReturnDataPtr,
+                                              ReturnData, float]:
+        """
+        Custom getitem implementation shim to map `t` to `ts`
+
+        :param item: field/attribute key
+
+        :returns: self[item]
+        """
+        if item == 't':
+            item = 'ts'
+        return super(ReturnDataView, self).__getitem__(item)
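+
+
+# For illustration (hypothetical usage): `amici.runAmiciSimulation` returns
+# its results wrapped in a ReturnDataView, so fields can be read as, e.g.,
+#
+#     rdata = amici.runAmiciSimulation(model, solver, edata)
+#     x = rdata['x']      # state trajectories, shape (nt, nx)
+#     llh = rdata['llh']  # scalar log-likelihood
+#     t = rdata['t']      # alias for rdata['ts']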
+
+
+class ExpDataView(SwigPtrView):
+    """
+    Interface class for C++ ExpData objects that avoids possibly costly
+    copies of member data.
+    """
+
+    _field_names = [
+        'observedData', 'observedDataStdDev', 'observedEvents',
+        'observedEventsStdDev', 'fixedParameters',
+        'fixedParametersPreequilibration',
+        'fixedParametersPresimulation'
+    ]
+
+    def __init__(self, edata: Union[ExpDataPtr, ExpData]):
+        """
+        Constructor
+
+        :param edata: pointer to the ExpData instance
+        """
+        if not isinstance(edata, (ExpDataPtr, ExpData)):
+            raise TypeError(f'Unsupported pointer {type(edata)}, must be '
+                            f'amici.ExpDataPtr!')
+        self._field_dimensions = {  # observables
+            'observedData': [edata.nt(), edata.nytrue()],
+            'observedDataStdDev': [edata.nt(), edata.nytrue()],
+
+            # event observables
+            'observedEvents': [edata.nmaxevent(), edata.nztrue()],
+            'observedEventsStdDev': [edata.nmaxevent(), edata.nztrue()],
+
+            # fixed parameters
+            'fixedParameters': [len(edata.fixedParameters)],
+            'fixedParametersPreequilibration': [
+                len(edata.fixedParametersPreequilibration)],
+            'fixedParametersPresimulation': [
+                len(edata.fixedParametersPresimulation)],
+        }
+        edata.observedData = edata.getObservedData()
+        edata.observedDataStdDev = edata.getObservedDataStdDev()
+        edata.observedEvents = edata.getObservedEvents()
+        edata.observedEventsStdDev = edata.getObservedEventsStdDev()
+        super(ExpDataView, self).__init__(edata)
+
+
+def field_as_numpy(field_dimensions: Dict[str, List[int]],
+                   field: str, data: SwigPtrView) -> Union[np.ndarray,
+                                                           float,
+                                                           None]:
+    """
+    Convert data object field to numpy array with dimensions according to
+    specified field dimensions
+
+    :param field_dimensions: dimension specifications
+        dict({field: list([dim1, dim2, ...])})
+    :param field: Name of field
+    :param data: object with fields
+
+    :returns: Field Data as numpy array with dimensions according to
+        specified field dimensions
+    """
+    attr = getattr(data, field)
+    if field in field_dimensions:
+        if len(attr) == 0:
+            return None
+        else:
+            return np.array(attr).reshape(field_dimensions[field])
+    else:
+        return float(attr)
diff --git a/python/sdist/amici/ode_export.py b/python/sdist/amici/ode_export.py
deleted file mode 120000
index f6a27b74d4..0000000000
--- a/python/sdist/amici/ode_export.py
+++ /dev/null
@@ -1 +0,0 @@
-../../amici/ode_export.py
\ No newline at end of file
diff --git a/python/sdist/amici/ode_export.py b/python/sdist/amici/ode_export.py
new file mode 100644
index 0000000000..ee537efa69
--- /dev/null
+++ b/python/sdist/amici/ode_export.py
@@ -0,0 +1,3590 @@
+"""
+C++ Export
+----------
+This module provides all necessary functionality to specify an ODE model and
+generate executable C++ simulation code. The user generally won't have to
+directly call any function from this module as this will be done by
+:py:func:`amici.pysb_import.pysb2amici`,
+:py:func:`amici.sbml_import.SbmlImporter.sbml2amici` and
+:py:func:`amici.petab_import.import_model`.
+"""
+import contextlib
+import copy
+import itertools
+import logging
+import os
+import re
+import shutil
+import subprocess
+import sys
+from dataclasses import dataclass
+from itertools import chain, starmap
+from pathlib import Path
+from string import Template
+from typing import (Any, Callable, Dict, List, Optional, Sequence, Set, Tuple,
+                    Union)
+
+import numpy as np
+import sympy as sp
+from sympy.matrices.dense import MutableDenseMatrix
+from sympy.matrices.immutable import ImmutableDenseMatrix
+
+from . 
import (__commit__, __version__, amiciModulePath, amiciSrcPath, + amiciSwigPath, sbml_import) +from .constants import SymbolId +from .cxxcodeprinter import AmiciCxxCodePrinter, get_switch_statement +from .import_utils import (ObservableTransformation, generate_flux_symbol, + smart_subs_dict, strip_pysb, + symbol_with_assumptions, toposort_symbols) +from .logging import get_logger, log_execution_time, set_log_level +from .ode_model import * + + +# Template for model simulation main.cpp file +CXX_MAIN_TEMPLATE_FILE = os.path.join(amiciSrcPath, 'main.template.cpp') +# Template for model/swig/CMakeLists.txt +SWIG_CMAKE_TEMPLATE_FILE = os.path.join(amiciSwigPath, + 'CMakeLists_model.cmake') +# Template for model/CMakeLists.txt +MODEL_CMAKE_TEMPLATE_FILE = os.path.join(amiciSrcPath, + 'CMakeLists.template.cmake') + +IDENTIFIER_PATTERN = re.compile(r'^[a-zA-Z_]\w*$') +DERIVATIVE_PATTERN = re.compile(r'^d(x_rdata|xdot|\w+?)d(\w+?)(?:_explicit)?$') +@dataclass +class _FunctionInfo: + """Information on a model-specific generated C++ function + + :ivar arguments: argument list of the function. input variables should be + ``const``. + :ivar return_type: the return type of the function + :ivar assume_pow_positivity: + identifies the functions on which ``assume_pow_positivity`` will have + an effect when specified during model generation. generally these are + functions that are used for solving the ODE, where negative values may + negatively affect convergence of the integration algorithm + :ivar sparse: + specifies whether the result of this function will be stored in sparse + format. sparse format means that the function will only return an + array of nonzero values and not a full matrix. + :ivar generate_body: + indicates whether a model-specific implementation is to be generated + :ivar body: + the actual function body. 
will be filled later + """ + arguments: str = '' + return_type: str = 'void' + assume_pow_positivity: bool = False + sparse: bool = False + generate_body: bool = True + body: str = '' + + +# Information on a model-specific generated C++ function +# prototype for generated C++ functions, keys are the names of functions +functions = { + 'Jy': + _FunctionInfo( + 'realtype *Jy, const int iy, const realtype *p, ' + 'const realtype *k, const realtype *y, const realtype *sigmay, ' + 'const realtype *my' + ), + 'dJydsigma': + _FunctionInfo( + 'realtype *dJydsigma, const int iy, const realtype *p, ' + 'const realtype *k, const realtype *y, const realtype *sigmay, ' + 'const realtype *my' + ), + 'dJydy': + _FunctionInfo( + 'realtype *dJydy, const int iy, const realtype *p, ' + 'const realtype *k, const realtype *y, ' + 'const realtype *sigmay, const realtype *my', + sparse=True + ), + 'Jz': + _FunctionInfo( + 'realtype *Jz, const int iz, const realtype *p, const realtype *k, ' + 'const realtype *z, const realtype *sigmaz, const realtype *mz' + ), + 'dJzdsigma': + _FunctionInfo( + 'realtype *dJzdsigma, const int iz, const realtype *p, ' + 'const realtype *k, const realtype *z, const realtype *sigmaz, ' + 'const realtype *mz' + ), + 'dJzdz': + _FunctionInfo( + 'realtype *dJzdz, const int iz, const realtype *p, ' + 'const realtype *k, const realtype *z, const realtype *sigmaz, ' + 'const double *mz', + ), + 'Jrz': + _FunctionInfo( + 'realtype *Jrz, const int iz, const realtype *p, ' + 'const realtype *k, const realtype *rz, const realtype *sigmaz' + ), + 'dJrzdsigma': + _FunctionInfo( + 'realtype *dJrzdsigma, const int iz, const realtype *p, ' + 'const realtype *k, const realtype *rz, const realtype *sigmaz' + ), + 'dJrzdz': + _FunctionInfo( + 'realtype *dJrzdz, const int iz, const realtype *p, ' + 'const realtype *k, const realtype *rz, const realtype *sigmaz', + ), + 'root': + _FunctionInfo( + 'realtype *root, const realtype t, const realtype *x, ' + 'const realtype *p, const realtype *k, const realtype *h, ' + 'const realtype *tcl' + ), + 'dwdp': + _FunctionInfo( + 'realtype *dwdp, const realtype t, const realtype *x, ' + 'const realtype *p, const realtype *k, const realtype *h, ' + 'const realtype *w, const realtype *tcl, const realtype *dtcldp', + assume_pow_positivity=True, sparse=True + ), + 'dwdx': + _FunctionInfo( + 'realtype *dwdx, const realtype t, const realtype *x, ' + 'const realtype *p, const realtype *k, const realtype *h, ' + 'const realtype *w, const realtype *tcl', + assume_pow_positivity=True, sparse=True + ), + 'dwdw': + _FunctionInfo( + 'realtype *dwdw, const realtype t, const realtype *x, ' + 'const realtype *p, const realtype *k, const realtype *h, ' + 'const realtype *w, const realtype *tcl', + assume_pow_positivity=True, sparse=True + ), + 'dxdotdw': + _FunctionInfo( + 'realtype *dxdotdw, const realtype t, const realtype *x, ' + 'const realtype *p, const realtype *k, const realtype *h, ' + 'const realtype *w', + assume_pow_positivity=True, sparse=True + ), + 'dxdotdx_explicit': + _FunctionInfo( + 'realtype *dxdotdx_explicit, const realtype t, ' + 'const realtype *x, const realtype *p, const realtype *k, ' + 'const realtype *h, const realtype *w', + assume_pow_positivity=True, sparse=True + ), + 'dxdotdp_explicit': + _FunctionInfo( + 'realtype *dxdotdp_explicit, const realtype t, ' + 'const realtype *x, const realtype *p, const realtype *k, ' + 'const realtype *h, const realtype *w', + assume_pow_positivity=True, sparse=True + ), + 'dydx': + _FunctionInfo( + 'realtype *dydx, 
const realtype t, const realtype *x, ' + 'const realtype *p, const realtype *k, const realtype *h, ' + 'const realtype *w, const realtype *dwdx', + ), + 'dydp': + _FunctionInfo( + 'realtype *dydp, const realtype t, const realtype *x, ' + 'const realtype *p, const realtype *k, const realtype *h, ' + 'const int ip, const realtype *w, const realtype *tcl, ' + 'const realtype *dtcldp', + ), + 'dzdx': + _FunctionInfo( + 'realtype *dzdx, const int ie, const realtype t, ' + 'const realtype *x, const realtype *p, const realtype *k, ' + 'const realtype *h', + ), + 'dzdp': + _FunctionInfo( + 'realtype *dzdp, const int ie, const realtype t, ' + 'const realtype *x, const realtype *p, const realtype *k, ' + 'const realtype *h, const int ip', + ), + 'drzdx': + _FunctionInfo( + 'realtype *drzdx, const int ie, const realtype t, ' + 'const realtype *x, const realtype *p, const realtype *k, ' + 'const realtype *h', + ), + 'drzdp': + _FunctionInfo( + 'realtype *drzdp, const int ie, const realtype t, ' + 'const realtype *x, const realtype *p, const realtype *k, ' + 'const realtype *h, const int ip', + ), + 'dsigmaydy': + _FunctionInfo( + 'realtype *dsigmaydy, const realtype t, const realtype *p, ' + 'const realtype *k, const realtype *y' + ), + 'dsigmaydp': + _FunctionInfo( + 'realtype *dsigmaydp, const realtype t, const realtype *p, ' + 'const realtype *k, const realtype *y, const int ip', + ), + 'sigmay': + _FunctionInfo( + 'realtype *sigmay, const realtype t, const realtype *p, ' + 'const realtype *k, const realtype *y', + ), + 'dsigmazdp': + _FunctionInfo( + 'realtype *dsigmazdp, const realtype t, const realtype *p,' + ' const realtype *k, const int ip', + ), + 'sigmaz': + _FunctionInfo( + 'realtype *sigmaz, const realtype t, const realtype *p, ' + 'const realtype *k', + ), + 'sroot': + _FunctionInfo( + 'realtype *stau, const realtype t, const realtype *x, ' + 'const realtype *p, const realtype *k, const realtype *h, ' + 'const realtype *sx, const int ip, const int ie, ' + 'const realtype *tcl', + generate_body=False + ), + 'drootdt': + _FunctionInfo(generate_body=False), + 'drootdt_total': + _FunctionInfo(generate_body=False), + 'drootdp': + _FunctionInfo(generate_body=False), + 'drootdx': + _FunctionInfo(generate_body=False), + 'stau': + _FunctionInfo( + 'realtype *stau, const realtype t, const realtype *x, ' + 'const realtype *p, const realtype *k, const realtype *h, ' + 'const realtype *tcl, const realtype *sx, const int ip, ' + 'const int ie' + ), + 'deltax': + _FunctionInfo( + 'double *deltax, const realtype t, const realtype *x, ' + 'const realtype *p, const realtype *k, const realtype *h, ' + 'const int ie, const realtype *xdot, const realtype *xdot_old' + ), + 'ddeltaxdx': + _FunctionInfo(generate_body=False), + 'ddeltaxdt': + _FunctionInfo(generate_body=False), + 'ddeltaxdp': + _FunctionInfo(generate_body=False), + 'deltasx': + _FunctionInfo( + 'realtype *deltasx, const realtype t, const realtype *x, ' + 'const realtype *p, const realtype *k, const realtype *h, ' + 'const realtype *w, const int ip, const int ie, ' + 'const realtype *xdot, const realtype *xdot_old, ' + 'const realtype *sx, const realtype *stau, const realtype *tcl' + ), + 'w': + _FunctionInfo( + 'realtype *w, const realtype t, const realtype *x, ' + 'const realtype *p, const realtype *k, ' + 'const realtype *h, const realtype *tcl', + assume_pow_positivity=True + ), + 'x0': + _FunctionInfo( + 'realtype *x0, const realtype t, const realtype *p, ' + 'const realtype *k' + ), + 'x0_fixedParameters': + _FunctionInfo( + 'realtype 
*x0_fixedParameters, const realtype t, ' + 'const realtype *p, const realtype *k, ' + 'gsl::span reinitialization_state_idxs', + ), + 'sx0': + _FunctionInfo( + 'realtype *sx0, const realtype t, const realtype *x, ' + 'const realtype *p, const realtype *k, const int ip', + ), + 'sx0_fixedParameters': + _FunctionInfo( + 'realtype *sx0_fixedParameters, const realtype t, ' + 'const realtype *x0, const realtype *p, const realtype *k, ' + 'const int ip, gsl::span reinitialization_state_idxs', + ), + 'xdot': + _FunctionInfo( + 'realtype *xdot, const realtype t, const realtype *x, ' + 'const realtype *p, const realtype *k, const realtype *h, ' + 'const realtype *w', + assume_pow_positivity=True + ), + 'xdot_old': + _FunctionInfo(generate_body=False), + 'y': + _FunctionInfo( + 'realtype *y, const realtype t, const realtype *x, ' + 'const realtype *p, const realtype *k, ' + 'const realtype *h, const realtype *w', + ), + 'x_rdata': + _FunctionInfo( + 'realtype *x_rdata, const realtype *x, const realtype *tcl, ' + 'const realtype *p, const realtype *k' + ), + 'total_cl': + _FunctionInfo( + 'realtype *total_cl, const realtype *x_rdata, ' + 'const realtype *p, const realtype *k' + ), + 'dtotal_cldp': + _FunctionInfo( + 'realtype *dtotal_cldp, const realtype *x_rdata, ' + 'const realtype *p, const realtype *k, const int ip' + ), + 'dtotal_cldx_rdata': + _FunctionInfo( + 'realtype *dtotal_cldx_rdata, const realtype *x_rdata, ' + 'const realtype *p, const realtype *k, const realtype *tcl', + sparse=True + ), + 'x_solver': + _FunctionInfo('realtype *x_solver, const realtype *x_rdata'), + 'dx_rdatadx_solver': + _FunctionInfo( + 'realtype *dx_rdatadx_solver, const realtype *x, ' + 'const realtype *tcl, const realtype *p, const realtype *k', + sparse=True + ), + 'dx_rdatadp': + _FunctionInfo( + 'realtype *dx_rdatadp, const realtype *x, ' + 'const realtype *tcl, const realtype *p, const realtype *k, ' + 'const int ip' + ), + 'dx_rdatadtcl': + _FunctionInfo( + 'realtype *dx_rdatadtcl, const realtype *x, ' + 'const realtype *tcl, const realtype *p, const realtype *k', + sparse=True + ), + 'z': + _FunctionInfo( + 'realtype *z, const int ie, const realtype t, const realtype *x, ' + 'const realtype *p, const realtype *k, const realtype *h' + ), + 'rz': + _FunctionInfo( + 'realtype *rz, const int ie, const realtype t, const realtype *x, ' + 'const realtype *p, const realtype *k, const realtype *h' + ), +} + +# list of sparse functions +sparse_functions = [ + func_name for func_name, func_info in functions.items() + if func_info.sparse +] +# list of nobody functions +nobody_functions = [ + func_name for func_name, func_info in functions.items() + if not func_info.generate_body +] +# list of sensitivity functions +sensi_functions = [ + func_name for func_name, func_info in functions.items() + if 'const int ip' in func_info.arguments +] +# list of sensitivity functions +sparse_sensi_functions = [ + func_name for func_name, func_info in functions.items() + if 'const int ip' not in func_info.arguments + and func_name.endswith('dp') or func_name.endswith('dp_explicit') +] +# list of event functions +event_functions = [ + func_name for func_name, func_info in functions.items() + if 'const int ie' in func_info.arguments and + 'const int ip' not in func_info.arguments +] +event_sensi_functions = [ + func_name for func_name, func_info in functions.items() + if 'const int ie' in func_info.arguments and + 'const int ip' in func_info.arguments +] +# list of multiobs functions +multiobs_functions = [ + func_name for func_name, 
func_info in functions.items() + if 'const int iy' in func_info.arguments + or 'const int iz' in func_info.arguments +] +# list of equations that have ids which may not be unique +non_unique_id_symbols = [ + 'x_rdata', 'y' +] + +# custom c++ function replacements +CUSTOM_FUNCTIONS = [ + {'sympy': 'polygamma', + 'c++': 'boost::math::polygamma', + 'include': '#include ', + 'build_hint': 'Using polygamma requires libboost-math header files.' + }, + {'sympy': 'Heaviside', + 'c++': 'amici::heaviside'}, + {'sympy': 'DiracDelta', + 'c++': 'amici::dirac'} +] + +# python log manager +logger = get_logger(__name__, logging.ERROR) + + +def var_in_function_signature(name: str, varname: str) -> bool: + """ + Checks if the values for a symbolic variable is passed in the signature + of a function + + :param name: + name of the function + :param varname: + name of the symbolic variable + + :return: + boolean indicating whether the variable occurs in the function + signature + """ + return name in functions \ + and re.search( + rf'const (realtype|double) \*{varname}[0]*(,|$)+', + functions[name].arguments + ) + + +# defines the type of some attributes in ODEModel +symbol_to_type = { + SymbolId.SPECIES: State, + SymbolId.PARAMETER: Parameter, + SymbolId.FIXED_PARAMETER: Constant, + SymbolId.OBSERVABLE: Observable, + SymbolId.EVENT_OBSERVABLE: EventObservable, + SymbolId.SIGMAY: SigmaY, + SymbolId.SIGMAZ: SigmaZ, + SymbolId.LLHY: LogLikelihoodY, + SymbolId.LLHZ: LogLikelihoodZ, + SymbolId.LLHRZ: LogLikelihoodRZ, + SymbolId.EXPRESSION: Expression, + SymbolId.EVENT: Event +} + + +@log_execution_time('running smart_jacobian', logger) +def smart_jacobian( + eq: sp.MutableDenseMatrix, + sym_var: sp.MutableDenseMatrix +) -> sp.MutableSparseMatrix: + """ + Wrapper around symbolic jacobian with some additional checks that reduce + computation time for large matrices + + :param eq: + equation + :param sym_var: + differentiation variable + :return: + jacobian of eq wrt sym_var + """ + nrow = eq.shape[0] + ncol = sym_var.shape[0] + if ( + not min(eq.shape) + or not min(sym_var.shape) + or smart_is_zero_matrix(eq) + or smart_is_zero_matrix(sym_var) + ): + return sp.MutableSparseMatrix(nrow, ncol, dict()) + + # preprocess sparsity pattern + elements = ( + (i, j, a, b) + for i, a in enumerate(eq) + for j, b in enumerate(sym_var) + if a.has(b) + ) + + if (n_procs := int(os.environ.get("AMICI_IMPORT_NPROCS", 1))) == 1: + # serial + return sp.MutableSparseMatrix(nrow, ncol, + dict(starmap(_jacobian_element, elements)) + ) + + # parallel + from multiprocessing import get_context + # "spawn" should avoid potential deadlocks occurring with fork + # see e.g. 
https://stackoverflow.com/a/66113051 + ctx = get_context('spawn') + with ctx.Pool(n_procs) as p: + mapped = p.starmap(_jacobian_element, elements) + return sp.MutableSparseMatrix(nrow, ncol, dict(mapped)) + + +@log_execution_time('running smart_multiply', logger) +def smart_multiply( + x: Union[sp.MutableDenseMatrix, sp.MutableSparseMatrix], + y: sp.MutableDenseMatrix +) -> Union[sp.MutableDenseMatrix, sp.MutableSparseMatrix]: + """ + Wrapper around symbolic multiplication with some additional checks that + reduce computation time for large matrices + + :param x: + educt 1 + :param y: + educt 2 + :return: + product + """ + if not x.shape[0] or not y.shape[1] or smart_is_zero_matrix(x) or \ + smart_is_zero_matrix(y): + return sp.zeros(x.shape[0], y.shape[1]) + return x.multiply(y) + + +def smart_is_zero_matrix(x: Union[sp.MutableDenseMatrix, + sp.MutableSparseMatrix]) -> bool: + """A faster implementation of sympy's is_zero_matrix + + Avoids repeated indexer type checks and double iteration to distinguish + False/None. Found to be about 100x faster for large matrices. + + :param x: Matrix to check + """ + + if isinstance(x, sp.MutableDenseMatrix): + return all(xx.is_zero is True for xx in x.flat()) + + if isinstance(x, list): + return all(smart_is_zero_matrix(xx) for xx in x) + + return x.nnz() == 0 + + +def _default_simplify(x): + """Default simplification applied in ODEModel""" + # We need this as a free function instead of a lambda to have it picklable + # for parallel simplification + return sp.powsimp(x, deep=True) + + +class ODEModel: + """ + Defines an Ordinary Differential Equation as set of ModelQuantities. + This class provides general purpose interfaces to compute arbitrary + symbolic derivatives that are necessary for model simulation or + sensitivity computation. + + :ivar _states: + list of state variables + + :ivar _observables: + list of observables + + :ivar _event_observables: + list of event observables + + :ivar _sigmays: + list of sigmas for observables + + :ivar _sigmazs: + list of sigmas for event observables + + :ivar _parameters: + list of parameters + + :ivar _loglikelihoodys: + list of loglikelihoods for observables + + :ivar _loglikelihoodzs: + list of loglikelihoods for event observables + + :ivar _loglikelihoodrzs: + list of loglikelihoods for event observable regularizations + + :ivar _expressions: + list of expressions instances + + :ivar _conservationlaws: + list of conservation laws + + :ivar _symboldim_funs: + define functions that compute model dimensions, these + are functions as the underlying symbolic expressions have not been + populated at compile time + + :ivar _eqs: + carries symbolic formulas of the symbolic variables of the model + + :ivar _sparseeqs: + carries linear list of all symbolic formulas for sparsified + variables + + :ivar _vals: + carries numeric values of symbolic identifiers of the symbolic + variables of the model + + :ivar _names: + carries names of symbolic identifiers of the symbolic variables + of the model + + :ivar _syms: + carries symbolic identifiers of the symbolic variables of the + model + + :ivar _sparsesyms: + carries linear list of all symbolic identifiers for sparsified + variables + + :ivar _colptrs: + carries column pointers for sparsified variables. See + SUNMatrixContent_Sparse definition in ``sunmatrix/sunmatrix_sparse.h`` + + :ivar _rowvals: + carries row values for sparsified variables. 
See + SUNMatrixContent_Sparse definition in ``sunmatrix/sunmatrix_sparse.h`` + + :ivar _equation_prototype: + defines the attribute from which an equation should be generated via + list comprehension (see :meth:`ODEModel._generate_equation`) + + :ivar _variable_prototype: + defines the attribute from which a variable should be generated via + list comprehension (see :meth:`ODEModel._generate_symbol`) + + :ivar _value_prototype: + defines the attribute from which a value should be generated via + list comprehension (see :meth:`ODEModel._generate_value`) + + :ivar _total_derivative_prototypes: + defines how a total derivative equation is computed for an equation, + key defines the name and values should be arguments for + ODEModel.totalDerivative() + + :ivar _lock_total_derivative: + add chainvariables to this set when computing total derivative from + a partial derivative call to enforce a partial derivative in the + next recursion. prevents infinite recursion + + :ivar _simplify: + If not None, this function will be used to simplify symbolic + derivative expressions. Receives sympy expressions as only argument. + To apply multiple simplifications, wrap them in a lambda expression. + + :ivar _x0_fixedParameters_idx: + Index list of subset of states for which x0_fixedParameters was + computed + + :ivar _w_recursion_depth: + recursion depth in w, quantified as nilpotency of dwdw + + :ivar _has_quadratic_nllh: + whether all observables have a gaussian noise model, i.e. whether + res and FIM make sense. + + :ivar _code_printer: + Code printer to generate C++ code + + :ivar _z2event: + list of event indices for each event observable + """ + + def __init__(self, verbose: Optional[Union[bool, int]] = False, + simplify: Optional[Callable] = _default_simplify, + cache_simplify: bool = False): + """ + Create a new ODEModel instance. + + :param verbose: + verbosity level for logging, True/False default to + ``logging.DEBUG``/``logging.ERROR`` + + :param simplify: + see :meth:`ODEModel._simplify` + + :param cache_simplify: + Whether to cache calls to the simplify method. Can e.g. decrease + import times for models with events. 
+ """ + self._states: List[State] = [] + self._observables: List[Observable] = [] + self._eventobservables: List[EventObservable] = [] + self._sigmays: List[SigmaY] = [] + self._sigmazs: List[SigmaZ] = [] + self._parameters: List[Parameter] = [] + self._constants: List[Constant] = [] + self._loglikelihoodys: List[LogLikelihoodY] = [] + self._loglikelihoodzs: List[LogLikelihoodZ] = [] + self._loglikelihoodrzs: List[LogLikelihoodRZ] = [] + self._expressions: List[Expression] = [] + self._conservationlaws: List[ConservationLaw] = [] + self._events: List[Event] = [] + self._symboldim_funs: Dict[str, Callable[[], int]] = { + 'sx': self.num_states_solver, + 'v': self.num_states_solver, + 'vB': self.num_states_solver, + 'xB': self.num_states_solver, + 'sigmay': self.num_obs, + 'sigmaz': self.num_eventobs, + } + self._eqs: Dict[str, Union[sp.Matrix, sp.SparseMatrix, + List[Union[sp.Matrix, sp.SparseMatrix]]]] = dict() + self._sparseeqs: Dict[str, Union[sp.Matrix, List[sp.Matrix]]] = dict() + self._vals: Dict[str, List[float]] = dict() + self._names: Dict[str, List[str]] = dict() + self._syms: Dict[str, Union[sp.Matrix, List[sp.Matrix]]] = dict() + self._sparsesyms: Dict[str, Union[List[str], List[List[str]]]] = dict() + self._colptrs: Dict[str, Union[List[int], List[List[int]]]] = dict() + self._rowvals: Dict[str, Union[List[int], List[List[int]]]] = dict() + + self._equation_prototype: Dict[str, str] = { + 'total_cl': '_conservationlaws', + 'x0': '_states', + 'y': '_observables', + 'Jy': '_loglikelihoodys', + 'Jz': '_loglikelihoodzs', + 'Jrz': '_loglikelihoodrzs', + 'w': '_expressions', + 'root': '_events', + 'sigmay': '_sigmays', + 'sigmaz': '_sigmazs' + } + self._variable_prototype: Dict[str, str] = { + 'tcl': '_conservationlaws', + 'x_rdata': '_states', + 'y': '_observables', + 'z': '_eventobservables', + 'p': '_parameters', + 'k': '_constants', + 'w': '_expressions', + 'sigmay': '_sigmays', + 'sigmaz': '_sigmazs', + 'h': '_events' + } + self._value_prototype: Dict[str, str] = { + 'p': '_parameters', + 'k': '_constants', + } + self._total_derivative_prototypes: \ + Dict[str, Dict[str, Union[str, List[str]]]] = { + 'sroot': { + 'eq': 'root', + 'chainvars': ['x'], + 'var': 'p', + 'dxdz_name': 'sx', + }, + } + + self._lock_total_derivative: List[str] = list() + self._simplify: Callable = simplify + if cache_simplify and simplify is not None: + def cached_simplify( + expr: sp.Expr, + _simplified: Dict[str, sp.Expr] = {}, + _simplify: Callable = simplify, + ) -> sp.Expr: + """Speed up expression simplification with caching. + + NB: This can decrease model import times for models that have + many repeated expressions during C++ file generation. + For example, this can be useful for models with events. + However, for other models, this may increase model import + times. + + :param expr: + The SymPy expression. + :param _simplified: + The cache. + :param _simplify: + The simplification method. + + :return: + The simplified expression. 
+ """ + expr_str = repr(expr) + if expr_str not in _simplified: + _simplified[expr_str] = _simplify(expr) + return _simplified[expr_str] + self._simplify = cached_simplify + self._x0_fixedParameters_idx: Union[None, Sequence[int]] + self._w_recursion_depth: int = 0 + self._has_quadratic_nllh: bool = True + set_log_level(logger, verbose) + + self._code_printer = AmiciCxxCodePrinter() + for fun in CUSTOM_FUNCTIONS: + self._code_printer.known_functions[fun['sympy']] = fun['c++'] + + @log_execution_time('importing SbmlImporter', logger) + def import_from_sbml_importer( + self, + si: 'sbml_import.SbmlImporter', + compute_cls: Optional[bool] = True + ) -> None: + """ + Imports a model specification from a + :class:`amici.sbml_import.SbmlImporter` instance. + + :param si: + imported SBML model + :param compute_cls: + whether to compute conservation laws + """ + + # get symbolic expression from SBML importers + symbols = copy.copy(si.symbols) + + # assemble fluxes and add them as expressions to the model + assert len(si.flux_ids) == len(si.flux_vector) + fluxes = [generate_flux_symbol(ir, name=flux_id) + for ir, flux_id in enumerate(si.flux_ids)] + + # correct time derivatives for compartment changes + def transform_dxdt_to_concentration(species_id, dxdt): + """ + Produces the appropriate expression for the first derivative of a + species with respect to time, for species that reside in + compartments with a constant volume, or a volume that is defined by + an assignment or rate rule. + + :param species_id: + The identifier of the species (generated in "sbml_import.py"). + + :param dxdt: + The element-wise product of the row in the stoichiometric + matrix that corresponds to the species (row x_index) and the + flux (kinetic laws) vector. Ignored in the case of rate rules. + """ + # The derivation of the below return expressions can be found in + # the documentation. They are found by rearranging + # $\frac{d}{dt} (vx) = Sw$ for $\frac{dx}{dt}$, where $v$ is the + # vector of species compartment volumes, $x$ is the vector of + # species concentrations, $S$ is the stoichiometric matrix, and $w$ + # is the flux vector. The conditional below handles the cases of + # species in (i) compartments with a rate rule, (ii) compartments + # with an assignment rule, and (iii) compartments with a constant + # volume, respectively. 
+ species = si.symbols[SymbolId.SPECIES][species_id] + + comp = species['compartment'] + if comp in si.symbols[SymbolId.SPECIES]: + dv_dt = si.symbols[SymbolId.SPECIES][comp]['dt'] + xdot = (dxdt - dv_dt * species_id) / comp + return xdot + elif comp in si.compartment_assignment_rules: + v = si.compartment_assignment_rules[comp] + + # we need to flatten out assignments in the compartment in + # order to ensure that we catch all species dependencies + v = smart_subs_dict(v, si.symbols[SymbolId.EXPRESSION], + 'value') + dv_dt = v.diff(si.amici_time_symbol) + # we may end up with a time derivative of the compartment + # volume due to parameter rate rules + comp_rate_vars = [p for p in v.free_symbols + if p in si.symbols[SymbolId.SPECIES]] + for var in comp_rate_vars: + dv_dt += \ + v.diff(var) * si.symbols[SymbolId.SPECIES][var]['dt'] + dv_dx = v.diff(species_id) + xdot = (dxdt - dv_dt * species_id) / (dv_dx * species_id + v) + return xdot + else: + v = si.compartments[comp] + + if v == 1.0: + return dxdt + + return dxdt / v + + # create dynamics without respecting conservation laws first + dxdt = smart_multiply(si.stoichiometric_matrix, + MutableDenseMatrix(fluxes)) + for ix, ((species_id, species), formula) in enumerate(zip( + symbols[SymbolId.SPECIES].items(), + dxdt + )): + assert ix == species['index'] # check that no reordering occurred + # rate rules and amount species don't need to be updated + if 'dt' in species: + continue + if species['amount']: + species['dt'] = formula + else: + species['dt'] = transform_dxdt_to_concentration(species_id, + formula) + + # create all basic components of the ODE model and add them. + for symbol_name in symbols: + # transform dict of lists into a list of dicts + args = ['name', 'identifier'] + + if symbol_name == SymbolId.SPECIES: + args += ['dt', 'init'] + else: + args += ['value'] + + if symbol_name == SymbolId.EVENT: + args += ['state_update', 'initial_value'] + elif symbol_name == SymbolId.OBSERVABLE: + args += ['transformation'] + elif symbol_name == SymbolId.EVENT_OBSERVABLE: + args += ['event'] + + protos = [ + { + 'identifier': var_id, + **{k: v for k, v in var.items() if k in args} + } + for var_id, var in symbols[symbol_name].items() + ] + + for proto in protos: + self.add_component(symbol_to_type[symbol_name](**proto)) + + # add fluxes as expressions, this needs to happen after base + # expressions from symbols have been parsed + for flux_id, flux in zip(fluxes, si.flux_vector): + self.add_component(Expression( + identifier=flux_id, + name=str(flux_id), + value=flux + )) + + # process conservation laws + if compute_cls: + si.process_conservation_laws(self) + + # fill in 'self._sym' based on prototypes and components in ode_model + self.generate_basic_variables() + self._has_quadratic_nllh = all( + llh['dist'] in ['normal', 'lin-normal', 'log-normal', + 'log10-normal'] + for llh in si.symbols[SymbolId.LLHY].values() + ) + + def add_component(self, component: ModelQuantity, + insert_first: Optional[bool] = False) -> None: + """ + Adds a new ModelQuantity to the model. + + :param component: + model quantity to be added + + :param insert_first: + whether to add quantity first or last, relevant when components + may refer to other components of the same type. 
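# --- Editorial aside (illustrative sketch, not part of the patch) -----------
# Hypothetical use of add_component within this module's namespace: the
# component's class name selects the target list, so a Parameter instance
# lands in ODEModel._parameters.
import sympy as sp

model = ODEModel()
model.add_component(Parameter(sp.Symbol('k1'), 'k1', 1.0))
assert len(model._parameters) == 1  # via f'_{type(component).__name__.lower()}s'
# -----------------------------------------------------------------------------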
+ """ + if type(component) not in { + Observable, Expression, Parameter, Constant, State, + LogLikelihoodY, LogLikelihoodZ, LogLikelihoodRZ, + SigmaY, SigmaZ, ConservationLaw, Event, EventObservable + }: + raise ValueError(f'Invalid component type {type(component)}') + + component_list = getattr( + self, f'_{type(component).__name__.lower()}s' + ) + if insert_first: + component_list.insert(0, component) + else: + component_list.append(component) + + def add_conservation_law(self, + state: sp.Symbol, + total_abundance: sp.Symbol, + coefficients: Dict[sp.Symbol, sp.Expr]) -> None: + r""" + Adds a new conservation law to the model. A conservation law is defined + by the conserved quantity :math:`T = \sum_i(a_i * x_i)`, where + :math:`a_i` are coefficients and :math:`x_i` are different state + variables. + + :param state: + symbolic identifier of the state that should be replaced by + the conservation law (:math:`x_j`) + + :param total_abundance: + symbolic identifier of the total abundance (:math:`T/a_j`) + + :param coefficients: + Dictionary of coefficients {x_i: a_i} + """ + try: + ix = next(filter(lambda is_s: is_s[1].get_id() == state, + enumerate(self._states)))[0] + except StopIteration: + raise ValueError(f'Specified state {state} was not found in the ' + f'model states.') + + state_id = self._states[ix].get_id() + + # \sum_{i≠j}(a_i * x_i)/a_j + target_expression = sp.Add(*( + c_i*x_i for x_i, c_i in coefficients.items() if x_i != state + )) / coefficients[state] + + # x_j = T/a_j - \sum_{i≠j}(a_i * x_i)/a_j + state_expr = total_abundance - target_expression + + # T/a_j = \sum_{i≠j}(a_i * x_i)/a_j + x_j + abundance_expr = target_expression + state_id + + self.add_component( + Expression(state_id, str(state_id), state_expr), + insert_first=True + ) + + cl = ConservationLaw( + total_abundance, f'total_{state_id}', abundance_expr, + coefficients, state_id + ) + + self.add_component(cl) + self._states[ix].set_conservation_law(cl) + + def get_observable_transformations(self) -> List[ObservableTransformation]: + """ + List of observable transformations + + :return: + list of transformations + """ + return [obs.trafo for obs in self._observables] + + def num_states_rdata(self) -> int: + """ + Number of states. + + :return: + number of state variable symbols + """ + return len(self.sym('x_rdata')) + + def num_states_solver(self) -> int: + """ + Number of states after applying conservation laws. + + :return: + number of state variable symbols + """ + return len(self.sym('x')) + + def num_cons_law(self) -> int: + """ + Number of conservation laws. + + :return: + number of conservation laws + """ + return self.num_states_rdata() - self.num_states_solver() + + def num_state_reinits(self) -> int: + """ + Number of solver states which would be reinitialized after + preequilibration + + :return: + number of state variable symbols with reinitialization + """ + reinit_states = self.eq('x0_fixedParameters') + solver_states = self.eq('x_solver') + return sum(ix in solver_states for ix in reinit_states) + + def num_obs(self) -> int: + """ + Number of Observables. + + :return: + number of observable symbols + """ + return len(self.sym('y')) + + def num_eventobs(self) -> int: + """ + Number of Event Observables. + + :return: + number of event observable symbols + """ + return len(self.sym('z')) + + def num_const(self) -> int: + """ + Number of Constants. + + :return: + number of constant symbols + """ + return len(self.sym('k')) + + def num_par(self) -> int: + """ + Number of Parameters. 
+ + :return: + number of parameter symbols + """ + return len(self.sym('p')) + + def num_expr(self) -> int: + """ + Number of Expressions. + + :return: + number of expression symbols + """ + return len(self.sym('w')) + + def num_events(self) -> int: + """ + Number of Events. + + :return: + number of event symbols (length of the root vector in AMICI) + """ + return len(self.sym('h')) + + def sym(self, name: str) -> sp.Matrix: + """ + Returns (and constructs if necessary) the identifiers for a symbolic + entity. + + :param name: + name of the symbolic variable + + :return: + matrix of symbolic identifiers + """ + if name not in self._syms: + self._generate_symbol(name) + + return self._syms[name] + + def sparsesym(self, name: str, force_generate: bool = True) -> List[str]: + """ + Returns (and constructs if necessary) the sparsified identifiers for + a sparsified symbolic variable. + + :param name: + name of the symbolic variable + + :param force_generate: + whether the symbols should be generated if not available + + :return: + linearized Matrix containing the symbolic identifiers + """ + if name not in sparse_functions: + raise ValueError(f'{name} is not marked as sparse') + if name not in self._sparsesyms and force_generate: + self._generate_sparse_symbol(name) + return self._sparsesyms.get(name, []) + + def eq(self, name: str) -> sp.Matrix: + """ + Returns (and constructs if necessary) the formulas for a symbolic + entity. + + :param name: + name of the symbolic variable + + :return: + matrix of symbolic formulas + """ + + if name not in self._eqs: + dec = log_execution_time(f'computing {name}', logger) + dec(self._compute_equation)(name) + return self._eqs[name] + + def sparseeq(self, name) -> sp.Matrix: + """ + Returns (and constructs if necessary) the sparsified formulas for a + sparsified symbolic variable. + + :param name: + name of the symbolic variable + + :return: + linearized matrix containing the symbolic formulas + """ + if name not in sparse_functions: + raise ValueError(f'{name} is not marked as sparse') + if name not in self._sparseeqs: + self._generate_sparse_symbol(name) + return self._sparseeqs[name] + + def colptrs(self, name: str) -> Union[List[sp.Number], + List[List[sp.Number]]]: + """ + Returns (and constructs if necessary) the column pointers for + a sparsified symbolic variable. + + :param name: + name of the symbolic variable + + :return: + list containing the column pointers + """ + if name not in sparse_functions: + raise ValueError(f'{name} is not marked as sparse') + if name not in self._sparseeqs: + self._generate_sparse_symbol(name) + return self._colptrs[name] + + def rowvals(self, name: str) -> Union[List[sp.Number], + List[List[sp.Number]]]: + """ + Returns (and constructs if necessary) the row values for a + sparsified symbolic variable. 
+ + :param name: + name of the symbolic variable + + :return: + list containing the row values + """ + if name not in sparse_functions: + raise ValueError(f'{name} is not marked as sparse') + if name not in self._sparseeqs: + self._generate_sparse_symbol(name) + return self._rowvals[name] + + def val(self, name: str) -> List[float]: + """ + Returns (and constructs if necessary) the numeric values of a + symbolic entity + + :param name: + name of the symbolic variable + + :return: + list containing the numeric values + """ + if name not in self._vals: + self._generate_value(name) + return self._vals[name] + + def name(self, name: str) -> List[str]: + """ + Returns (and constructs if necessary) the names of a symbolic + variable + + :param name: + name of the symbolic variable + + :return: + list of names + """ + if name not in self._names: + self._generate_name(name) + return self._names[name] + + def free_symbols(self) -> Set[sp.Basic]: + """ + Returns list of free symbols that appear in ODE RHS and initial + conditions. + """ + return set(chain.from_iterable( + state.get_free_symbols() + for state in self._states + )) + + def _generate_symbol(self, name: str) -> None: + """ + Generates the symbolic identifiers for a symbolic variable + + :param name: + name of the symbolic variable + """ + if name in self._variable_prototype: + component = self._variable_prototype[name] + self._syms[name] = sp.Matrix([ + comp.get_id() + for comp in getattr(self, component) + ]) + if name == 'y': + self._syms['my'] = sp.Matrix([ + comp.get_measurement_symbol() + for comp in getattr(self, component) + ]) + if name == 'z': + self._syms['mz'] = sp.Matrix([ + comp.get_measurement_symbol() + for comp in getattr(self, component) + ]) + self._syms['rz'] = sp.Matrix([ + comp.get_regularization_symbol() + for comp in getattr(self, component) + ]) + return + elif name == 'x': + self._syms[name] = sp.Matrix([ + state.get_id() + for state in self._states + if not state.has_conservation_law() + ]) + return + elif name == 'sx0': + self._syms[name] = sp.Matrix([ + f's{state.get_id()}_0' + for state in self._states + if not state.has_conservation_law() + ]) + return + elif name == 'sx_rdata': + self._syms[name] = sp.Matrix([ + f'sx_rdata_{i}' + for i in range(len(self._states)) + ]) + return + elif name == 'dtcldp': + # check, whether the CL consists of only one state. 
Then, + # sensitivities drop out, otherwise generate symbols + self._syms[name] = sp.Matrix([ + [sp.Symbol(f's{strip_pysb(tcl.get_id())}__' + f'{strip_pysb(par.get_id())}', real=True) + for par in self._parameters] + if self.conservation_law_has_multispecies(tcl) + else [0] * self.num_par() + for tcl in self._conservationlaws + ]) + return + elif name == 'xdot_old': + length = len(self.eq('xdot')) + elif name in sparse_functions: + self._generate_sparse_symbol(name) + return + elif name in self._symboldim_funs: + length = self._symboldim_funs[name]() + elif name == 'stau': + length = self.eq(name)[0].shape[1] + elif name in sensi_functions: + length = self.eq(name).shape[0] + else: + length = len(self.eq(name)) + self._syms[name] = sp.Matrix([ + sp.Symbol(f'{name}{0 if name == "stau" else i}', real=True) + for i in range(length) + ]) + + def generate_basic_variables(self) -> None: + """ + Generates the symbolic identifiers for all variables in + ``ODEModel._variable_prototype`` + """ + # We need to process events and Heaviside functions in the ODE Model, + # before adding it to ODEExporter + self.parse_events() + + for var in self._variable_prototype: + if var not in self._syms: + self._generate_symbol(var) + + self._generate_symbol('x') + + def parse_events(self) -> None: + """ + This function checks the right-hand side for roots of Heaviside + functions or events, collects the roots, removes redundant roots, + and replaces the formulae of the found roots by identifiers of AMICI's + Heaviside function implementation in the right-hand side + """ + # Track all roots functions in the right-hand side + roots = copy.deepcopy(self._events) + for state in self._states: + state.set_dt(self._process_heavisides(state.get_dt(), roots)) + + for expr in self._expressions: + expr.set_val(self._process_heavisides(expr.get_val(), roots)) + + # remove all possible Heavisides from roots, which may arise from + # the substitution of `'w'` in `_collect_heaviside_roots` + for root in roots: + root.set_val(self._process_heavisides(root.get_val(), roots)) + + # Now add the found roots to the model components + for root in roots: + # skip roots of SBML events, as these have already been added + if root in self._events: + continue + # add roots of heaviside functions + self.add_component(root) + + def get_appearance_counts(self, idxs: List[int]) -> List[int]: + """ + Counts how often a state appears in the time derivative of + another state and expressions for a subset of states + + :param idxs: + list of state indices for which counts are to be computed + + :return: + list of counts for the states ordered according to the provided + indices + """ + free_symbols_dt = list(itertools.chain.from_iterable( + [ + str(symbol) + for symbol in state.get_dt().free_symbols + ] + for state in self._states + )) + + free_symbols_expr = list(itertools.chain.from_iterable( + [ + str(symbol) + for symbol in expr.get_val().free_symbols + ] + for expr in self._expressions + )) + + return [ + free_symbols_dt.count(str(self._states[idx].get_id())) + + + free_symbols_expr.count(str(self._states[idx].get_id())) + for idx in idxs + ] + + def _generate_sparse_symbol(self, name: str) -> None: + """ + Generates the sparse symbolic identifiers, symbolic identifiers, + sparse equations, column pointers and row values for a symbolic + variable + + :param name: + name of the symbolic variable + """ + matrix = self.eq(name) + + if match_deriv := DERIVATIVE_PATTERN.match(name): + eq = match_deriv[1] + var = match_deriv[2] + + rownames = 
self.sym(eq) + colnames = self.sym(var) + + if name == 'dJydy': + # One entry per y-slice + self._colptrs[name] = [] + self._rowvals[name] = [] + self._sparseeqs[name] = [] + self._sparsesyms[name] = [] + self._syms[name] = [] + + for iy in range(self.num_obs()): + symbol_col_ptrs, symbol_row_vals, sparse_list, symbol_list, \ + sparse_matrix = self._code_printer.csc_matrix( + matrix[iy, :], rownames=rownames, colnames=colnames, + identifier=iy) + self._colptrs[name].append(symbol_col_ptrs) + self._rowvals[name].append(symbol_row_vals) + self._sparseeqs[name].append(sparse_list) + self._sparsesyms[name].append(symbol_list) + self._syms[name].append(sparse_matrix) + else: + symbol_col_ptrs, symbol_row_vals, sparse_list, symbol_list, \ + sparse_matrix = self._code_printer.csc_matrix( + matrix, rownames=rownames, colnames=colnames, + pattern_only=name in nobody_functions + ) + + self._colptrs[name] = symbol_col_ptrs + self._rowvals[name] = symbol_row_vals + self._sparseeqs[name] = sparse_list + self._sparsesyms[name] = symbol_list + self._syms[name] = sparse_matrix + + def _compute_equation(self, name: str) -> None: + """ + Computes the symbolic formula for a symbolic variable + + :param name: + name of the symbolic variable + """ + # replacement ensures that we don't have to adapt name in abstract + # model and keep backwards compatibility with matlab + match_deriv = DERIVATIVE_PATTERN.match( + re.sub(r'dJ(y|z|rz)dsigma', r'dJ\1dsigma\1', name) + .replace('sigmarz', 'sigmaz') + .replace('dJrzdz', 'dJrzdrz') + ) + time_symbol = sp.Matrix([symbol_with_assumptions('t')]) + + if name in self._equation_prototype: + self._equation_from_component(name, self._equation_prototype[name]) + + elif name in self._total_derivative_prototypes: + args = self._total_derivative_prototypes[name] + args['name'] = name + self._lock_total_derivative += args['chainvars'] + self._total_derivative(**args) + for cv in args['chainvars']: + self._lock_total_derivative.remove(cv) + + elif name == 'xdot': + self._eqs[name] = sp.Matrix([ + state.get_dt() for state in self._states + if not state.has_conservation_law() + ]) + + elif name == 'x_rdata': + self._eqs[name] = sp.Matrix([ + state.get_x_rdata() + for state in self._states + ]) + + elif name == 'x_solver': + self._eqs[name] = sp.Matrix([ + state.get_id() + for state in self._states + if not state.has_conservation_law() + ]) + + elif name == 'sx_solver': + self._eqs[name] = sp.Matrix([ + self.sym('sx_rdata')[ix] + for ix, state in enumerate(self._states) + if not state.has_conservation_law() + ]) + + elif name == 'sx0': + self._derivative(name[1:], 'p', name=name) + + elif name == 'sx0_fixedParameters': + # deltax = -x+x0_fixedParameters if x0_fixedParameters>0 else 0 + # deltasx = -sx+dx0_fixed_parametersdx*sx+dx0_fixedParametersdp + # if x0_fixedParameters>0 else 0 + # sx0_fixedParameters = sx+deltasx = + # dx0_fixed_parametersdx*sx+dx0_fixedParametersdp + self._eqs[name] = smart_jacobian( + self.eq('x0_fixedParameters'), self.sym('p') + ) + + dx0_fixed_parametersdx = smart_jacobian( + self.eq('x0_fixedParameters'), self.sym('x') + ) + + if not smart_is_zero_matrix(dx0_fixed_parametersdx): + if isinstance(self._eqs[name], ImmutableDenseMatrix): + self._eqs[name] = MutableDenseMatrix(self._eqs[name]) + tmp = smart_multiply(dx0_fixed_parametersdx, self.sym('sx0')) + for ip in range(self._eqs[name].shape[1]): + self._eqs[name][:, ip] += tmp + + elif name == 'x0_fixedParameters': + k = self.sym('k') + self._x0_fixedParameters_idx = [ + ix + for ix, eq in 
enumerate(self.eq('x0')) + if any(sym in eq.free_symbols for sym in k) + ] + eq = self.eq('x0') + self._eqs[name] = sp.Matrix([eq[ix] for ix in + self._x0_fixedParameters_idx]) + + elif name == 'dtotal_cldx_rdata': + x_rdata = self.sym('x_rdata') + self._eqs[name] = sp.Matrix( + [ + [cl.get_ncoeff(xr) for xr in x_rdata] + for cl in self._conservationlaws + ] + ) + + elif name == 'dtcldx': + # this is always zero + self._eqs[name] = \ + sp.zeros(self.num_cons_law(), self.num_states_solver()) + + elif name == 'dtcldp': + # force symbols + self._eqs[name] = self.sym(name) + + elif name == 'dx_rdatadx_solver': + if self.num_cons_law(): + x_solver = self.sym('x') + self._eqs[name] = sp.Matrix( + [ + [state.get_dx_rdata_dx_solver(xs) for xs in x_solver] + for state in self._states + ] + ) + else: + # so far, dx_rdatadx_solver is only required for sx_rdata + # in case of no conservation laws, C++ code will directly use + # sx, we don't need this + self._eqs[name] = \ + sp.zeros(self.num_states_rdata(), + self.num_states_solver()) + + elif name == 'dx_rdatadp': + if self.num_cons_law(): + self._eqs[name] = smart_jacobian(self.eq('x_rdata'), + self.sym('p')) + else: + # so far, dx_rdatadp is only required for sx_rdata + # in case of no conservation laws, C++ code will directly use + # sx, we don't need this + self._eqs[name] = \ + sp.zeros(self.num_states_rdata(), + self.num_par()) + + elif name == 'dx_rdatadtcl': + self._eqs[name] = smart_jacobian(self.eq('x_rdata'), + self.sym('tcl')) + + elif name == 'dxdotdx_explicit': + # force symbols + self._derivative('xdot', 'x', name=name) + + elif name == 'dxdotdp_explicit': + # force symbols + self._derivative('xdot', 'p', name=name) + + elif name == 'drootdt': + self._eqs[name] = smart_jacobian(self.eq('root'), time_symbol) + + elif name == 'drootdt_total': + # backsubstitution of optimized right-hand side terms into RHS + # calling subs() is costly. 
Due to looping over events though, the + # following lines are only evaluated if a model has events + w_sorted = \ + toposort_symbols(dict(zip(self.sym('w'), self.eq('w')))) + tmp_xdot = smart_subs_dict(self.eq('xdot'), w_sorted) + self._eqs[name] = self.eq('drootdt') + if self.num_states_solver(): + self._eqs[name] += smart_multiply(self.eq('drootdx'), tmp_xdot) + + elif name == 'deltax': + # fill boluses for Heaviside functions, as empty state updates + # would cause problems when writing the function file later + event_eqs = [] + for event in self._events: + if event._state_update is None: + event_eqs.append(sp.zeros(self.num_states_solver(), 1)) + else: + event_eqs.append(event._state_update) + + self._eqs[name] = event_eqs + + elif name == 'z': + event_observables = [ + sp.zeros(self.num_eventobs(), 1) + for _ in self._events + ] + event_ids = [ + e.get_id() for e in self._events + ] + # TODO: get rid of this stupid 1-based indexing as soon as we can + # the matlab interface + z2event = [ + event_ids.index(event_obs.get_event()) + 1 + for event_obs in self._eventobservables + ] + for (iz, ie), event_obs in zip(enumerate(z2event), + self._eventobservables): + event_observables[ie-1][iz] = event_obs.get_val() + + self._eqs[name] = event_observables + self._z2event = z2event + + elif name in ['ddeltaxdx', 'ddeltaxdp', 'ddeltaxdt', 'dzdp', 'dzdx']: + if match_deriv[2] == 't': + var = time_symbol + else: + var = self.sym(match_deriv[2]) + + self._eqs[name] = [ + smart_jacobian(self.eq(match_deriv[1])[ie], var) + for ie in range(self.num_events()) + ] + if name == 'dzdx': + for ie in range(self.num_events()): + dtaudx = -self.eq('drootdx')[ie, :] / \ + self.eq('drootdt_total')[ie] + for iz in range(self.num_eventobs()): + if ie != self._z2event[iz]-1: + continue + dzdt = sp.diff(self.eq('z')[ie][iz], time_symbol) + self._eqs[name][ie][iz, :] += dzdt * dtaudx + + elif name in ['rz', 'drzdx', 'drzdp']: + eq_events = [] + for ie in range(self.num_events()): + val = sp.zeros( + self.num_eventobs(), + 1 if name == 'rz' else len(self.sym(match_deriv[2])) + ) + # match event observables to root function + for iz in range(self.num_eventobs()): + if ie == self._z2event[iz]-1: + val[iz, :] = self.eq(name.replace('rz', 'root'))[ie, :] + eq_events.append(val) + + self._eqs[name] = eq_events + + elif name == 'stau': + self._eqs[name] = [ + -self.eq('sroot')[ie, :] / self.eq('drootdt_total')[ie] + if not self.eq('drootdt_total')[ie].is_zero else + sp.zeros(*self.eq('sroot')[ie, :].shape) + for ie in range(self.num_events()) + ] + + elif name == 'deltasx': + event_eqs = [] + for ie, event in enumerate(self._events): + + tmp_eq = sp.zeros(self.num_states_solver(), self.num_par()) + + # need to check if equations are zero since we are using + # symbols + if not smart_is_zero_matrix(self.eq('stau')[ie]): + tmp_eq += smart_multiply( + (self.sym('xdot_old') - self.sym('xdot')), + self.sym('stau').T) + + # only add deltax part if there is state update + if event._state_update is not None: + # partial derivative for the parameters + tmp_eq += self.eq('ddeltaxdp')[ie] + + # initial part of chain rule state variables + tmp_dxdp = self.sym('sx') * sp.ones(1, self.num_par()) + + # need to check if equations are zero since we are using + # symbols + if not smart_is_zero_matrix(self.eq('stau')[ie]): + # chain rule for the time point + tmp_eq += smart_multiply(self.eq('ddeltaxdt')[ie], + self.sym('stau').T) + + # additional part of chain rule state variables + # This part only works if we use self.eq('xdot') + # 
instead of self.sym('xdot'). Not immediately clear + # why that is. + tmp_dxdp += smart_multiply(self.eq('xdot'), + self.sym('stau').T) + + # finish chain rule for the state variables + tmp_eq += smart_multiply(self.eq('ddeltaxdx')[ie], + tmp_dxdp) + + event_eqs.append(tmp_eq) + + self._eqs[name] = event_eqs + + elif name == 'xdot_old': + # force symbols + self._eqs[name] = self.sym(name) + + elif name == 'dwdx': + x = self.sym('x') + self._eqs[name] = sp.Matrix([ + [-cl.get_ncoeff(xs) for xs in x] + # the insert first in ode_model._add_conservation_law() means + # that we need to reverse the order here + for cl in reversed(self._conservationlaws) + ]).col_join(smart_jacobian(self.eq('w')[self.num_cons_law():, :], + x)) + + elif match_deriv: + self._derivative(match_deriv[1], match_deriv[2], name) + + else: + raise ValueError(f'Unknown equation {name}') + + if name == 'root': + # Events are processed after the ODE model has been set up. + # Equations are there, but symbols for roots must be added + self.sym('h') + + if name in {'Jy', 'dydx'}: + # do not transpose if we compute the partial derivative as part of + # a total derivative + if not len(self._lock_total_derivative): + self._eqs[name] = self._eqs[name].transpose() + + if name in {'dzdx', 'drzdx'}: + self._eqs[name] = [ + e.T for e in self._eqs[name] + ] + + if self._simplify: + dec = log_execution_time(f'simplifying {name}', logger) + if isinstance(self._eqs[name], list): + self._eqs[name] = [ + dec(_parallel_applyfunc)(sub_eq, self._simplify) + for sub_eq in self._eqs[name] + ] + else: + self._eqs[name] = dec(_parallel_applyfunc)(self._eqs[name], + self._simplify) + + def sym_names(self) -> List[str]: + """ + Returns a list of names of generated symbolic variables + + :return: + list of names + """ + return list(self._syms.keys()) + + def _derivative(self, eq: str, var: str, name: str = None) -> None: + """ + Creates a new symbolic variable according to a derivative + + :param eq: + name of the symbolic variable that defines the formula + + :param var: + name of the symbolic variable that defines the identifiers + with respect to which the derivatives are to be computed + + :param name: + name of resulting symbolic variable, default is ``d{eq}d{var}`` + """ + if not name: + name = f'd{eq}d{var}' + + ignore_chainrule = { + ('xdot', 'p'): 'w', # has generic implementation in c++ code + ('xdot', 'x'): 'w', # has generic implementation in c++ code + ('w', 'w'): 'tcl', # dtcldw = 0 + ('w', 'x'): 'tcl', # dtcldx = 0 + } + # automatically detect chainrule + chainvars = [ + cv for cv in ['w', 'tcl'] + if var_in_function_signature(eq, cv) + and cv not in self._lock_total_derivative + and var is not cv + and min(self.sym(cv).shape) + and ( + (eq, var) not in ignore_chainrule + or ignore_chainrule[(eq, var)] != cv + ) + ] + if len(chainvars): + self._lock_total_derivative += chainvars + self._total_derivative(name, eq, chainvars, var) + for cv in chainvars: + self._lock_total_derivative.remove(cv) + return + + # partial derivative + sym_eq = self.eq(eq).transpose() if eq == 'Jy' else self.eq(eq) + + sym_var = self.sym(var) + + derivative = smart_jacobian(sym_eq, sym_var) + + self._eqs[name] = derivative + + # compute recursion depth based on nilpotency of jacobian. 
computing + # nilpotency can be done more efficiently on numerical sparsity pattern + if name == 'dwdw': + nonzeros = np.asarray( + derivative.applyfunc(lambda x: int(not x.is_zero)) + ).astype(np.int64) + recursion = nonzeros.copy() + if max(recursion.shape): + while recursion.max(): + recursion = recursion.dot(nonzeros) + self._w_recursion_depth += 1 + if self._w_recursion_depth > len(sym_eq): + raise RuntimeError( + 'dwdw is not nilpotent. Something, somewhere went ' + 'terribly wrong. Please file a bug report at ' + 'https://github.com/AMICI-dev/AMICI/issues and ' + 'attach this model.' + ) + + if name == 'dydw' and not smart_is_zero_matrix(derivative): + dwdw = self.eq('dwdw') + # h(k) = d{eq}dw*dwdw^k* (k=1) + h = smart_multiply(derivative, dwdw) + while not smart_is_zero_matrix(h): + self._eqs[name] += h + # h(k+1) = d{eq}dw*dwdw^(k+1) = h(k)*dwdw + h = smart_multiply(h, dwdw) + + def _total_derivative(self, name: str, eq: str, chainvars: List[str], + var: str, dydx_name: str = None, + dxdz_name: str = None) -> None: + """ + Creates a new symbolic variable according to a total derivative + using the chain rule + + :param name: + name of resulting symbolic variable + + :param eq: + name of the symbolic variable that defines the formula + + :param chainvars: + names of the symbolic variable that define the + identifiers with respect to which the chain rules are applied + + :param var: + name of the symbolic variable that defines the identifiers + with respect to which the derivatives are to be computed + + :param dydx_name: + defines the name of the symbolic variable that + defines the derivative of the ``eq`` with respect to ``chainvar``, + default is ``d{eq}d{chainvar}`` + + :param dxdz_name: + defines the name of the symbolic variable that + defines the derivative of the ``chainvar`` with respect to ``var``, + default is d{chainvar}d{var} + """ + # compute total derivative according to chainrule + # Dydz = dydx*dxdz + dydz + + # initialize with partial derivative dydz without chain rule + self._eqs[name] = self.sym_or_eq(name, f'd{eq}d{var}') + if not isinstance(self._eqs[name], sp.Symbol): + # if not a Symbol, create a copy using sympy API + # NB deepcopy does not work safely, see sympy issue #7672 + self._eqs[name] = self._eqs[name].copy() + + for chainvar in chainvars: + if dydx_name is None: + dydx_name = f'd{eq}d{chainvar}' + if dxdz_name is None: + dxdz_name = f'd{chainvar}d{var}' + + dydx = self.sym_or_eq(name, dydx_name) + dxdz = self.sym_or_eq(name, dxdz_name) + # Save time for large models if one multiplicand is zero, + # which is not checked for by sympy + if not smart_is_zero_matrix(dydx) and not \ + smart_is_zero_matrix(dxdz): + dydx_times_dxdz = smart_multiply(dydx, dxdz) + if dxdz.shape[1] == 1 and \ + self._eqs[name].shape[1] != dxdz.shape[1]: + for iz in range(self._eqs[name].shape[1]): + self._eqs[name][:, iz] += dydx_times_dxdz + else: + self._eqs[name] += dydx_times_dxdz + + def sym_or_eq(self, name: str, varname: str) -> sp.Matrix: + """ + Returns symbols or equations depending on whether a given + variable appears in the function signature or not. + + :param name: + name of function for which the signature should be checked + + :param varname: + name of the variable which should be contained in the + function signature + + :return: + the variable symbols if the variable is part of the signature and + the variable equations otherwise. 
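# --- Editorial aside (illustrative sketch, not part of the patch) -----------
# The chain rule implemented by _total_derivative above, in miniature:
# for y(x(z), z), Dy/Dz = (dy/dx)*(dx/dz) + dy/dz.
import sympy as sp

z = sp.Symbol('z')
x_of_z = sp.Function('x')(z)
y = x_of_z ** 2 + z
assert sp.diff(y, z) == 2 * x_of_z * sp.Derivative(x_of_z, z) + 1
# -----------------------------------------------------------------------------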
+ """ + # dwdx and dwdp will be dynamically computed and their ordering + # within a column may differ from the initialization of symbols here, + # so those are not safe to use. Not removing them from signature as + # this would break backwards compatibility. + if var_in_function_signature(name, varname) \ + and varname not in ['dwdx', 'dwdp']: + return self.sym(varname) + else: + return self.eq(varname) + + def _multiplication(self, name: str, x: str, y: str, + transpose_x: Optional[bool] = False, + sign: Optional[int] = 1): + """ + Creates a new symbolic variable according to a multiplication + + :param name: + name of resulting symbolic variable, default is ``d{eq}d{var}`` + + :param x: + name of the symbolic variable that defines the first factor + + :param y: + name of the symbolic variable that defines the second factor + + :param transpose_x: + indicates whether the first factor should be + transposed before multiplication + + :param sign: + defines the sign of the product, should be +1 or -1 + """ + if sign not in [-1, 1]: + raise TypeError(f'sign must be +1 or -1, was {sign}') + + variables = { + varname: self.sym(varname) + if var_in_function_signature(name, varname) + else self.eq(varname) + for varname in [x, y] + } + + xx = variables[x].transpose() if transpose_x else variables[x] + yy = variables[y] + + self._eqs[name] = sign * smart_multiply(xx, yy) + + def _equation_from_component(self, name: str, component: str) -> None: + """ + Generates the formulas of a symbolic variable from the attributes + + :param name: + name of resulting symbolic variable + + :param component: + name of the attribute + """ + self._eqs[name] = sp.Matrix( + [comp.get_val() for comp in getattr(self, component)] + ) + + def get_conservation_laws(self) -> List[Tuple[sp.Symbol, sp.Expr]]: + """Returns a list of states with conservation law set + + :return: + list of state identifiers + """ + return [ + (state.get_id(), state.get_x_rdata()) + for state in self._states + if state.has_conservation_law() + ] + + def _generate_value(self, name: str) -> None: + """ + Generates the numeric values of a symbolic variable from value + prototypes + + :param name: + name of resulting symbolic variable + """ + if name in self._value_prototype: + component = self._value_prototype[name] + else: + raise ValueError(f'No values for {name}') + + self._vals[name] = [comp.get_val() + for comp in getattr(self, component)] + + def _generate_name(self, name: str) -> None: + """ + Generates the names of a symbolic variable from variable prototypes or + equation prototypes + + :param name: + name of resulting symbolic variable + """ + if name in self._variable_prototype: + component = self._variable_prototype[name] + elif name in self._equation_prototype: + component = self._equation_prototype[name] + else: + raise ValueError(f'No names for {name}') + + self._names[name] = [comp.get_name() + for comp in getattr(self, component)] + + def state_has_fixed_parameter_initial_condition(self, ix: int) -> bool: + """ + Checks whether the state at specified index has a fixed parameter + initial condition + + :param ix: + state index + + :return: + boolean indicating if any of the initial condition free + variables is contained in the model constants + """ + ic = self._states[ix].get_val() + if not isinstance(ic, sp.Basic): + return False + return any( + fp in (c.get_id() for c in self._constants) + for fp in ic.free_symbols + ) + + def state_has_conservation_law(self, ix: int) -> bool: + """ + Checks whether the state at specified index 
has a conservation + law set + + :param ix: + state index + + :return: + boolean indicating if conservation_law is not None + """ + return self._states[ix].has_conservation_law() + + def get_solver_indices(self) -> Dict[int, int]: + """ + Returns a mapping that maps rdata species indices to solver indices + + :return: + dictionary mapping rdata species indices to solver indices + """ + solver_index = {} + ix_solver = 0 + for ix in range(len(self._states)): + if self.state_has_conservation_law(ix): + continue + solver_index[ix] = ix_solver + ix_solver += 1 + return solver_index + + def state_is_constant(self, ix: int) -> bool: + """ + Checks whether the temporal derivative of the state is zero + + :param ix: + state index + + :return: + boolean indicating if constant over time + """ + return self._states[ix].get_dt() == 0.0 + + def conservation_law_has_multispecies(self, + tcl: ConservationLaw) -> bool: + """ + Checks whether a conservation law involves multiple species or just + defines one constant species + + :param tcl: + conservation law + + :return: + boolean indicating whether the conservation law involves more + than one species + """ + state_set = set(self.sym('x_rdata')) + n_species = len(state_set.intersection(tcl.get_val().free_symbols)) + return n_species > 1 + + def _expr_is_time_dependent(self, expr: sp.Expr) -> bool: + """Determine whether an expression is time-dependent. + + :param expr: + The expression. + + :returns: + Whether the expression is time-dependent. + """ + # `expr.free_symbols` will differ from `self._states.keys()`, so + # it's easier to compare as `str`. + expr_syms = {str(sym) for sym in expr.free_symbols} + + # Check if the time variable is in the expression. + if 't' in expr_syms: + return True + + # Check if any time-dependent states are in the expression. + state_syms = [str(sym) for sym in self._states] + return any( + not self.state_is_constant(state_syms.index(state)) + for state in expr_syms.intersection(state_syms) + ) + + def _get_unique_root( + self, + root_found: sp.Expr, + roots: List[Event], + ) -> Union[sp.Symbol, None]: + """ + Collects roots of Heaviside functions and events and stores them in + the roots list. It checks for redundancy so that symbolically + equivalent root functions are not stored more than once.
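# --- Editorial aside (illustrative sketch, not part of the patch) -----------
# The redundancy check in _get_unique_root treats two root functions as
# identical when their difference simplifies to zero, e.g.:
import sympy as sp

t = sp.Symbol('t')
assert sp.simplify((t - 1) - (-(1 - t))) == 0  # same root, written differently
# -----------------------------------------------------------------------------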
+ + :param root_found: + equation of the root function + :param roots: + list of already known root functions with identifier + + :returns: + unique identifier for root, or ``None`` if the root is not + time-dependent + """ + if not self._expr_is_time_dependent(root_found): + return None + + for root in roots: + if sp.simplify(root_found - root.get_val()) == 0: + return root.get_id() + + # create an event for a new root function + root_symstr = f'Heaviside_{len(roots)}' + roots.append(Event( + identifier=sp.Symbol(root_symstr), + name=root_symstr, + value=root_found, + state_update=None, + )) + return roots[-1].get_id() + + def _collect_heaviside_roots( + self, + args: Sequence[sp.Expr], + ) -> List[sp.Expr]: + """ + Recursively checks an expression for the occurrence of Heaviside + functions and return all roots found + + :param args: + args attribute of the expanded expression + + :returns: + root functions that were extracted from Heaviside function + arguments + """ + root_funs = [] + for arg in args: + if arg.func == sp.Heaviside: + root_funs.append(arg.args[0]) + elif arg.has(sp.Heaviside): + root_funs.extend(self._collect_heaviside_roots(arg.args)) + + # substitute 'w' expressions into root expressions now, to avoid + # rewriting '{model_name}_root.cpp' and '{model_name}_stau.cpp' headers + # to include 'w.h' + w_sorted = toposort_symbols(dict(zip( + [expr.get_id() for expr in self._expressions], + [expr.get_val() for expr in self._expressions], + ))) + root_funs = [ + r.subs(w_sorted) + for r in root_funs + ] + + return root_funs + + def _process_heavisides( + self, + dxdt: sp.Expr, + roots: List[Event], + ) -> sp.Expr: + """ + Parses the RHS of a state variable, checks for Heaviside functions, + collects unique roots functions that can be tracked by SUNDIALS and + replaces Heaviside Functions by amici helper variables that will be + updated based on SUNDIALS root tracking. + + :param dxdt: + right-hand side of state variable + :param roots: + list of known root functions with identifier + + :returns: + dxdt with Heaviside functions replaced by amici helper variables + """ + + # expanding the rhs will in general help to collect the same + # heaviside function + dt_expanded = dxdt.expand() + # track all the old Heaviside expressions in tmp_roots_old + # replace them later by the new expressions + heavisides = [] + # run through the expression tree and get the roots + tmp_roots_old = self._collect_heaviside_roots(dt_expanded.args) + for tmp_old in tmp_roots_old: + # we want unique identifiers for the roots + tmp_new = self._get_unique_root(tmp_old, roots) + # `tmp_new` is None if the root is not time-dependent. + if tmp_new is None: + continue + # For Heavisides, we need to add the negative function as well + self._get_unique_root(sp.sympify(- tmp_old), roots) + heavisides.append((sp.Heaviside(tmp_old), tmp_new)) + + if heavisides: + # only apply subs if necessary + for heaviside_sympy, heaviside_amici in heavisides: + dxdt = dxdt.subs(heaviside_sympy, heaviside_amici) + + return dxdt + + +class ODEExporter: + """ + The ODEExporter class generates AMICI C++ files for ODE model as + defined in symbolic expressions. 
+ + :ivar model: + ODE definition + + :ivar verbose: + more verbose output if True + + :ivar assume_pow_positivity: + if set to true, a special pow function is + used to avoid problems with state variables that may become negative + due to numerical errors + + :ivar compiler: + distutils/setuptools compiler selection to build the Python extension + + :ivar functions: + carries C++ function signatures and other specifications + + :ivar model_name: + name of the model that will be used for compilation + + :ivar model_path: + path to the generated model specific files + + :ivar model_swig_path: + path to the generated swig files + + :ivar allow_reinit_fixpar_initcond: + indicates whether reinitialization of + initial states depending on fixedParameters is allowed for this model + + :ivar _build_hints: + If the given model uses special functions, this set contains hints for + model building. + + :ivar generate_sensitivity_code: + Specifies whether code for sensitivity computation is to be generated + + .. note:: + When importing large models (several hundreds of species or + parameters), import time can potentially be reduced by using multiple + CPU cores. This is controlled by setting the ``AMICI_IMPORT_NPROCS`` + environment variable to the number of parallel processes that are to be + used (default: 1). Note that for small models this may (slightly) + increase import times. + """ + + def __init__( + self, + ode_model: ODEModel, + outdir: Optional[Union[Path, str]] = None, + verbose: Optional[Union[bool, int]] = False, + assume_pow_positivity: Optional[bool] = False, + compiler: Optional[str] = None, + allow_reinit_fixpar_initcond: Optional[bool] = True, + generate_sensitivity_code: Optional[bool] = True, + model_name: Optional[str] = 'model' + ): + """ + Generate AMICI C++ files for the ODE provided to the constructor. 
+ + :param ode_model: + ODE definition + + :param outdir: + see :meth:`amici.ode_export.ODEExporter.set_paths` + + :param verbose: + verbosity level for logging, ``True``/``False`` default to + :data:`logging.Error`/:data:`logging.DEBUG` + + :param assume_pow_positivity: + if set to true, a special pow function is + used to avoid problems with state variables that may become + negative due to numerical errors + + :param compiler: distutils/setuptools compiler selection to build the + python extension + + :param allow_reinit_fixpar_initcond: + see :class:`amici.ode_export.ODEExporter` + + :param generate_sensitivity_code: + specifies whether code required for sensitivity computation will be + generated + + :param model_name: + name of the model to be used during code generation + """ + set_log_level(logger, verbose) + + self.verbose: bool = logger.getEffectiveLevel() <= logging.DEBUG + self.assume_pow_positivity: bool = assume_pow_positivity + self.compiler: str = compiler + + self.model_path: str = '' + self.model_swig_path: str = '' + + self.set_name(model_name) + self.set_paths(outdir) + + # Signatures and properties of generated model functions (see + # include/amici/model.h for details) + self.model: ODEModel = ode_model + + # To only generate a subset of functions, apply subselection here + self.functions: Dict[str, _FunctionInfo] = copy.deepcopy(functions) + + self.allow_reinit_fixpar_initcond: bool = allow_reinit_fixpar_initcond + self._build_hints = set() + self.generate_sensitivity_code: bool = generate_sensitivity_code + + @log_execution_time('generating cpp code', logger) + def generate_model_code(self) -> None: + """ + Generates the native C++ code for the loaded model and a Matlab + script that can be run to compile a mex file from the C++ code + """ + with _monkeypatched(sp.Pow, '_eval_derivative', + _custom_pow_eval_derivative): + + self._prepare_model_folder() + self._generate_c_code() + self._generate_m_code() + + @log_execution_time('compiling cpp code', logger) + def compile_model(self) -> None: + """ + Compiles the generated code it into a simulatable module + """ + self._compile_c_code(compiler=self.compiler, + verbose=self.verbose) + + def _prepare_model_folder(self) -> None: + """ + Create model directory or remove all files if the output directory + already exists. + """ + os.makedirs(self.model_path, exist_ok=True) + + for file in os.listdir(self.model_path): + file_path = os.path.join(self.model_path, file) + if os.path.isfile(file_path): + os.remove(file_path) + + def _generate_c_code(self) -> None: + """ + Create C++ code files for the model based on + :attribute:`ODEExporter.model`. 
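# --- Editorial aside (illustrative sketch, not part of the patch) -----------
# Hypothetical end-to-end use of ODEExporter, given an ODEModel instance
# `ode_model`; the output directory and model name below are made up.
exporter = ODEExporter(ode_model, outdir='amici_model', model_name='my_model')
exporter.generate_model_code()  # writes C++ sources and the mex compile script
exporter.compile_model()        # builds the Python extension via setup.py
# -----------------------------------------------------------------------------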
+ """ + for func_name, func_info in self.functions.items(): + if func_name in sensi_functions + sparse_sensi_functions and \ + not self.generate_sensitivity_code: + continue + + if func_info.generate_body: + dec = log_execution_time(f'writing {func_name}.cpp', logger) + dec(self._write_function_file)(func_name) + if func_name in sparse_functions and func_info.body: + self._write_function_index(func_name, 'colptrs') + self._write_function_index(func_name, 'rowvals') + + for name in self.model.sym_names(): + # only generate for those that have nontrivial implementation, + # check for both basic variables (not in functions) and function + # computed values + if (name in self.functions + and not self.functions[name].body + and name not in nobody_functions) \ + or (name not in self.functions and + len(self.model.sym(name)) == 0): + continue + self._write_index_files(name) + + self._write_wrapfunctions_cpp() + self._write_wrapfunctions_header() + self._write_model_header_cpp() + self._write_c_make_file() + self._write_swig_files() + self._write_module_setup() + + shutil.copy(CXX_MAIN_TEMPLATE_FILE, + os.path.join(self.model_path, 'main.cpp')) + + def _compile_c_code(self, + verbose: Optional[Union[bool, int]] = False, + compiler: Optional[str] = None) -> None: + """ + Compile the generated model code + + :param verbose: + Make model compilation verbose + + :param compiler: + distutils/setuptools compiler selection to build the python + extension + """ + # setup.py assumes it is run from within the model directory + module_dir = self.model_path + script_args = [sys.executable, os.path.join(module_dir, 'setup.py')] + + if verbose: + script_args.append('--verbose') + else: + script_args.append('--quiet') + + script_args.extend(['build_ext', f'--build-lib={module_dir}']) + + if compiler is not None: + script_args.extend([f'--compiler={compiler}']) + + # distutils.core.run_setup looks nicer, but does not let us check the + # result easily + try: + result = subprocess.run(script_args, + cwd=module_dir, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + check=True) + except subprocess.CalledProcessError as e: + print(e.output.decode('utf-8')) + print("Failed building the model extension.") + if self._build_hints: + print("Note:") + print('\n'.join(self._build_hints)) + raise + + if verbose: + print(result.stdout.decode('utf-8')) + + def _generate_m_code(self) -> None: + """ + Create a Matlab script for compiling code files to a mex file + """ + + # Second order code is not yet implemented. 
+    def _generate_m_code(self) -> None:
+        """
+        Create a Matlab script for compiling code files to a mex file
+        """
+
+        # Second order code is not yet implemented. Once this is done,
+        # those variables will have to be replaced by
+        # "self.model.<var>true()", or the corresponding "model.self.o2flag"
+        nxtrue_rdata = self.model.num_states_rdata()
+        nytrue = self.model.num_obs()
+        nztrue = self.model.num_eventobs()
+        o2flag = 0
+
+        lines = [
+            '% This compile script was automatically created from'
+            ' Python SBML import.',
+            '% If mex compiler is set up within MATLAB, it can be run'
+            ' from MATLAB ',
+            '% in order to compile a mex-file from the Python'
+            ' generated C++ files.',
+            '',
+            f"modelName = '{self.model_name}';",
+            "amimodel.compileAndLinkModel(modelName, '', [], [], [], []);",
+            f"amimodel.generateMatlabWrapper({nxtrue_rdata}, "
+            f"{nytrue}, {self.model.num_par()}, "
+            f"{self.model.num_const()}, {nztrue}, {o2flag}, ...",
+            "    [], ['simulate_' modelName '.m'], modelName, ...",
+            "    'lin', 1, 1);"
+        ]
+
+        # write compile script (for mex)
+        compile_script = os.path.join(self.model_path, 'compileMexFile.m')
+        with open(compile_script, 'w') as fileout:
+            fileout.write('\n'.join(lines))
+
+    def _write_index_files(self, name: str) -> None:
+        """
+        Write index file for a symbolic array.
+
+        :param name:
+            key in ``self.model._syms`` for which the respective file should
+            be written
+        """
+        if name not in self.model.sym_names():
+            raise ValueError(f'Unknown symbolic array: {name}')
+
+        symbols = self.model.sparsesym(name) if name in sparse_functions \
+            else self.model.sym(name).T
+
+        # flatten multiobs
+        if isinstance(next(iter(symbols), None), list):
+            symbols = [symbol for obs in symbols for symbol in obs]
+
+        lines = []
+        for index, symbol in enumerate(symbols):
+            symbol_name = strip_pysb(symbol)
+            if str(symbol) == '0':
+                continue
+            if str(symbol_name) == '':
+                raise ValueError(f'{name} contains a symbol called ""')
+            lines.append(f'#define {symbol_name} {name}[{index}]')
+            if name == 'stau':
+                # we only need a single macro, as all entries have the same
+                # symbol
+                break
+
+        filename = os.path.join(self.model_path, f'{self.model_name}_{name}.h')
+        with open(filename, 'w') as fileout:
+            fileout.write('\n'.join(lines))
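To make the generated index headers concrete, here is the same `#define` emission logic run on invented symbol names (the symbols and the array name `p` are made up for this illustration):

```python
# Toy illustration of the lines _write_index_files emits.
symbols = ['k_on', 'k_off', 'k_cat']
lines = [f'#define {sym} p[{idx}]' for idx, sym in enumerate(symbols)]
# ['#define k_on p[0]', '#define k_off p[1]', '#define k_cat p[2]']
```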
+    def _write_function_file(self, function: str) -> None:
+        """
+        Generate equations and write the C++ code for the function
+        ``function``.
+
+        :param function:
+            name of the function to be written (see ``self.functions``)
+        """
+        # first generate the equations to make sure we have everything we
+        # need in subsequent steps
+        if function in sparse_functions:
+            equations = self.model.sparseeq(function)
+        elif not self.allow_reinit_fixpar_initcond \
+                and function == 'sx0_fixedParameters':
+            # Not required. Will create empty function body.
+            equations = sp.Matrix()
+        else:
+            equations = self.model.eq(function)
+
+        # function header
+        lines = [
+            '#include "amici/symbolic_functions.h"',
+            '#include "amici/defines.h"',
+            '#include "sundials/sundials_types.h"',
+            '',
+            '#include <gsl/gsl-lite.hpp>',
+            '#include <algorithm>',
+            ''
+        ]
+
+        func_info = self.functions[function]
+
+        # extract symbols that need definitions from signature;
+        # don't add includes for files that won't be generated.
+        # Unfortunately we cannot check for `self.functions[sym].body`
+        # here since it may not have been generated yet.
+        for sym in re.findall(
+                r'const (?:realtype|double) \*([\w]+)[0]*(?:,|$)',
+                func_info.arguments
+        ):
+            if sym not in self.model.sym_names():
+                continue
+
+            if sym in sparse_functions:
+                iszero = smart_is_zero_matrix(self.model.sparseeq(sym))
+            elif sym in self.functions:
+                iszero = smart_is_zero_matrix(self.model.eq(sym))
+            else:
+                iszero = len(self.model.sym(sym)) == 0
+
+            if iszero:
+                continue
+
+            lines.append(f'#include "{self.model_name}_{sym}.h"')
+
+        # include return symbols
+        if function in self.model.sym_names() and \
+                function not in non_unique_id_symbols:
+            lines.append(f'#include "{self.model_name}_{function}.h"')
+
+        lines.extend([
+            '',
+            'namespace amici {',
+            f'namespace model_{self.model_name} {{',
+            '',
+            f'{func_info.return_type} {function}_{self.model_name}'
+            f'({func_info.arguments}){{'
+        ])
+
+        # function body
+        body = self._get_function_body(function, equations)
+        if not body:
+            return
+
+        if self.assume_pow_positivity and func_info.assume_pow_positivity:
+            pow_rx = re.compile(r'(^|\W)std::pow\(')
+            body = [
+                # substitute twice to catch cases where the ending '(' of one
+                # match would be the starting (^|\W) of the following match
+                pow_rx.sub(r'\1amici::pos_pow(',
+                           pow_rx.sub(r'\1amici::pos_pow(', line))
+                for line in body
+            ]
+
+        self.functions[function].body = body
+
+        lines += body
+        lines.extend([
+            '}',
+            '',
+            f'}} // namespace model_{self.model_name}',
+            '} // namespace amici\n',
+        ])
+
+        # check custom functions
+        for fun in CUSTOM_FUNCTIONS:
+            if 'include' in fun and any(fun['c++'] in line for line in lines):
+                if 'build_hint' in fun:
+                    self._build_hints.add(fun['build_hint'])
+                lines.insert(0, fun['include'])
+
+        filename = os.path.join(self.model_path,
+                                f'{self.model_name}_{function}.cpp')
+        with open(filename, 'w') as fileout:
+            fileout.write('\n'.join(lines))
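The double application of `pow_rx.sub` above is easy to misread; a small demonstration of why a single pass is not enough:

```python
import re

pow_rx = re.compile(r'(^|\W)std::pow\(')
line = 'y = std::pow(std::pow(x, 2), 3);'
once = pow_rx.sub(r'\1amici::pos_pow(', line)
# 'y = amici::pos_pow(std::pow(x, 2), 3);' -- the inner occurrence is
# skipped because its preceding '(' was consumed by the outer match
twice = pow_rx.sub(r'\1amici::pos_pow(', once)
# 'y = amici::pos_pow(amici::pos_pow(x, 2), 3);'
```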
+    def _write_function_index(self, function: str, indextype: str) -> None:
+        """
+        Generate and write the C++ code for an index file (``colptrs`` or
+        ``rowvals``) of the sparse function ``function``.
+
+        :param function:
+            name of the function to be written (see ``self.functions``)
+
+        :param indextype:
+            type of index {'colptrs', 'rowvals'}
+        """
+        if indextype == 'colptrs':
+            values = self.model.colptrs(function)
+            setter = 'indexptrs'
+        elif indextype == 'rowvals':
+            values = self.model.rowvals(function)
+            setter = 'indexvals'
+        else:
+            raise ValueError('Invalid value for indextype, must be colptrs or '
+                             f'rowvals: {indextype}')
+
+        # function signature
+        if function in multiobs_functions:
+            signature = f'(SUNMatrixWrapper &{function}, int index)'
+        else:
+            signature = f'(SUNMatrixWrapper &{function})'
+
+        lines = [
+            '#include "amici/sundials_matrix_wrapper.h"',
+            '#include "sundials/sundials_types.h"',
+            '',
+            '#include <array>',
+            '#include <algorithm>',
+            '',
+            'namespace amici {',
+            f'namespace model_{self.model_name} {{',
+            '',
+        ]
+
+        # Generate static array with indices
+        if len(values):
+            static_array_name = f"{function}_{indextype}_{self.model_name}_"
+            if function in multiobs_functions:
+                # list of index vectors
+                lines.append(
+                    "static constexpr std::array<std::array<sunindextype, "
+                    f"{len(values[0])}>, {len(values)}> "
+                    f"{static_array_name} = {{{{"
+                )
+                lines.extend(['    {'
+                              + ', '.join(map(str, index_vector)) + '}, '
+                              for index_vector in values])
+                lines.append("}};")
+            else:
+                # single index vector
+                lines.extend([
+                    "static constexpr std::array<sunindextype, "
+                    f"{len(values)}> {static_array_name} = {{",
+                    '    ' + ', '.join(map(str, values)),
+                    "};"
+                ])
+
+        lines.extend([
+            '',
+            f'void {function}_{indextype}_{self.model_name}{signature}{{',
+        ])
+
+        if len(values):
+            if function in multiobs_functions:
+                lines.append(
+                    f"    {function}.set_{setter}"
+                    f"(gsl::make_span({static_array_name}[index]));"
+                )
+            else:
+                lines.append(
+                    f"    {function}.set_{setter}"
+                    f"(gsl::make_span({static_array_name}));"
+                )
+
+        lines.extend([
+            '}',
+            '',
+            f'}} // namespace model_{self.model_name}',
+            '} // namespace amici\n',
+        ])
+
+        filename = f'{self.model_name}_{function}_{indextype}.cpp'
+        filename = os.path.join(self.model_path, filename)
+
+        with open(filename, 'w') as fileout:
+            fileout.write('\n'.join(lines))
+
+    def _get_function_body(
+            self,
+            function: str,
+            equations: sp.Matrix
+    ) -> List[str]:
+        """
+        Generate C++ code for the body of function ``function``.
+
+        :param function:
+            name of the function to be written (see ``self.functions``)
+
+        :param equations:
+            symbolic definition of the function body
+
+        :return:
+            generated C++ code
+        """
+        lines = []
+
+        if (
+                len(equations) == 0
+                or (
+                    isinstance(equations, (sp.Matrix, sp.ImmutableDenseMatrix))
+                    and min(equations.shape) == 0
+                )
+        ):
+            # dJydy is a list
+            return lines
+
+        if not self.allow_reinit_fixpar_initcond and function in {
+            'sx0_fixedParameters',
+            'x0_fixedParameters',
+        }:
+            return lines
+
+        if function == 'sx0_fixedParameters':
+            # here we only want to overwrite values where x0_fixedParameters
+            # was applied
+            lines.extend([
+                # Keep list of indices of fixed parameters occurring in x0
+                "    static const std::array<int, "
+                + str(len(self.model._x0_fixedParameters_idx))
+                + "> _x0_fixedParameters_idxs = {",
+                "        "
+                + ', '.join(str(x)
+                            for x in self.model._x0_fixedParameters_idx),
+                "    };",
+                "",
+                # Set all parameters that are to be reset to 0, so that the
+                # switch statement below only needs to handle non-zero
+                # entries (which usually reduces file size and speeds up
+                # compilation significantly).
+ " for(auto idx: reinitialization_state_idxs) {", + " if(std::find(_x0_fixedParameters_idxs.cbegin(), " + "_x0_fixedParameters_idxs.cend(), idx) != " + "_x0_fixedParameters_idxs.cend())\n" + " sx0_fixedParameters[idx] = 0.0;", + " }" + ]) + + cases = {} + for ipar in range(self.model.num_par()): + expressions = [] + for index, formula in zip( + self.model._x0_fixedParameters_idx, + equations[:, ipar] + ): + if not formula.is_zero: + expressions.extend([ + f'if(std::find(' + 'reinitialization_state_idxs.cbegin(), ' + f'reinitialization_state_idxs.cend(), {index}) != ' + 'reinitialization_state_idxs.cend())', + f' {function}[{index}] = ' + f'{self.model._code_printer.doprint(formula)};' + ]) + cases[ipar] = expressions + lines.extend(get_switch_statement('ip', cases, 1)) + + elif function == 'x0_fixedParameters': + for index, formula in zip( + self.model._x0_fixedParameters_idx, + equations + ): + lines.append( + f' if(std::find(reinitialization_state_idxs.cbegin(), ' + f'reinitialization_state_idxs.cend(), {index}) != ' + 'reinitialization_state_idxs.cend())\n ' + f'{function}[{index}] = ' + f'{self.model._code_printer.doprint(formula)};' + ) + + elif function in event_functions: + cases = { + ie: self.model._code_printer._get_sym_lines_array( + equations[ie], function, 0) + for ie in range(self.model.num_events()) + if not smart_is_zero_matrix(equations[ie]) + } + lines.extend(get_switch_statement('ie', cases, 1)) + + elif function in event_sensi_functions: + outer_cases = {} + for ie, inner_equations in enumerate(equations): + inner_lines = [] + inner_cases = { + ipar: self.model._code_printer._get_sym_lines_array( + inner_equations[:, ipar], function, 0) + for ipar in range(self.model.num_par()) + if not smart_is_zero_matrix(inner_equations[:, ipar]) + } + inner_lines.extend(get_switch_statement( + 'ip', inner_cases, 0)) + outer_cases[ie] = copy.copy(inner_lines) + lines.extend(get_switch_statement('ie', outer_cases, 1)) + + elif function in sensi_functions \ + and equations.shape[1] == self.model.num_par(): + cases = { + ipar: self.model._code_printer._get_sym_lines_array( + equations[:, ipar], function, 0) + for ipar in range(self.model.num_par()) + if not smart_is_zero_matrix(equations[:, ipar]) + } + lines.extend(get_switch_statement('ip', cases, 1)) + elif function in multiobs_functions: + if function == 'dJydy': + cases = { + iobs: self.model._code_printer._get_sym_lines_array( + equations[iobs], function, 0) + for iobs in range(self.model.num_obs()) + if not smart_is_zero_matrix(equations[iobs]) + } + else: + cases = { + iobs: self.model._code_printer._get_sym_lines_array( + equations[:, iobs], function, 0) + for iobs in range(equations.shape[1]) + if not smart_is_zero_matrix(equations[:, iobs]) + } + if function.startswith(('Jz', 'dJz', 'Jrz', 'dJrz')): + iterator = 'iz' + else: + iterator = 'iy' + lines.extend(get_switch_statement(iterator, cases, 1)) + + elif function in self.model.sym_names() \ + and function not in non_unique_id_symbols: + if function in sparse_functions: + symbols = self.model.sparsesym(function) + else: + symbols = self.model.sym(function) + lines += self.model._code_printer._get_sym_lines_symbols( + symbols, equations, function, 4) + + else: + lines += self.model._code_printer._get_sym_lines_array( + equations, function, 4) + + return [line for line in lines if line] + + def _write_wrapfunctions_cpp(self) -> None: + """ + Write model-specific 'wrapper' file (``wrapfunctions.cpp``). 
+        """
+        template_data = {'MODELNAME': self.model_name}
+        apply_template(
+            os.path.join(amiciSrcPath, 'wrapfunctions.template.cpp'),
+            os.path.join(self.model_path, 'wrapfunctions.cpp'),
+            template_data
+        )
+
+    def _write_wrapfunctions_header(self) -> None:
+        """
+        Write model-specific header file (``wrapfunctions.h``).
+        """
+        template_data = {'MODELNAME': str(self.model_name)}
+        apply_template(
+            os.path.join(amiciSrcPath, 'wrapfunctions.ODE_template.h'),
+            os.path.join(self.model_path, 'wrapfunctions.h'),
+            template_data
+        )
+
+    def _write_model_header_cpp(self) -> None:
+        """
+        Write model-specific header and cpp file (MODELNAME.{h,cpp}).
+        """
+        tpl_data = {
+            'MODELNAME': self.model_name,
+            'NX_RDATA': self.model.num_states_rdata(),
+            'NXTRUE_RDATA': self.model.num_states_rdata(),
+            'NX_SOLVER': self.model.num_states_solver(),
+            'NXTRUE_SOLVER': self.model.num_states_solver(),
+            'NX_SOLVER_REINIT': self.model.num_state_reinits(),
+            'NY': self.model.num_obs(),
+            'NYTRUE': self.model.num_obs(),
+            'NZ': self.model.num_eventobs(),
+            'NZTRUE': self.model.num_eventobs(),
+            'NEVENT': self.model.num_events(),
+            'NOBJECTIVE': '1',
+            'NW': len(self.model.sym('w')),
+            'NDWDP': len(self.model.sparsesym(
+                'dwdp', force_generate=self.generate_sensitivity_code
+            )),
+            'NDWDX': len(self.model.sparsesym('dwdx')),
+            'NDWDW': len(self.model.sparsesym('dwdw')),
+            'NDXDOTDW': len(self.model.sparsesym('dxdotdw')),
+            'NDXDOTDP_EXPLICIT': len(self.model.sparsesym(
+                'dxdotdp_explicit',
+                force_generate=self.generate_sensitivity_code
+            )),
+            'NDXDOTDX_EXPLICIT': len(self.model.sparsesym(
+                'dxdotdx_explicit')),
+            'NDJYDY': 'std::vector<int>{%s}'
+                      % ','.join(str(len(x))
+                                 for x in self.model.sparsesym('dJydy')),
+            'NDXRDATADXSOLVER': len(self.model.sparsesym('dx_rdatadx_solver')),
+            'NDXRDATADTCL': len(self.model.sparsesym('dx_rdatadtcl')),
+            'NDTOTALCLDXRDATA': len(self.model.sparsesym('dtotal_cldx_rdata')),
+            'UBW': self.model.num_states_solver(),
+            'LBW': self.model.num_states_solver(),
+            'NP': self.model.num_par(),
+            'NK': self.model.num_const(),
+            'O2MODE': 'amici::SecondOrderMode::none',
+            # using the code printer ensures proper handling of nan/inf
+            'PARAMETERS': self.model._code_printer.doprint(
+                self.model.val('p'))[1:-1],
+            'FIXED_PARAMETERS': self.model._code_printer.doprint(
+                self.model.val('k'))[1:-1],
+            'PARAMETER_NAMES_INITIALIZER_LIST':
+                self._get_symbol_name_initializer_list('p'),
+            'STATE_NAMES_INITIALIZER_LIST':
+                self._get_symbol_name_initializer_list('x_rdata'),
+            'FIXED_PARAMETER_NAMES_INITIALIZER_LIST':
+                self._get_symbol_name_initializer_list('k'),
+            'OBSERVABLE_NAMES_INITIALIZER_LIST':
+                self._get_symbol_name_initializer_list('y'),
+            'OBSERVABLE_TRAFO_INITIALIZER_LIST':
+                '\n'.join(
+                    f'ObservableScaling::{trafo}, // y[{idx}]'
+                    for idx, trafo in enumerate(
+                        self.model.get_observable_transformations()
+                    )
+                ),
+            'EXPRESSION_NAMES_INITIALIZER_LIST':
+                self._get_symbol_name_initializer_list('w'),
+            'PARAMETER_IDS_INITIALIZER_LIST':
+                self._get_symbol_id_initializer_list('p'),
+            'STATE_IDS_INITIALIZER_LIST':
+                self._get_symbol_id_initializer_list('x_rdata'),
+            'FIXED_PARAMETER_IDS_INITIALIZER_LIST':
+                self._get_symbol_id_initializer_list('k'),
+            'OBSERVABLE_IDS_INITIALIZER_LIST':
+                self._get_symbol_id_initializer_list('y'),
+            'EXPRESSION_IDS_INITIALIZER_LIST':
+                self._get_symbol_id_initializer_list('w'),
+            'STATE_IDXS_SOLVER_INITIALIZER_LIST':
+                ', '.join(
+                    str(idx)
+                    for idx, state in enumerate(self.model._states)
+                    if not state.has_conservation_law()
+                ),
+            'REINIT_FIXPAR_INITCOND':
AmiciCxxCodePrinter.print_bool( + self.allow_reinit_fixpar_initcond), + 'AMICI_VERSION_STRING': __version__, + 'AMICI_COMMIT_STRING': __commit__, + 'W_RECURSION_DEPTH': self.model._w_recursion_depth, + 'QUADRATIC_LLH': AmiciCxxCodePrinter.print_bool( + self.model._has_quadratic_nllh), + 'ROOT_INITIAL_VALUES': + ', '.join(map( + lambda event: AmiciCxxCodePrinter.print_bool( + event.get_initial_value()), + self.model._events)), + 'Z2EVENT': + ', '.join(map(str, self.model._z2event)) + } + + for func_name, func_info in self.functions.items(): + if func_name in nobody_functions: + continue + + if not func_info.body: + tpl_data[f'{func_name.upper()}_DEF'] = '' + + if func_name in sensi_functions + sparse_sensi_functions and \ + not self.generate_sensitivity_code: + impl = '' + else: + impl = get_model_override_implementation( + func_name, self.model_name, nobody=True + ) + + tpl_data[f'{func_name.upper()}_IMPL'] = impl + + if func_name in sparse_functions: + for indexfield in ['colptrs', 'rowvals']: + if func_name in sparse_sensi_functions and \ + not self.generate_sensitivity_code: + impl = '' + else: + impl = get_sunindex_override_implementation( + func_name, self.model_name, indexfield, + nobody=True + ) + tpl_data[f'{func_name.upper()}_{indexfield.upper()}_DEF'] \ + = '' + tpl_data[f'{func_name.upper()}_{indexfield.upper()}_IMPL'] \ + = impl + continue + + tpl_data[f'{func_name.upper()}_DEF'] = \ + get_function_extern_declaration(func_name, self.model_name) + tpl_data[f'{func_name.upper()}_IMPL'] = \ + get_model_override_implementation(func_name, self.model_name) + if func_name in sparse_functions: + tpl_data[f'{func_name.upper()}_COLPTRS_DEF'] = \ + get_sunindex_extern_declaration( + func_name, self.model_name, 'colptrs') + tpl_data[f'{func_name.upper()}_COLPTRS_IMPL'] = \ + get_sunindex_override_implementation( + func_name, self.model_name, 'colptrs') + tpl_data[f'{func_name.upper()}_ROWVALS_DEF'] = \ + get_sunindex_extern_declaration( + func_name, self.model_name, 'rowvals') + tpl_data[f'{func_name.upper()}_ROWVALS_IMPL'] = \ + get_sunindex_override_implementation( + func_name, self.model_name, 'rowvals') + + if self.model.num_states_solver() == self.model.num_states_rdata(): + tpl_data['X_RDATA_DEF'] = '' + tpl_data['X_RDATA_IMPL'] = '' + + tpl_data = {k: str(v) for k, v in tpl_data.items()} + + apply_template( + os.path.join(amiciSrcPath, 'model_header.ODE_template.h'), + os.path.join(self.model_path, f'{self.model_name}.h'), + tpl_data + ) + + apply_template( + os.path.join(amiciSrcPath, 'model.ODE_template.cpp'), + os.path.join(self.model_path, f'{self.model_name}.cpp'), + tpl_data + ) + + def _get_symbol_name_initializer_list(self, name: str) -> str: + """ + Get SBML name initializer list for vector of names for the given + model entity + + :param name: + any key present in ``self.model._syms`` + + :return: + Template initializer list of names + """ + return '\n'.join( + f'"{symbol}", // {name}[{idx}]' + for idx, symbol in enumerate(self.model.name(name)) + ) + + def _get_symbol_id_initializer_list(self, name: str) -> str: + """ + Get C++ initializer list for vector of names for the given model + entity + + :param name: + any key present in ``self.model._syms`` + + :return: + Template initializer list of ids + """ + return '\n'.join( + f'"{self.model._code_printer.doprint(symbol)}", // {name}[{idx}]' + for idx, symbol in enumerate(self.model.sym(name)) + ) + + def _write_c_make_file(self): + """Write CMake ``CMakeLists.txt`` file for this model.""" + sources = '\n'.join( + f + ' ' 
+            for f in os.listdir(self.model_path)
+            if f.endswith('.cpp') and f != 'main.cpp'
+        )
+
+        template_data = {'MODELNAME': self.model_name,
+                         'SOURCES': sources,
+                         'AMICI_VERSION': __version__}
+        apply_template(
+            MODEL_CMAKE_TEMPLATE_FILE,
+            Path(self.model_path, 'CMakeLists.txt'),
+            template_data
+        )
+
+    def _write_swig_files(self) -> None:
+        """Write SWIG interface files for this model."""
+        Path(self.model_swig_path).mkdir(exist_ok=True)
+        template_data = {'MODELNAME': self.model_name}
+        apply_template(
+            Path(amiciSwigPath, 'modelname.template.i'),
+            Path(self.model_swig_path, self.model_name + '.i'),
+            template_data
+        )
+        shutil.copy(SWIG_CMAKE_TEMPLATE_FILE,
+                    Path(self.model_swig_path, 'CMakeLists.txt'))
+
+    def _write_module_setup(self) -> None:
+        """
+        Create a setuptools ``setup.py`` file for compiling the model module.
+        """
+        template_data = {'MODELNAME': self.model_name,
+                         'AMICI_VERSION': __version__,
+                         'PACKAGE_VERSION': '0.1.0'}
+        apply_template(Path(amiciModulePath, 'setup.template.py'),
+                       Path(self.model_path, 'setup.py'),
+                       template_data)
+        apply_template(Path(amiciModulePath, 'MANIFEST.template.in'),
+                       Path(self.model_path, 'MANIFEST.in'), {})
+        # write __init__.py for the model module
+        Path(self.model_path, self.model_name).mkdir(exist_ok=True)
+
+        apply_template(
+            Path(amiciModulePath, '__init__.template.py'),
+            Path(self.model_path, self.model_name, '__init__.py'),
+            template_data
+        )
+
+    def set_paths(self, output_dir: Optional[Union[str, Path]] = None) -> None:
+        """
+        Set output paths for the model and create them if necessary
+
+        :param output_dir:
+            relative or absolute path where the generated model code is to
+            be placed. If ``None``, this defaults to
+            ``amici-{self.model_name}`` in the current working directory.
+            The directory will be created if it does not exist.
+        """
+        if output_dir is None:
+            output_dir = os.path.join(os.getcwd(),
+                                      f'amici-{self.model_name}')
+
+        self.model_path = os.path.abspath(output_dir)
+        self.model_swig_path = os.path.join(self.model_path, 'swig')
+
+    def set_name(self, model_name: str) -> None:
+        """
+        Sets the model name
+
+        :param model_name:
+            name of the model (may only contain upper and lower case letters,
+            digits and underscores, and must not start with a digit)
+        """
+        if not is_valid_identifier(model_name):
+            raise ValueError(
+                f"'{model_name}' is not a valid model name. "
+                "Model name may only contain upper and lower case letters, "
+                "digits and underscores, and must not start with a digit.")
+
+        self.model_name = model_name
+
+
+class TemplateAmici(Template):
+    """
+    Template format used in AMICI (see :class:`string.Template` for more
+    details).
+
+    :cvar delimiter:
+        delimiter that identifies template variables
+    """
+    delimiter = 'TPL_'
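A quick demonstration of the `TPL_` delimiter in action; the template string here is invented:

```python
# TemplateAmici substitutes TPL_-prefixed identifiers; template invented.
tpl = TemplateAmici('#define TPL_MODELNAME "TPL_MODELNAME"')
tpl.safe_substitute({'MODELNAME': 'my_model'})
# -> '#define my_model "my_model"'
```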
+def apply_template(source_file: Union[str, Path],
+                   target_file: Union[str, Path],
+                   template_data: Dict[str, str]) -> None:
+    """
+    Load the source file, apply the template substitutions provided in
+    ``template_data``, and save the result as ``target_file``.
+
+    :param source_file:
+        relative or absolute path to template file
+
+    :param target_file:
+        relative or absolute path to output file
+
+    :param template_data:
+        template keywords to substitute (key is template
+        variable without :attr:`TemplateAmici.delimiter`)
+    """
+    with open(source_file) as filein:
+        src = TemplateAmici(filein.read())
+    result = src.safe_substitute(template_data)
+    with open(target_file, 'w') as fileout:
+        fileout.write(result)
+
+
+def get_function_extern_declaration(fun: str, name: str) -> str:
+    """
+    Constructs the extern function declaration for a given function
+
+    :param fun:
+        function name
+    :param name:
+        model name
+
+    :return:
+        C++ function declaration string
+    """
+    f = functions[fun]
+    return f'extern {f.return_type} {fun}_{name}({f.arguments});'
+
+
+def get_sunindex_extern_declaration(fun: str, name: str,
+                                    indextype: str) -> str:
+    """
+    Constructs the function declaration for an index function of a given
+    function
+
+    :param fun:
+        function name
+
+    :param name:
+        model name
+
+    :param indextype:
+        index function {'colptrs', 'rowvals'}
+
+    :return:
+        C++ function declaration string
+    """
+    index_arg = ', int index' if fun in multiobs_functions else ''
+    return \
+        f'extern void {fun}_{indextype}_{name}' \
+        f'(SUNMatrixWrapper &{indextype}{index_arg});'
+
+
+def get_model_override_implementation(fun: str, name: str,
+                                      nobody: bool = False) -> str:
+    """
+    Constructs the ``amici::Model::*`` override implementation for a given
+    function
+
+    :param fun:
+        function name
+
+    :param name:
+        model name
+
+    :param nobody:
+        whether the function body should be left empty, i.e., the function
+        has no nontrivial implementation
+
+    :return:
+        C++ function implementation string
+    """
+    impl = '{return_type} f{fun}({signature}) override {{'
+
+    if nobody:
+        impl += '}}\n'
+    else:
+        impl += '\n{ind8}{fun}_{name}({eval_signature});\n{ind4}}}\n'
+
+    func_info = functions[fun]
+
+    return impl.format(
+        ind4=' ' * 4,
+        ind8=' ' * 8,
+        fun=fun,
+        name=name,
+        signature=func_info.arguments,
+        eval_signature=remove_argument_types(func_info.arguments),
+        return_type=func_info.return_type
+    )
+
+
+def get_sunindex_override_implementation(fun: str, name: str,
+                                         indextype: str,
+                                         nobody: bool = False) -> str:
+    """
+    Constructs the ``amici::Model`` function implementation for an index
+    function of a given function
+
+    :param fun:
+        function name
+
+    :param name:
+        model name
+
+    :param indextype:
+        index function {'colptrs', 'rowvals'}
+
+    :param nobody:
+        whether the function body should be left empty, i.e., the
+        corresponding function has no nontrivial implementation
+
+    :return:
+        C++ function implementation string
+    """
+    index_arg = ', int index' if fun in multiobs_functions else ''
+    index_arg_eval = ', index' if fun in multiobs_functions else ''
+
+    impl = 'void f{fun}_{indextype}({signature}) override {{'
+
+    if nobody:
+        impl += '}}\n'
+    else:
+        impl += '{ind8}{fun}_{indextype}_{name}({eval_signature});\n{ind4}}}\n'
+
+    return impl.format(
+        ind4=' ' * 4,
+        ind8=' ' * 8,
+        fun=fun,
+        indextype=indextype,
+        name=name,
+        signature=f'SUNMatrixWrapper &{indextype}{index_arg}',
+        eval_signature=f'{indextype}{index_arg_eval}',
+    )
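As a concrete illustration of the signature stripping performed by `remove_argument_types` (defined just below), with an invented signature; only the types listed in `known_types` are removed:

```python
# Invented signature; stripping relies on the known_types list below.
sig = 'realtype *xdot, const realtype t, const realtype *x'
remove_argument_types(sig)
# -> 'xdot, t, x'
```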
+def remove_argument_types(signature: str) -> str:
+    """
+    Strips argument types from a function signature
+
+    :param signature:
+        function signature
+
+    :return:
+        string that can be used to construct function calls with the same
+        variable names and ordering as in the function signature
+    """
+    # remove * prefix for pointers (pointer must always be removed before
+    # values otherwise we will inadvertently dereference values,
+    # same applies for const specifications)
+    #
+    # always add whitespace after type definition for cosmetic reasons
+    known_types = [
+        'const realtype *',
+        'const double *',
+        'const realtype ',
+        'double *',
+        'realtype *',
+        'const int ',
+        'int ',
+        'SUNMatrixContent_Sparse ',
+        'gsl::span<const int> '
+    ]
+
+    for type_str in known_types:
+        signature = signature.replace(type_str, '')
+
+    return signature
+
+
+def is_valid_identifier(x: str) -> bool:
+    """
+    Check whether `x` is a valid identifier for conditions, parameters,
+    observables, etc. Identifiers may only contain upper and lower case
+    letters, digits and underscores, and must not start with a digit.
+
+    :param x:
+        string to check
+
+    :return:
+        ``True`` if valid, ``False`` otherwise
+    """
+    return IDENTIFIER_PATTERN.match(x) is not None
+
+
+@contextlib.contextmanager
+def _monkeypatched(obj: object, name: str, patch: Any):
+    """
+    Temporarily monkeypatches an object.
+
+    :param obj:
+        object to be patched
+
+    :param name:
+        name of the attribute to be patched
+
+    :param patch:
+        patched value
+    """
+    pre_patched_value = getattr(obj, name)
+    setattr(obj, name, patch)
+    try:
+        yield obj
+    finally:
+        setattr(obj, name, pre_patched_value)
+
+
+def _custom_pow_eval_derivative(self, s):
+    """
+    Custom Pow derivative that removes a removable singularity for
+    ``self.base == 0`` and ``self.base.diff(s) == 0``. This function is
+    intended to be monkeypatched into :py:meth:`sympy.Pow._eval_derivative`.
+
+    :param self:
+        the :class:`sympy.Pow` expression
+
+    :param s:
+        variable with respect to which the derivative will be computed
+    """
+    dbase = self.base.diff(s)
+    dexp = self.exp.diff(s)
+    part1 = sp.Pow(self.base, self.exp - 1) * self.exp * dbase
+    part2 = self * dexp * sp.log(self.base)
+    if self.base.is_nonzero or dbase.is_nonzero or part2.is_zero:
+        # first piece never applies or is zero anyway
+        return part1 + part2
+
+    return part1 + sp.Piecewise(
+        (self.base, sp.And(sp.Eq(self.base, 0), sp.Eq(dbase, 0))),
+        (part2, True)
+    )
+
+
+def _jacobian_element(i, j, eq_i, sym_var_j):
+    """Compute a single element of a Jacobian"""
+    return (i, j), eq_i.diff(sym_var_j)
+
+
+def _parallel_applyfunc(
+        obj: sp.Matrix,
+        func: Callable
+) -> sp.Matrix:
+    """Parallel implementation of sympy's Matrix.applyfunc"""
+    if (n_procs := int(os.environ.get("AMICI_IMPORT_NPROCS", 1))) == 1:
+        # serial
+        return obj.applyfunc(func)
+
+    # parallel
+    from pickle import PicklingError
+    from sympy.matrices.dense import DenseMatrix
+    from multiprocessing import get_context
+    # "spawn" should avoid potential deadlocks occurring with fork
+    # see e.g. https://stackoverflow.com/a/66113051
+    ctx = get_context('spawn')
+    with ctx.Pool(n_procs) as p:
+        try:
+            if isinstance(obj, DenseMatrix):
+                return obj._new(obj.rows, obj.cols, p.map(func, obj))
+            elif isinstance(obj, sp.SparseMatrix):
+                dok = obj.todok()
+                mapped = p.map(func, dok.values())
+                dok = {k: v for k, v in zip(dok.keys(), mapped) if v != 0}
+                return obj._new(obj.rows, obj.cols, dok)
+            else:
+                raise ValueError(f"Unsupported matrix type {type(obj)}")
+        except PicklingError as e:
+            raise ValueError(
+                f"Couldn't pickle {func}. This is likely because the argument "
+                "was not a module-level function. Either rewrite the argument "
+                "to a module-level function or disable parallelization by "
+                "setting `AMICI_IMPORT_NPROCS=1`."
+            ) from e
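`_parallel_applyfunc` only parallelizes when `AMICI_IMPORT_NPROCS` is greater than 1, and the mapped callable must be picklable under the `spawn` start method. A hedged usage sketch (the helper is module-private; the function and matrix here are invented):

```python
import os
import sympy as sp

def _square(expr):
    # must be a module-level function so it can be pickled for "spawn"
    return expr ** 2

os.environ['AMICI_IMPORT_NPROCS'] = '2'
m = sp.Matrix([[sp.Symbol('a'), sp.Symbol('b')]])
_parallel_applyfunc(m, _square)  # Matrix([[a**2, b**2]])
```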
diff --git a/python/sdist/amici/ode_model.py b/python/sdist/amici/ode_model.py
deleted file mode 120000
index cbd58833cd..0000000000
--- a/python/sdist/amici/ode_model.py
+++ /dev/null
@@ -1 +0,0 @@
-../../amici/ode_model.py
\ No newline at end of file
diff --git a/python/sdist/amici/ode_model.py b/python/sdist/amici/ode_model.py
new file mode 100644
index 0000000000..a21526d27d
--- /dev/null
+++ b/python/sdist/amici/ode_model.py
@@ -0,0 +1,616 @@
+"""Objects for AMICI's internal ODE model representation"""
+
+
+import sympy as sp
+import numbers
+
+from typing import (
+    Optional, Union, Dict, SupportsFloat, Set
+)
+
+from .import_utils import ObservableTransformation, \
+    generate_measurement_symbol, generate_regularization_symbol,\
+    RESERVED_SYMBOLS
+from .import_utils import cast_to_sym
+
+__all__ = [
+    'ConservationLaw', 'Constant', 'Event', 'Expression', 'LogLikelihoodY',
+    'LogLikelihoodZ', 'LogLikelihoodRZ', 'ModelQuantity', 'Observable',
+    'Parameter', 'SigmaY', 'SigmaZ', 'State', 'EventObservable'
+]
+
+
+class ModelQuantity:
+    """
+    Base class for model components
+    """
+    def __init__(self,
+                 identifier: sp.Symbol,
+                 name: str,
+                 value: Union[SupportsFloat, numbers.Number, sp.Expr]):
+        """
+        Create a new ModelQuantity instance.
+
+        :param identifier:
+            unique identifier of the quantity
+
+        :param name:
+            individual name of the quantity (does not need to be unique)
+
+        :param value:
+            either formula, numeric value or initial value
+        """
+        if not isinstance(identifier, sp.Symbol):
+            raise TypeError(f'identifier must be sympy.Symbol, was '
+                            f'{type(identifier)}')
+
+        if str(identifier) in RESERVED_SYMBOLS or \
+                (hasattr(identifier, 'name') and
+                 identifier.name in RESERVED_SYMBOLS):
+            raise ValueError(f'Cannot add model quantity with name "{name}", '
+                             f'please rename.')
+        self._identifier: sp.Symbol = identifier
+
+        if not isinstance(name, str):
+            raise TypeError(f'name must be str, was {type(name)}')
+
+        self._name: str = name
+
+        self._value: sp.Expr = cast_to_sym(value, 'value')
+
+    def __repr__(self) -> str:
+        """
+        Representation of the ModelQuantity object
+
+        :return:
+            string representation of the ModelQuantity
+        """
+        return str(self._identifier)
+
+    def get_id(self) -> sp.Symbol:
+        """
+        ModelQuantity identifier
+
+        :return:
+            identifier of the ModelQuantity
+        """
+        return self._identifier
+
+    def get_name(self) -> str:
+        """
+        ModelQuantity name
+
+        :return:
+            name of the ModelQuantity
+        """
+        return self._name
+
+    def get_val(self) -> sp.Expr:
+        """
+        ModelQuantity value
+
+        :return:
+            value of the ModelQuantity
+        """
+        return self._value
+
+    def set_val(self, val: sp.Expr):
+        """
+        Set ModelQuantity value
+
+        :param val:
+            new value of the ModelQuantity
+        """
+        self._value = cast_to_sym(val, 'value')
+
+
+class ConservationLaw(ModelQuantity):
+    """
+    A conservation law defines the total amount of a (weighted) sum of
+    states.
+    """
+    def __init__(self,
+                 identifier: sp.Symbol,
+                 name: str,
+                 value: sp.Expr,
+                 coefficients: Dict[sp.Symbol, sp.Expr],
+                 state_id: sp.Symbol):
+        """
+        Create a new ConservationLaw instance.
+
+        :param identifier:
+            unique identifier of the ConservationLaw
+
+        :param name:
+            individual name of the ConservationLaw (does not need to be
+            unique)
+
+        :param value: formula (sum of states)
+
+        :param coefficients:
+            coefficients of the states in the sum
+
+        :param state_id:
+            identifier of the state that this conservation law replaces
+        """
+        self._state_expr: sp.Symbol = identifier - (value - state_id)
+        self._coefficients: Dict[sp.Symbol, sp.Expr] = coefficients
+        self._ncoeff: sp.Expr = coefficients[state_id]
+        super(ConservationLaw, self).__init__(identifier, name, value)
+
+    def get_ncoeff(self, state_id) -> Union[sp.Expr, int, float]:
+        """
+        Computes the normalized coefficient a_i/a_j, where i is the index of
+        the provided state_id and j is the index of the state that is
+        replaced by this conservation law. This can be used to compute both
+        dtotal_cl/dx_rdata (=ncoeff) and dx_rdata/dx_solver (=-ncoeff).
+
+        :param state_id:
+            identifier of the state
+
+        :return: normalized coefficient of the state
+        """
+        return self._coefficients.get(state_id, 0.0) / self._ncoeff
+
+    def get_x_rdata(self):
+        """
+        Returns the expression that allows computation of x_rdata for the
+        state that this conservation law replaces.
+
+        :return: x_rdata expression
+        """
+        return self._state_expr
+
+
+class State(ModelQuantity):
+    """
+    A State variable defines an entity that evolves with time according to
+    the provided time derivative, abbreviated by ``x``.
+
+    :ivar _conservation_law:
+        algebraic formula that allows computation of this
+        state according to a conservation law
+
+    :ivar _dt:
+        algebraic formula that defines the temporal derivative of this state
+    """
+    def __init__(self,
+                 identifier: sp.Symbol,
+                 name: str,
+                 init: sp.Expr,
+                 dt: sp.Expr):
+        """
+        Create a new State instance. Extends :meth:`ModelQuantity.__init__`
+        by ``dt``
+
+        :param identifier:
+            unique identifier of the state
+
+        :param name:
+            individual name of the state (does not need to be unique)
+
+        :param init:
+            initial value
+
+        :param dt:
+            time derivative
+        """
+        super(State, self).__init__(identifier, name, init)
+        self._dt = cast_to_sym(dt, 'dt')
+        self._conservation_law: Union[ConservationLaw, None] = None
+
+    def set_conservation_law(self, law: ConservationLaw) -> None:
+        """
+        Sets the conservation law of a state.
+
+        If a conservation law is set, the respective state will be replaced
+        by an algebraic formula according to the respective conservation law.
+
+        :param law:
+            linear sum of states that, if added to this state, remains
+            constant over time
+        """
+        if not isinstance(law, ConservationLaw):
+            raise TypeError(f'conservation law must have type ConservationLaw'
+                            f', was {type(law)}')
+
+        self._conservation_law = law
+
+    def set_dt(self,
+               dt: sp.Expr) -> None:
+        """
+        Sets the time derivative
+
+        :param dt:
+            time derivative
+        """
+        self._dt = cast_to_sym(dt, 'dt')
+
+    def get_dt(self) -> sp.Expr:
+        """
+        Gets the time derivative
+
+        :return:
+            time derivative
+        """
+        return self._dt
+
+    def get_free_symbols(self) -> Set[sp.Basic]:
+        """
+        Gets the set of free symbols in time derivative and initial
+        conditions
+
+        :return:
+            free symbols
+        """
+        return self._dt.free_symbols.union(self._value.free_symbols)
+
+    def has_conservation_law(self):
+        """
+        Checks whether this state has a conservation law assigned.
+ + :return: True if assigned, False otherwise + """ + return self._conservation_law is not None + + def get_x_rdata(self): + """ + Returns the expression that allows computation of x_rdata for this + state, accounting for conservation laws. + + :return: x_rdata expression + """ + if self._conservation_law is None: + return self.get_id() + else: + return self._conservation_law.get_x_rdata() + + def get_dx_rdata_dx_solver(self, state_id): + """ + Returns the expression that allows computation of + ``dx_rdata_dx_solver`` for this state, accounting for conservation + laws. + + :return: dx_rdata_dx_solver expression + """ + if self._conservation_law is None: + return sp.Integer(self._identifier == state_id) + else: + return -self._conservation_law.get_ncoeff(state_id) + + +class Observable(ModelQuantity): + """ + An Observable links model simulations to experimental measurements, + abbreviated by ``y``. + + :ivar _measurement_symbol: + sympy symbol used in the objective function to represent + measurements to this observable + + :ivar trafo: + observable transformation, only applies when evaluating objective + function or residuals + """ + + _measurement_symbol: Union[sp.Symbol, None] = None + + def __init__(self, + identifier: sp.Symbol, + name: str, + value: sp.Expr, + measurement_symbol: Optional[sp.Symbol] = None, + transformation: Optional[ObservableTransformation] = 'lin'): + """ + Create a new Observable instance. + + :param identifier: + unique identifier of the Observable + + :param name: + individual name of the Observable (does not need to be unique) + + :param value: + formula + + :param transformation: + observable transformation, only applies when evaluating objective + function or residuals + """ + super(Observable, self).__init__(identifier, name, value) + self._measurement_symbol = measurement_symbol + self._regularization_symbol = None + self.trafo = transformation + + def get_measurement_symbol(self) -> sp.Symbol: + if self._measurement_symbol is None: + self._measurement_symbol = generate_measurement_symbol( + self.get_id() + ) + + return self._measurement_symbol + + def get_regularization_symbol(self) -> sp.Symbol: + if self._regularization_symbol is None: + self._regularization_symbol = generate_regularization_symbol( + self.get_id() + ) + + return self._regularization_symbol + + +class EventObservable(Observable): + """ + An Event Observable links model simulations to event related experimental + measurements, abbreviated by ``z``. + + :ivar _event: + symbolic event identifier + """ + + def __init__(self, + identifier: sp.Symbol, + name: str, + value: sp.Expr, + event: sp.Symbol, + measurement_symbol: Optional[sp.Symbol] = None, + transformation: Optional[ObservableTransformation] = 'lin',): + """ + Create a new EventObservable instance. + + :param identifier: + See :py:meth:`Observable.__init__`. + + :param name: + See :py:meth:`Observable.__init__`. + + :param value: + See :py:meth:`Observable.__init__`. + + :param transformation: + See :py:meth:`Observable.__init__`. + + :param event: + Symbolic identifier of the corresponding event. + """ + super(EventObservable, self).__init__(identifier, name, value, + measurement_symbol, + transformation) + self._event: sp.Symbol = event + + def get_event(self) -> sp.Symbol: + """ + Get the symbolic identifier of the corresponding event. 
+
+        :return: symbolic identifier
+        """
+        return self._event
+
+
+class Sigma(ModelQuantity):
+    """
+    A Standard Deviation Sigma rescales the distance between simulations
+    and measurements when computing residuals or objective functions,
+    abbreviated by ``sigma{y,z}``.
+    """
+    def __init__(self,
+                 identifier: sp.Symbol,
+                 name: str,
+                 value: sp.Expr):
+        """
+        Create a new Standard Deviation instance.
+
+        :param identifier:
+            unique identifier of the Standard Deviation
+
+        :param name:
+            individual name of the Standard Deviation (does not need to
+            be unique)
+
+        :param value:
+            formula
+        """
+        if self.__class__.__name__ == "Sigma":
+            raise RuntimeError(
+                "This class is meant to be sub-classed, not used directly."
+            )
+        super(Sigma, self).__init__(identifier, name, value)
+
+
+class SigmaY(Sigma):
+    """
+    Standard deviation for observables
+    """
+
+
+class SigmaZ(Sigma):
+    """
+    Standard deviation for event observables
+    """
+
+
+class Expression(ModelQuantity):
+    """
+    An Expression is a recurring element in symbolic formulas. Specifying
+    expressions may yield more compact formulas, which may lead to
+    substantially shorter model compilation times, and may also reduce model
+    simulation time. Abbreviated by ``w``.
+    """
+    def __init__(self,
+                 identifier: sp.Symbol,
+                 name: str,
+                 value: sp.Expr):
+        """
+        Create a new Expression instance.
+
+        :param identifier:
+            unique identifier of the Expression
+
+        :param name:
+            individual name of the Expression (does not need to be unique)
+
+        :param value:
+            formula
+        """
+        super(Expression, self).__init__(identifier, name, value)
+
+
+class Parameter(ModelQuantity):
+    """
+    A Parameter is a free variable in the model with respect to which
+    sensitivities may be computed, abbreviated by ``p``.
+    """
+
+    def __init__(self,
+                 identifier: sp.Symbol,
+                 name: str,
+                 value: numbers.Number):
+        """
+        Create a new Parameter instance.
+
+        :param identifier:
+            unique identifier of the Parameter
+
+        :param name:
+            individual name of the Parameter (does not need to be
+            unique)
+
+        :param value:
+            numeric value
+        """
+        super(Parameter, self).__init__(identifier, name, value)
+
+
+class Constant(ModelQuantity):
+    """
+    A Constant is a fixed variable in the model with respect to which
+    sensitivities cannot be computed, abbreviated by ``k``.
+    """
+
+    def __init__(self,
+                 identifier: sp.Symbol,
+                 name: str,
+                 value: numbers.Number):
+        """
+        Create a new Constant instance.
+
+        :param identifier:
+            unique identifier of the Constant
+
+        :param name:
+            individual name of the Constant (does not need to be unique)
+
+        :param value:
+            numeric value
+        """
+        super(Constant, self).__init__(identifier, name, value)
+
+
+class LogLikelihood(ModelQuantity):
+    """
+    A LogLikelihood defines the distance between measurements and
+    simulations for a particular observable. The final LogLikelihood value
+    in the simulation will be the sum of all specified LogLikelihood
+    instances evaluated at all timepoints, abbreviated by ``Jy``.
+    """
+
+    def __init__(self,
+                 identifier: sp.Symbol,
+                 name: str,
+                 value: sp.Expr):
+        """
+        Create a new LogLikelihood instance.
+
+        :param identifier:
+            unique identifier of the LogLikelihood
+
+        :param name:
+            individual name of the LogLikelihood (does not need to be
+            unique)
+
+        :param value:
+            formula
+        """
+        if self.__class__.__name__ == "LogLikelihood":
+            raise RuntimeError(
+                "This class is meant to be sub-classed, not used directly."
+            )
+        super(LogLikelihood, self).__init__(identifier, name, value)
+
+
+class LogLikelihoodY(LogLikelihood):
+    """
+    Log-likelihood for observables
+    """
+
+
+class LogLikelihoodZ(LogLikelihood):
+    """
+    Log-likelihood for event observables
+    """
+
+
+class LogLikelihoodRZ(LogLikelihood):
+    """
+    Log-likelihood for event-observable regularization
+    """
+
+
+class Event(ModelQuantity):
+    """
+    An Event defines either an SBML event or a root of the argument of a
+    Heaviside function. The Heaviside functions will be tracked via the
+    vector ``h`` during simulation and are needed to inform the ODE solver
+    about a discontinuity in either the right-hand side or the states
+    themselves, causing a reinitialization of the solver.
+    """
+
+    def __init__(self,
+                 identifier: sp.Symbol,
+                 name: str,
+                 value: sp.Expr,
+                 state_update: Union[sp.Expr, None],
+                 initial_value: Optional[bool] = True):
+        """
+        Create a new Event instance.
+
+        :param identifier:
+            unique identifier of the Event
+
+        :param name:
+            individual name of the Event (does not need to be unique)
+
+        :param value:
+            formula for the root / trigger function
+
+        :param state_update:
+            formula for the bolus function (``None`` for Heaviside functions,
+            zero vector for events without bolus)
+
+        :param initial_value:
+            initial boolean value of the trigger function at t0. If set to
+            ``False``, events may trigger at ``t==t0``, otherwise not.
+        """
+        super(Event, self).__init__(identifier, name, value)
+        # add the Event-specific components
+        self._state_update = state_update
+        self._initial_value = initial_value
+
+    def get_initial_value(self) -> bool:
+        """
+        Return the initial value for the root function.
+
+        :return:
+            initial value formula
+        """
+        return self._initial_value
+
+    def __eq__(self, other):
+        """
+        Check equality of events at the level of trigger/root functions, as
+        we need to collect unique root functions for ``roots.cpp``
+        """
+        return self.get_val() == other.get_val() and \
+            (self.get_initial_value() == other.get_initial_value())
diff --git a/python/sdist/amici/pandas.py b/python/sdist/amici/pandas.py
deleted file mode 120000
index 6e1bc9cfff..0000000000
--- a/python/sdist/amici/pandas.py
+++ /dev/null
@@ -1 +0,0 @@
-../../amici/pandas.py
\ No newline at end of file
diff --git a/python/sdist/amici/pandas.py b/python/sdist/amici/pandas.py
new file mode 100644
index 0000000000..ad11278630
--- /dev/null
+++ b/python/sdist/amici/pandas.py
@@ -0,0 +1,743 @@
+"""
+Pandas Wrappers
+---------------
+This module contains convenience wrappers that allow for easy interconversion
+between C++ objects from :mod:`amici.amici` and pandas DataFrames.
+"""
+
+import pandas as pd
+import numpy as np
+import math
+import copy
+
+from typing import List, Union, Optional, Dict, SupportsFloat
+from .numpy import ExpDataView
+import amici
+
+__all__ = [
+    'get_expressions_as_dataframe',
+    'getEdataFromDataFrame',
+    'getDataObservablesAsDataFrame',
+    'getSimulationObservablesAsDataFrame',
+    'getSimulationStatesAsDataFrame',
+    'getResidualsAsDataFrame'
+]
+
+ExpDatas = Union[
+    List[amici.amici.ExpData], List[amici.ExpDataPtr],
+    amici.amici.ExpData, amici.ExpDataPtr
+]
+ReturnDatas = Union[
+    List[amici.ReturnDataView], amici.ReturnDataView
+]
+
+AmiciModel = Union[amici.ModelPtr, amici.Model]
+
+
+def _process_edata_list(edata_list: ExpDatas) -> List[amici.amici.ExpData]:
+    """
+    Maps single instances of :class:`amici.amici.ExpData` to lists of
+    :class:`amici.amici.ExpData`
+
+    :param edata_list:
+        list of instances or single instance
+
+    :return:
+        list
of instance(s) + """ + if isinstance(edata_list, (amici.amici.ExpData, amici.ExpDataPtr)): + return [edata_list] + else: + return edata_list + + +def _process_rdata_list(rdata_list: ReturnDatas) -> List[amici.ReturnDataView]: + """ + Maps single instances of :class:`amici.ReturnData` to lists of + :class:`amici.ReturnData` + + :param rdata_list: + list of instances or single instance + + :return: + list of instance(s) + """ + if isinstance(rdata_list, amici.ReturnDataView): + return [rdata_list] + else: + return rdata_list + + +def getDataObservablesAsDataFrame( + model: AmiciModel, + edata_list: ExpDatas, + by_id: Optional[bool] = False) -> pd.DataFrame: + """ + Write Observables from experimental data as DataFrame. + + :param model: + Model instance. + + :param edata_list: + list of ExpData instances with experimental data. + May also be a single ExpData instance. + + :param by_id: + If True, uses observable ids as column names in the generated + DataFrame, otherwise the possibly more descriptive observable names + are used. + + :return: + pandas DataFrame with conditions/timepoints as rows and observables as + columns. + """ + edata_list = _process_edata_list(edata_list) + + # list of all column names using either ids or names + cols = _get_extended_observable_cols(model, by_id=by_id) + + # aggregate records + dicts = [] + for edata in edata_list: + npdata = ExpDataView(edata) + for i_time, timepoint in enumerate(edata.getTimepoints()): + datadict = { + 'time': timepoint, + 'datatype': 'data' + } + # add observables and noises + for i_obs, obs in enumerate(_get_names_or_ids( + model, 'Observable', by_id=by_id)): + datadict[obs] = npdata['observedData'][i_time, i_obs] + datadict[obs + '_std'] = \ + npdata['observedDataStdDev'][i_time, i_obs] + + # add conditions + _fill_conditions_dict(datadict, model, edata, by_id=by_id) + + dicts.append(datadict) + + return pd.DataFrame.from_records(dicts, columns=cols) + + +def getSimulationObservablesAsDataFrame( + model: amici.Model, + edata_list: ExpDatas, + rdata_list: ReturnDatas, + by_id: Optional[bool] = False +) -> pd.DataFrame: + """ + Write Observables from simulation results as DataFrame. + + :param model: + Model instance. + + :param edata_list: + list of ExpData instances with experimental data. + May also be a single ExpData instance. + + :param rdata_list: + list of ReturnData instances corresponding to ExpData. + May also be a single ReturnData instance. + + :param by_id: + If True, ids are used as identifiers, otherwise the possibly more + descriptive names. + + :return: + pandas DataFrame with conditions/timepoints as rows and observables as + columns. 
+ """ + edata_list = _process_edata_list(edata_list) + rdata_list = _process_rdata_list(rdata_list) + + # list of all column names using either names or ids + cols = _get_extended_observable_cols(model, by_id=by_id) + + # aggregate records + dicts = [] + for edata, rdata in zip(edata_list, rdata_list): + for i_time, timepoint in enumerate(rdata['t']): + datadict = { + 'time': timepoint, + 'datatype': 'simulation', + } + # append simulations + for i_obs, obs in enumerate(_get_names_or_ids( + model, 'Observable', by_id=by_id)): + datadict[obs] = rdata['y'][i_time, i_obs] + datadict[obs + '_std'] = rdata['sigmay'][i_time, i_obs] + + # use edata to fill conditions columns + _fill_conditions_dict(datadict, model, edata, by_id=by_id) + + # append to dataframe + dicts.append(datadict) + + return pd.DataFrame.from_records(dicts, columns=cols) + + +def getSimulationStatesAsDataFrame( + model: amici.Model, + edata_list: ExpDatas, + rdata_list: ReturnDatas, + by_id: Optional[bool] = False) -> pd.DataFrame: + """ + Get model state according to lists of ReturnData and ExpData. + + :param model: + Model instance. + + :param edata_list: + list of ExpData instances with experimental data. + May also be a single ExpData instance. + + :param rdata_list: + list of ReturnData instances corresponding to ExpData. + May also be a single ReturnData instance. + + :param by_id: + If True, ids are used as identifiers, otherwise the possibly more + descriptive names. + + :return: pandas DataFrame with conditions/timepoints as rows and + state variables as columns. + """ + edata_list = _process_edata_list(edata_list) + rdata_list = _process_rdata_list(rdata_list) + + # get conditions and state column names by name or id + cols = _get_state_cols(model, by_id=by_id) + + # aggregate records + dicts = [] + for edata, rdata in zip(edata_list, rdata_list): + for i_time, timepoint in enumerate(rdata['t']): + datadict = { + 'time': timepoint, + } + + # append states + for i_state, state in enumerate( + _get_names_or_ids(model, 'State', by_id=by_id)): + datadict[state] = rdata['x'][i_time, i_state] + + # use data to fill condition columns + _fill_conditions_dict(datadict, model, edata, by_id=by_id) + + # append to dataframe + dicts.append(datadict) + + return pd.DataFrame.from_records(dicts, columns=cols) + + +def get_expressions_as_dataframe( + model: amici.Model, + edata_list: ExpDatas, + rdata_list: ReturnDatas, + by_id: Optional[bool] = False) -> pd.DataFrame: + """ + Get values of model expressions from lists of ReturnData as DataFrame. + + :param model: + Model instance. + + :param edata_list: + list of ExpData instances with experimental data. + May also be a single ExpData instance. + + :param rdata_list: + list of ReturnData instances corresponding to ExpData. + May also be a single ReturnData instance. + + :param by_id: + If True, ids are used as identifiers, otherwise the possibly more + descriptive names. + + :return: pandas DataFrame with conditions/timepoints as rows and + model expressions as columns. 
+ """ + edata_list = _process_edata_list(edata_list) + rdata_list = _process_rdata_list(rdata_list) + + # get conditions and state column names by name or id + cols = _get_expression_cols(model, by_id=by_id) + + # aggregate records + dicts = [] + for edata, rdata in zip(edata_list, rdata_list): + for i_time, timepoint in enumerate(rdata['t']): + datadict = { + 'time': timepoint, + } + + # append expressions + for i_expr, expr in enumerate( + _get_names_or_ids(model, 'Expression', by_id=by_id)): + datadict[expr] = rdata['w'][i_time, i_expr] + + # use data to fill condition columns + _fill_conditions_dict(datadict, model, edata, by_id=by_id) + + # append to dataframe + dicts.append(datadict) + + return pd.DataFrame.from_records(dicts, columns=cols) + + +def getResidualsAsDataFrame(model: amici.Model, + edata_list: ExpDatas, + rdata_list: ReturnDatas, + by_id: Optional[bool] = False) -> pd.DataFrame: + """ + Convert a list of ReturnData and ExpData to pandas DataFrame with + residuals. + + :param model: + Model instance. + + :param edata_list: + list of ExpData instances with experimental data. May also be a + single ExpData instance. + + :param rdata_list: + list of ReturnData instances corresponding to ExpData. May also be a + single ReturnData instance. + + :param by_id: bool, optional (default = False) + If True, ids are used as identifiers, otherwise the possibly more + descriptive names. + + :return: + pandas DataFrame with conditions and residuals. + """ + edata_list = _process_edata_list(edata_list) + rdata_list = _process_rdata_list(rdata_list) + + # create observable and simulation dataframes + df_edata = getDataObservablesAsDataFrame( + model, edata_list, by_id=by_id) + df_rdata = getSimulationObservablesAsDataFrame( + model, edata_list, rdata_list, by_id=by_id) + + # get all column names using names or ids + cols = _get_observable_cols(model, by_id=by_id) + + # aggregate records + dicts = [] + for row in df_rdata.index: + datadict = { + 'time': df_rdata.loc[row]['time'], + 't_presim': df_rdata.loc[row]['t_presim'] + } + + # iterate over observables + for obs in _get_names_or_ids(model, 'Observable', by_id=by_id): + # compute residual and append to dict + datadict[obs] = abs( + (df_edata.loc[row][obs] - df_rdata.loc[row][obs]) / + df_rdata.loc[row][obs + '_std']) + + # iterate over fixed parameters + for par in _get_names_or_ids(model, 'FixedParameter', by_id=by_id): + # fill in conditions + datadict[par] = df_rdata.loc[row][par] + datadict[par + '_preeq'] = df_rdata.loc[row][par + '_preeq'] + datadict[par + '_presim'] = df_rdata.loc[row][par + '_presim'] + + # append to dataframe + dicts.append(datadict) + + return pd.DataFrame.from_records(dicts, columns=cols) + + +def _fill_conditions_dict(datadict: Dict[str, float], + model: AmiciModel, + edata: amici.amici.ExpData, + by_id: bool) -> Dict[str, float]: + """ + Helper function that fills in condition parameters from model and + edata. + + :param datadict: + dictionary in which condition parameters will be inserted + as key value pairs. + + :param model: + Model instance. + + :param edata: + ExpData instance. + + :param by_id: + If True, ids are used as identifiers, otherwise the possibly more + descriptive names. + + :return: + dictionary with filled condition parameters. 
+ + """ + datadict['condition_id'] = edata.id + datadict['t_presim'] = edata.t_presim + + for i_par, par in enumerate( + _get_names_or_ids(model, 'FixedParameter', by_id=by_id)): + if len(edata.fixedParameters): + datadict[par] = edata.fixedParameters[i_par] + else: + datadict[par] = model.getFixedParameters()[i_par] + + if len(edata.fixedParametersPreequilibration): + datadict[par + '_preeq'] = \ + edata.fixedParametersPreequilibration[i_par] + else: + datadict[par + '_preeq'] = np.nan + + if len(edata.fixedParametersPresimulation): + datadict[par + '_presim'] = \ + edata.fixedParametersPresimulation[i_par] + else: + datadict[par + '_presim'] = np.nan + return datadict + + +def _get_extended_observable_cols(model: AmiciModel, + by_id: bool) -> List[str]: + """ + Construction helper for extended observable dataframe headers. + + :param model: + Model instance. + + :param by_id: + If True, ids are used as identifiers, otherwise the possibly more + descriptive names. + + :return: + column names as list. + """ + return \ + ['condition_id', 'time', 'datatype', 't_presim'] + \ + _get_names_or_ids(model, 'FixedParameter', by_id=by_id) + \ + [name + '_preeq' for name in + _get_names_or_ids(model, 'FixedParameter', by_id=by_id)] + \ + [name + '_presim' for name in + _get_names_or_ids(model, 'FixedParameter', by_id=by_id)] + \ + _get_names_or_ids(model, 'Observable', by_id=by_id) + \ + [name + '_std' for name in + _get_names_or_ids(model, 'Observable', by_id=by_id)] + + +def _get_observable_cols(model: AmiciModel, + by_id: bool) -> List[str]: + """ + Construction helper for observable dataframe headers. + + :param model: + Model instance. + + :param by_id: + If True, ids are used as identifiers, otherwise the possibly more + descriptive names. + + :return: + column names as list. + """ + return \ + ['condition_id', 'time', 't_presim'] + \ + _get_names_or_ids(model, 'FixedParameter', by_id=by_id) + \ + [name + '_preeq' for name in + _get_names_or_ids(model, 'FixedParameter', by_id=by_id)] + \ + [name + '_presim' for name in + _get_names_or_ids(model, 'FixedParameter', by_id=by_id)] + \ + _get_names_or_ids(model, 'Observable', by_id=by_id) + + +def _get_state_cols(model: AmiciModel, + by_id: bool) -> List[str]: + """ + Construction helper for state dataframe headers. + + :param model: + Model instance. + + :param by_id: + If True, ids are used as identifiers, otherwise the possibly more + descriptive names. + + :return: + column names as list. + """ + return \ + ['condition_id', 'time', 't_presim'] + \ + _get_names_or_ids(model, 'FixedParameter', by_id=by_id) + \ + [name + '_preeq' for name in + _get_names_or_ids(model, 'FixedParameter', by_id=by_id)] + \ + [name + '_presim' for name in + _get_names_or_ids(model, 'FixedParameter', by_id=by_id)] + \ + _get_names_or_ids(model, 'State', by_id=by_id) + + +def _get_expression_cols(model: AmiciModel, by_id: bool) -> List[str]: + """Construction helper for expression dataframe headers. + + :param model: + Model instance. + + :param by_id: + If True, ids are used as identifiers, otherwise the possibly more + descriptive names. + + :return: + column names as list. 
+ """ + return \ + ['condition_id', 'time', 't_presim'] + \ + _get_names_or_ids(model, 'FixedParameter', by_id=by_id) + \ + [name + '_preeq' for name in + _get_names_or_ids(model, 'FixedParameter', by_id=by_id)] + \ + [name + '_presim' for name in + _get_names_or_ids(model, 'FixedParameter', by_id=by_id)] + \ + _get_names_or_ids(model, 'Expression', by_id=by_id) + + +def _get_names_or_ids(model: AmiciModel, + variable: str, + by_id: bool) -> List[str]: + """ + Obtains a unique list of identifiers for the specified variable. + First tries model.getVariableNames and then uses model.getVariableIds. + + :param model: + Model instance. + + :param variable: + variable name. + + :param by_id: + If True, ids are used as identifiers, otherwise first the possibly + more descriptive names are used. + + :return: + column names as list. + """ + # check whether variable type permitted + variable_options = [ + 'Parameter', 'FixedParameter', 'Observable', 'State', 'Expression' + ] + if variable not in variable_options: + raise ValueError('Variable must be in ' + str(variable_options)) + + # extract attributes + names = list(getattr(model, f'get{variable}Names')()) + ids = list(getattr(model, f'get{variable}Ids')()) + + # find out if model has names and ids + has_names = getattr(model, f'has{variable}Names')() + has_ids = getattr(model, f'has{variable}Ids')() + + # extract labels + if not by_id and has_names and len(set(names)) == len(names): + # use variable names + return names + elif has_ids: + # use variable ids + return ids + else: + # unable to create unique labels + if by_id: + msg = f"Model {variable} ids are not set." + else: + msg = f"Model {variable} names are not unique and " \ + f"{variable} ids are not set." + raise ValueError(msg) + + +def _get_specialized_fixed_parameters( + model: AmiciModel, + condition: Union[Dict[str, SupportsFloat], pd.Series], + overwrite: Union[Dict[str, SupportsFloat], pd.Series], + by_id: bool +) -> List[float]: + """ + Copies values in condition and overwrites them according to key + value pairs specified in overwrite. + + :param model: + Model instance. + :param condition: + fixedParameter values. + :param overwrite: + dict specifying which values in condition are to be replaced. + :param by_id: + bool + If True, ids are used as identifiers, otherwise the possibly more + descriptive names. + + :return: + overwritten FixedParameter as list. + """ + cond = copy.deepcopy(condition) + for field in overwrite: + cond[field] = overwrite[field] + return [float(cond[name]) for name in _get_names_or_ids( + model, 'FixedParameter', by_id=by_id)] + + +def constructEdataFromDataFrame( + df: pd.DataFrame, + model: AmiciModel, + condition: pd.Series, + by_id: Optional[bool] = False +) -> amici.amici.ExpData: + """ + Constructs an ExpData instance according to the provided Model + and DataFrame. + + :param df: + pd.DataFrame with Observable Names/Ids as columns. + Standard deviations may be specified by appending '_std' as suffix. + + :param model: + Model instance. + + :param condition: + pd.Series with FixedParameter Names/Ids as columns. + Preequilibration conditions may be specified by appending + '_preeq' as suffix. Presimulation conditions may be specified by + appending '_presim' as suffix. + + :param by_id: + Indicate whether in the arguments, column headers are based on ids or + names. This should correspond to the way `df` and `condition` was + created in the first place. + + :return: + ExpData instance. 
+ """ + # initialize edata + edata = amici.ExpData(model.get()) + + # timepoints + df = df.sort_values(by='time', ascending=True) + edata.setTimepoints(df['time'].values.astype(float)) + + # get fixed parameters from condition + overwrite_preeq = {} + overwrite_presim = {} + for par in list(_get_names_or_ids(model, 'FixedParameter', by_id=by_id)): + if par + '_preeq' in condition.keys() \ + and not math.isnan(condition[par + '_preeq'].astype(float)): + overwrite_preeq[par] = condition[par + '_preeq'].astype(float) + if par + '_presim' in condition.keys() \ + and not math.isnan(condition[par + '_presim'].astype(float)): + overwrite_presim[par] = condition[par + '_presim'].astype(float) + + # fill in fixed parameters + edata.fixedParameters = condition[ + _get_names_or_ids(model, 'FixedParameter', by_id=by_id) + ].astype(float).values + + # fill in preequilibration parameters + if any([overwrite_preeq[key] != condition[key] for key in + overwrite_preeq]): + edata.fixedParametersPreequilibration = \ + _get_specialized_fixed_parameters( + model, condition, overwrite_preeq, by_id=by_id) + elif len(overwrite_preeq): + edata.fixedParametersPreequilibration = copy.deepcopy( + edata.fixedParameters + ) + + # fill in presimulation parameters + if any([overwrite_presim[key] != condition[key] for key in + overwrite_presim.keys()]): + edata.fixedParametersPresimulation = _get_specialized_fixed_parameters( + model, condition, overwrite_presim, by_id=by_id + ) + elif len(overwrite_presim.keys()): + edata.fixedParametersPresimulation = copy.deepcopy( + edata.fixedParameters + ) + + # fill in presimulation time + if 't_presim' in condition.keys(): + edata.t_presim = float(condition['t_presim']) + + # fill in data and stds + for obs_index, obs in enumerate( + _get_names_or_ids(model, 'Observable', by_id=by_id)): + if obs in df.keys(): + edata.setObservedData(df[obs].values.astype(float), obs_index) + if obs + '_std' in df.keys(): + edata.setObservedDataStdDev( + df[obs + '_std'].values.astype(float), obs_index + ) + + return edata + + +def getEdataFromDataFrame( + model: AmiciModel, + df: pd.DataFrame, + by_id: Optional[bool] = False +) -> List[amici.amici.ExpData]: + """ + Constructs a ExpData instances according to the provided Model and + DataFrame. + + :param df: + dataframe with Observable Names/Ids, FixedParameter Names/Ids + and time as columns. Standard deviations may be specified by + appending '_std' as suffix. Preequilibration fixedParameters may be + specified by appending '_preeq' as suffix. Presimulation + fixedParameters may be specified by appending '_presim' as suffix. + Presimulation time may be specified as 't_presim' column. + + :param model: + Model instance. + + :param by_id: + Whether the column names in `df` are based on ids or names, + corresponding to how the dataframe was created in the first place. + + :return: + list of ExpData instances. 
+ """ + edata_list = [] + + # aggregate features that define a condition + + # fixed parameters + condition_parameters = _get_names_or_ids(model, 'FixedParameter', + by_id=by_id) + # preeq and presim parameters + for par in _get_names_or_ids(model, 'FixedParameter', by_id=by_id): + if par + '_preeq' in df.columns: + condition_parameters.append(par + '_preeq') + if par + '_presim' in df.columns: + condition_parameters.append(par + '_presim') + # presimulation time + if 't_presim' in df.columns: + condition_parameters.append('t_presim') + # drop duplicates to create final conditions + conditions = df[condition_parameters].drop_duplicates() + + # iterate over conditions + for ir, row in conditions.iterrows(): + # subselect rows that match condition + selected = np.ones((len(df),), dtype=bool) + for par_label, par in row.iteritems(): + if math.isnan(par): + selected = selected & np.isnan( + df[par_label].astype(float).values + ) + else: + selected = selected & (df[par_label] == par) + edata_df = df[selected] + + edata_list.append( + constructEdataFromDataFrame(edata_df, model, row, by_id=by_id) + ) + + return edata_list diff --git a/python/sdist/amici/parameter_mapping.py b/python/sdist/amici/parameter_mapping.py deleted file mode 120000 index 45a20acfc3..0000000000 --- a/python/sdist/amici/parameter_mapping.py +++ /dev/null @@ -1 +0,0 @@ -../../amici/parameter_mapping.py \ No newline at end of file diff --git a/python/sdist/amici/parameter_mapping.py b/python/sdist/amici/parameter_mapping.py new file mode 100644 index 0000000000..ed3eaa2b08 --- /dev/null +++ b/python/sdist/amici/parameter_mapping.py @@ -0,0 +1,427 @@ +""" +Parameter mapping +----------------- + +When performing parameter inference, often parameters need to be mapped from +simulation to estimation parameters, and parameters can differ between +conditions. This can be handled using the `ParameterMapping`. + +Note +~~~~ + +While the parameter mapping can be used directly with AMICI, it was developed +for usage together with PEtab, for which the whole workflow of generating +the mapping is automatized. +""" +from __future__ import annotations + +import numbers +import warnings +from typing import Any, Dict, List, Union, Set +from collections.abc import Sequence +from itertools import chain + +import amici +import numpy as np +from petab.C import * # noqa: F403 + + +SingleParameterMapping = Dict[str, Union[numbers.Number, str]] +SingleScaleMapping = Dict[str, str] +AmiciModel = Union[amici.Model, amici.ModelPtr] + + +class ParameterMappingForCondition: + """Parameter mapping for condition. + + Contains mappings for free parameters, fixed parameters, and fixed + preequilibration parameters, both for parameters and scales. + + In the scale mappings, for each simulation parameter the scale + on which the value is passed (and potentially gradients are to be + returned) is given. In the parameter mappings, for each simulation + parameter a corresponding optimization parameter (or a numeric value) + is given. + + If a mapping is not passed, the parameter mappings are assumed to be empty, + and if a scale mapping is not passed, all scales are set to linear. + + :param map_sim_var: + Mapping for free simulation parameters. + :param scale_map_sim_var: + Scales for free simulation parameters. + :param map_preeq_fix: + Mapping for fixed preequilibration parameters. + :param scale_map_preeq_fix: + Scales for fixed preequilibration parameters. + :param map_sim_fix: + Mapping for fixed simulation parameters. 
+ :param scale_map_sim_fix: + Scales for fixed simulation parameters. + """ + + def __init__( + self, + map_sim_var: SingleParameterMapping = None, + scale_map_sim_var: SingleScaleMapping = None, + map_preeq_fix: SingleParameterMapping = None, + scale_map_preeq_fix: SingleScaleMapping = None, + map_sim_fix: SingleParameterMapping = None, + scale_map_sim_fix: SingleScaleMapping = None, + ): + if map_sim_var is None: + map_sim_var = {} + self.map_sim_var = map_sim_var + + if scale_map_sim_var is None: + scale_map_sim_var = {key: LIN for key in map_sim_var} + self.scale_map_sim_var = scale_map_sim_var + + if map_preeq_fix is None: + map_preeq_fix = {} + self.map_preeq_fix = map_preeq_fix + + if scale_map_preeq_fix is None: + scale_map_preeq_fix = {key: LIN for key in map_preeq_fix} + self.scale_map_preeq_fix = scale_map_preeq_fix + + if map_sim_fix is None: + map_sim_fix = {} + self.map_sim_fix = map_sim_fix + + if scale_map_sim_fix is None: + scale_map_sim_fix = {key: LIN for key in map_sim_fix} + self.scale_map_sim_fix = scale_map_sim_fix + + def __repr__(self): + return (f"{self.__class__.__name__}(" + f"map_sim_var={repr(self.map_sim_var)}," + f"scale_map_sim_var={repr(self.scale_map_sim_var)}," + f"map_preeq_fix={repr(self.map_preeq_fix)}," + f"scale_map_preeq_fix={repr(self.scale_map_preeq_fix)}," + f"map_sim_fix={repr(self.map_sim_fix)}," + f"scale_map_sim_fix={repr(self.scale_map_sim_fix)})") + + @property + def free_symbols(self) -> Set[str]: + """Get IDs of all (symbolic) parameters present in this mapping""" + return { + p for p in chain( + self.map_sim_var.values(), + self.map_preeq_fix.values(), + self.map_sim_fix.values() + ) + if isinstance(p, str) + } + + +class ParameterMapping(Sequence): + r"""Parameter mapping for multiple conditions. + + This can be used like a list of :class:`ParameterMappingForCondition`\ s. + + :param parameter_mappings: + List of parameter mappings for specific conditions. + """ + + def __init__( + self, + parameter_mappings: List[ParameterMappingForCondition] = None + ): + super().__init__() + if parameter_mappings is None: + parameter_mappings = [] + self.parameter_mappings = parameter_mappings + + def __iter__(self): + yield from self.parameter_mappings + + def __getitem__( + self, item + ) -> Union[ParameterMapping, ParameterMappingForCondition]: + result = self.parameter_mappings[item] + if isinstance(result, ParameterMappingForCondition): + return result + return ParameterMapping(result) + + def __len__(self): + return len(self.parameter_mappings) + + def append( + self, + parameter_mapping_for_condition: ParameterMappingForCondition + ): + """Append a condition specific parameter mapping.""" + self.parameter_mappings.append(parameter_mapping_for_condition) + + def __repr__(self): + return f"{self.__class__.__name__}({repr(self.parameter_mappings)})" + + @property + def free_symbols(self) -> Set[str]: + """Get IDs of all (symbolic) parameters present in this mapping""" + return set.union(*(mapping.free_symbols for mapping in self)) + + +def fill_in_parameters( + edatas: List[amici.ExpData], + problem_parameters: Dict[str, numbers.Number], + scaled_parameters: bool, + parameter_mapping: ParameterMapping, + amici_model: AmiciModel +) -> None: + """Fill fixed and dynamic parameters into the edatas (in-place). + + :param edatas: + List of experimental datas :class:`amici.amici.ExpData` with + everything except parameters filled. + :param problem_parameters: + Problem parameters as parameterId=>value dict. Only + parameters included here will be set. 
Remaining parameters will + be used as currently set in `amici_model`. + :param scaled_parameters: + If True, problem_parameters are assumed to be on the scale provided + in the parameter mapping. If False, they are assumed + to be in linear scale. + :param parameter_mapping: + Parameter mapping for all conditions. + :param amici_model: + AMICI model. + """ + if unused_parameters := (set(problem_parameters.keys()) + - parameter_mapping.free_symbols): + warnings.warn("The following problem parameters were not used: " + + str(unused_parameters), RuntimeWarning) + + for edata, mapping_for_condition in zip(edatas, parameter_mapping): + fill_in_parameters_for_condition( + edata, problem_parameters, scaled_parameters, + mapping_for_condition, amici_model) + + +def fill_in_parameters_for_condition( + edata: amici.ExpData, + problem_parameters: Dict[str, numbers.Number], + scaled_parameters: bool, + parameter_mapping: ParameterMappingForCondition, + amici_model: AmiciModel) -> None: + """Fill fixed and dynamic parameters into the edata for condition + (in-place). + + :param edata: + Experimental data object to fill parameters into. + :param problem_parameters: + Problem parameters as parameterId=>value dict. Only + parameters included here will be set. Remaining parameters will + be used as already set in `amici_model` and `edata`. + :param scaled_parameters: + If True, problem_parameters are assumed to be on the scale provided + in the parameter mapping. If False, they + are assumed to be in linear scale. + :param parameter_mapping: + Parameter mapping for current condition. + :param amici_model: + AMICI model + """ + map_sim_var = parameter_mapping.map_sim_var + scale_map_sim_var = parameter_mapping.scale_map_sim_var + map_preeq_fix = parameter_mapping.map_preeq_fix + scale_map_preeq_fix = parameter_mapping.scale_map_preeq_fix + map_sim_fix = parameter_mapping.map_sim_fix + scale_map_sim_fix = parameter_mapping.scale_map_sim_fix + + # Parameter mapping may contain parameter_ids as values, these *must* + # be replaced + + def _get_par(model_par, value, mapping): + """Replace parameter IDs in mapping dicts by values from + problem_parameters where necessary""" + if isinstance(value, str): + try: + # estimated parameter + return problem_parameters[value] + except KeyError: + # condition table overrides must have been handled already, + # e.g. by the PEtab parameter mapping, but parameters from + # InitialAssignments may still be present. + return _get_par(value, mapping[value], mapping) + if model_par in problem_parameters: + # user-provided + return problem_parameters[model_par] + # prevent nan-propagation in derivative + if np.isnan(value): + return 0.0 + # constant value + return value + + map_preeq_fix = {key: _get_par(key, val, map_preeq_fix) + for key, val in map_preeq_fix.items()} + map_sim_fix = {key: _get_par(key, val, map_sim_fix) + for key, val in map_sim_fix.items()} + map_sim_var = {key: _get_par(key, val, dict(map_sim_fix, **map_sim_var)) + for key, val in map_sim_var.items()} + + # If necessary, (un)scale parameters + if scaled_parameters: + unscale_parameters_dict(map_preeq_fix, scale_map_preeq_fix) + unscale_parameters_dict(map_sim_fix, scale_map_sim_fix) + if not scaled_parameters: + # We scale all parameters to the scale they are estimated on, and pass + # that information to amici via edata.{parameters,pscale}. + # The scaling is necessary to obtain correct derivatives. 
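+        # (E.g., a parameter estimated on log10 scale with linear value 100.0
+        # is passed to AMICI as 2.0, and gradients are returned w.r.t. the
+        # log10 value.)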
+ scale_parameters_dict(map_sim_var, scale_map_sim_var) + # We can skip preequilibration parameters, because they are identical + # with simulation parameters, and only the latter are used from here + # on. + + ########################################################################## + # variable parameters and parameter scale + + # parameter list from mapping dict + parameters = [map_sim_var[par_id] + for par_id in amici_model.getParameterIds()] + + # scales list from mapping dict + scales = [petab_to_amici_scale(scale_map_sim_var[par_id]) + for par_id in amici_model.getParameterIds()] + + # plist + plist = [ + ip for ip, par_id in enumerate(amici_model.getParameterIds()) + if isinstance(parameter_mapping.map_sim_var[par_id], str) + ] + + if parameters: + edata.parameters = np.asarray(parameters, dtype=float) + + if scales: + edata.pscale = amici.parameterScalingFromIntVector(scales) + + if plist: + edata.plist = plist + + ########################################################################## + # fixed parameters preequilibration + if map_preeq_fix: + fixed_pars_preeq = [map_preeq_fix[par_id] + for par_id in amici_model.getFixedParameterIds()] + edata.fixedParametersPreequilibration = fixed_pars_preeq + + ########################################################################## + # fixed parameters simulation + if map_sim_fix: + fixed_pars_sim = [map_sim_fix[par_id] + for par_id in amici_model.getFixedParameterIds()] + edata.fixedParameters = fixed_pars_sim + + +def petab_to_amici_scale(petab_scale: str) -> int: + """Convert petab scale id to amici scale id.""" + if petab_scale == LIN: + return amici.ParameterScaling_none + if petab_scale == LOG10: + return amici.ParameterScaling_log10 + if petab_scale == LOG: + return amici.ParameterScaling_ln + raise ValueError(f"PEtab scale not recognized: {petab_scale}") + + +def amici_to_petab_scale(amici_scale: int) -> str: + """Convert amici scale id to petab scale id.""" + if amici_scale == amici.ParameterScaling_none: + return LIN + if amici_scale == amici.ParameterScaling_log10: + return LOG10 + if amici_scale == amici.ParameterScaling_ln: + return LOG + raise ValueError(f"AMICI scale not recognized: {amici_scale}") + + +def scale_parameter(value: numbers.Number, + petab_scale: str) -> numbers.Number: + """Bring parameter from linear scale to target scale. + + :param value: + Value to scale + :param petab_scale: + Target scale of ``value`` + + :return: + ``value`` on target scale + """ + if petab_scale == LIN: + return value + if petab_scale == LOG10: + return np.log10(value) + if petab_scale == LOG: + return np.log(value) + raise ValueError(f"Unknown parameter scale {petab_scale}. " + f"Must be from {(LIN, LOG, LOG10)}") + + +def unscale_parameter(value: numbers.Number, + petab_scale: str) -> numbers.Number: + """Bring parameter from scale to linear scale. + + :param value: + Value to scale + :param petab_scale: + Target scale of ``value`` + + :return: + ``value`` on linear scale + """ + if petab_scale == LIN: + return value + if petab_scale == LOG10: + return np.power(10, value) + if petab_scale == LOG: + return np.exp(value) + raise ValueError(f"Unknown parameter scale {petab_scale}. " + f"Must be from {(LIN, LOG, LOG10)}") + + +def scale_parameters_dict( + value_dict: Dict[Any, numbers.Number], + petab_scale_dict: Dict[Any, str]) -> None: + """ + Bring parameters from linear scale to target scale. + + Bring values in ``value_dict`` from linear scale to the scale + provided in ``petab_scale_dict`` (in-place). 
+    Both arguments are expected to have the same length and matching keys.
+
+    :param value_dict:
+        Values to scale
+
+    :param petab_scale_dict:
+        Target scales of ``values``
+    """
+    if value_dict.keys() != petab_scale_dict.keys():
+        raise AssertionError("Keys don't match.")
+
+    for key, value in value_dict.items():
+        value_dict[key] = scale_parameter(value, petab_scale_dict[key])
+
+
+def unscale_parameters_dict(
+        value_dict: Dict[Any, numbers.Number],
+        petab_scale_dict: Dict[Any, str]) -> None:
+    """
+    Bring parameters from target scale to linear scale.
+
+    Bring values in ``value_dict`` from the scale provided in
+    ``petab_scale_dict`` to linear scale (in-place).
+    Both arguments are expected to have the same length and matching keys.
+
+    :param value_dict:
+        Values to scale
+
+    :param petab_scale_dict:
+        Current scales of ``values``
+    """
+    if value_dict.keys() != petab_scale_dict.keys():
+        raise AssertionError("Keys don't match.")
+
+    for key, value in value_dict.items():
+        value_dict[key] = unscale_parameter(value, petab_scale_dict[key])
diff --git a/python/sdist/amici/petab_import.py b/python/sdist/amici/petab_import.py
deleted file mode 120000
index d524fe81f1..0000000000
--- a/python/sdist/amici/petab_import.py
+++ /dev/null
@@ -1 +0,0 @@
-../../amici/petab_import.py
\ No newline at end of file
diff --git a/python/sdist/amici/petab_import.py b/python/sdist/amici/petab_import.py
new file mode 100644
index 0000000000..61909340c3
--- /dev/null
+++ b/python/sdist/amici/petab_import.py
@@ -0,0 +1,830 @@
+"""
+PEtab Import
+------------
+Import a model in the :mod:`petab` (https://github.com/PEtab-dev/PEtab) format
+into AMICI.
+"""
+import argparse
+import importlib
+import logging
+import math
+import os
+import re
+import shutil
+import tempfile
+from collections import OrderedDict
+from itertools import chain
+from pathlib import Path
+from typing import Dict, List, Optional, Tuple, Union
+from warnings import warn
+
+import libsbml
+import pandas as pd
+import petab
+import sympy as sp
+from petab.C import *
+from petab.parameters import get_valid_parameters_for_parameter_table
+
+import amici
+from amici.logging import get_logger, log_execution_time, set_log_level
+
+try:
+    from amici.petab_import_pysb import PysbPetabProblem, import_model_pysb
+except ModuleNotFoundError:
+    # pysb not available
+    PysbPetabProblem = None
+    import_model_pysb = None
+
+logger = get_logger(__name__, logging.WARNING)
+
+# ID of model parameter that is to be added to SBML model to indicate
+# preequilibration
+PREEQ_INDICATOR_ID = 'preequilibration_indicator'
+
+
+def _add_global_parameter(sbml_model: libsbml.Model,
+                          parameter_id: str,
+                          parameter_name: str = None,
+                          constant: bool = False,
+                          units: str = 'dimensionless',
+                          value: float = 0.0) -> libsbml.Parameter:
+    """Add new global parameter to SBML model
+
+    Arguments:
+        sbml_model: SBML model
+        parameter_id: ID of the new parameter
+        parameter_name: Name of the new parameter
+        constant: Is parameter constant?
+        units: SBML unit ID
+        value: parameter value
+
+    Returns:
+        The created parameter
+    """
+    if parameter_name is None:
+        parameter_name = parameter_id
+
+    p = sbml_model.createParameter()
+    p.setId(parameter_id)
+    p.setName(parameter_name)
+    p.setConstant(constant)
+    p.setValue(value)
+    p.setUnits(units)
+    return p
+
+
+def get_fixed_parameters(
+        petab_problem: petab.Problem
+) -> List[str]:
+    """
+    Determine, set and return fixed model parameters.
+ + Non-estimated parameters and parameters specified in the condition table + are turned into constants (unless they are overridden). + Only global SBML parameters are considered. Local parameters are ignored. + + :param petab_problem: + The PEtab problem instance + + :return: + List of IDs of parameters which are to be considered constant. + """ + # initial concentrations for species or initial compartment sizes in + # condition table will need to be turned into fixed parameters + + # if there is no initial assignment for that species, we'd need + # to create one. to avoid any naming collision right away, we don't + # allow that for now + + # we can't handle them yet + compartments = [ + col for col in petab_problem.condition_df + if petab_problem.sbml_model.getCompartment(col) is not None + ] + if compartments: + raise NotImplementedError("Can't handle initial compartment sizes " + "at the moment. Consider creating an " + f"initial assignment for {compartments}") + + # if we have a parameter table, all parameters that are allowed to be + # listed in the parameter table, but are not marked as estimated, can be + # turned in to AMICI constants + # due to legacy API, we might not always have a parameter table, though + fixed_parameters = set() + if petab_problem.parameter_df is not None: + all_parameters = get_valid_parameters_for_parameter_table( + model=petab_problem.model, + condition_df=petab_problem.condition_df, + observable_df=petab_problem.observable_df + if petab_problem.observable_df is not None + else pd.DataFrame(columns=petab.OBSERVABLE_DF_REQUIRED_COLS), + measurement_df=petab_problem.measurement_df + if petab_problem.measurement_df is not None + else pd.DataFrame(columns=petab.MEASUREMENT_DF_REQUIRED_COLS), + ) + estimated_parameters = petab_problem.parameter_df.index.values[ + petab_problem.parameter_df[ESTIMATE] == 1] + fixed_parameters = set(all_parameters) - set(estimated_parameters) + + sbml_model = petab_problem.sbml_model + condition_df = petab_problem.condition_df + + # Column names are model parameter IDs, compartment IDs or species IDs. + # Thereof, all parameters except for any overridden ones should be made + # constant. + # (Could potentially still be made constant, but leaving them might + # increase model reusability) + + # handle parameters in condition table + if condition_df is not None: + logger.debug(f'Condition table: {condition_df.shape}') + + # remove overridden parameters (`object`-type columns) + fixed_parameters.update( + p for p in condition_df.columns + # get rid of conditionName column + if p != CONDITION_NAME + # there is no parametric override + # TODO: could check if the final overriding parameter is estimated + # or not, but for now, we skip the parameter if there is any kind + # of overriding + if condition_df[p].dtype != 'O' + # p is a parameter + and sbml_model.getParameter(p) is not None + # but not a rule target + and sbml_model.getRuleByVariable(p) is None + ) + + # Ensure mentioned parameters exist in the model. Remove additional ones + # from list + for fixed_parameter in fixed_parameters.copy(): + # check global parameters + if not sbml_model.getParameter(fixed_parameter): + logger.warning(f"Parameter or species '{fixed_parameter}'" + " provided in condition table but not present in" + " model. 
Ignoring.") + fixed_parameters.remove(fixed_parameter) + + return list(sorted(fixed_parameters)) + + +def species_to_parameters(species_ids: List[str], + sbml_model: 'libsbml.Model') -> List[str]: + """ + Turn a SBML species into parameters and replace species references + inside the model instance. + + :param species_ids: + List of SBML species ID to convert to parameters with the same ID as + the replaced species. + + :param sbml_model: + SBML model to modify + + :return: + List of IDs of species which have been converted to parameters + """ + transformables = [] + + for species_id in species_ids: + species = sbml_model.getSpecies(species_id) + + if species.getHasOnlySubstanceUnits(): + logger.warning( + f"Ignoring {species.getId()} which has only substance units." + " Conversion not yet implemented.") + continue + + if math.isnan(species.getInitialConcentration()): + logger.warning( + f"Ignoring {species.getId()} which has no initial " + "concentration. Amount conversion not yet implemented.") + continue + + transformables.append(species_id) + + # Must not remove species while iterating over getListOfSpecies() + for species_id in transformables: + species = sbml_model.removeSpecies(species_id) + par = sbml_model.createParameter() + par.setId(species.getId()) + par.setName(species.getName()) + par.setConstant(True) + par.setValue(species.getInitialConcentration()) + par.setUnits(species.getUnits()) + + # Remove from reactants and products + for reaction in sbml_model.getListOfReactions(): + for species_id in transformables: + # loop, since removeX only removes one instance + while reaction.removeReactant(species_id): + # remove from reactants + pass + while reaction.removeProduct(species_id): + # remove from products + pass + while reaction.removeModifier(species_id): + # remove from modifiers + pass + + return transformables + + +def import_petab_problem( + petab_problem: petab.Problem, + model_output_dir: Union[str, Path, None] = None, + model_name: str = None, + force_compile: bool = False, + **kwargs) -> 'amici.Model': + """ + Import model from petab problem. + + :param petab_problem: + A petab problem containing all relevant information on the model. + + :param model_output_dir: + Directory to write the model code to. Will be created if doesn't + exist. Defaults to current directory. + + :param model_name: + Name of the generated model. If model file name was provided, + this defaults to the file name without extension, otherwise + the model ID will be used. + + :param force_compile: + Whether to compile the model even if the target folder is not empty, + or the model exists already. + + :param kwargs: + Additional keyword arguments to be passed to + :meth:`amici.sbml_import.SbmlImporter.sbml2amici`. + + :return: + The imported model. 
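+
+    Example (a minimal sketch; the YAML file name is hypothetical)::
+
+        import petab
+        petab_problem = petab.Problem.from_yaml('my_problem.yaml')
+        model = import_petab_problem(petab_problem)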
+ """ + # generate folder and model name if necessary + if model_output_dir is None: + if PysbPetabProblem and isinstance(petab_problem, PysbPetabProblem): + raise ValueError("Parameter `model_output_dir` is required.") + + model_output_dir = \ + _create_model_output_dir_name(petab_problem.sbml_model) + else: + model_output_dir = os.path.abspath(model_output_dir) + + if PysbPetabProblem and isinstance(petab_problem, PysbPetabProblem) \ + and model_name is None: + model_name = petab_problem.pysb_model.name + elif model_name is None: + model_name = _create_model_name(model_output_dir) + + # create folder + if not os.path.exists(model_output_dir): + os.makedirs(model_output_dir) + + # check if compilation necessary + if force_compile or not _can_import_model(model_name, model_output_dir): + # check if folder exists + if os.listdir(model_output_dir) and not force_compile: + raise ValueError( + f"Cannot compile to {model_output_dir}: not empty. " + "Please assign a different target or set `force_compile`.") + + # remove folder if exists + if os.path.exists(model_output_dir): + shutil.rmtree(model_output_dir) + + logger.info(f"Compiling model {model_name} to {model_output_dir}.") + # compile the model + if PysbPetabProblem and isinstance(petab_problem, PysbPetabProblem): + import_model_pysb( + petab_problem, + model_name=model_name, + model_output_dir=model_output_dir, + **kwargs) + else: + import_model_sbml( + petab_problem=petab_problem, + model_name=model_name, + model_output_dir=model_output_dir, + **kwargs) + + # import model + model_module = amici.import_model_module(model_name, model_output_dir) + model = model_module.getModel() + + logger.info(f"Successfully loaded model {model_name} " + f"from {model_output_dir}.") + + return model + + +def _create_model_output_dir_name(sbml_model: 'libsbml.Model') -> Path: + """ + Find a folder for storing the compiled amici model. + If possible, use the sbml model id, otherwise create a random folder. + The folder will be located in the `amici_models` subfolder of the current + folder. + """ + BASE_DIR = Path("amici_models").absolute() + BASE_DIR.mkdir(exist_ok=True) + # try sbml model id + if sbml_model_id := sbml_model.getId(): + return BASE_DIR / sbml_model_id + + # create random folder name + return Path(tempfile.mkdtemp(dir=BASE_DIR)) + + +def _create_model_name(folder: Union[str, Path]) -> str: + """ + Create a name for the model. + Just re-use the last part of the folder. + """ + return os.path.split(os.path.normpath(folder))[-1] + + +def _can_import_model( + model_name: str, + model_output_dir: Union[str, Path] +) -> bool: + """ + Check whether a module of that name can already be imported. 
+ """ + # try to import (in particular checks version) + try: + with amici.add_path(model_output_dir): + model_module = importlib.import_module(model_name) + except ModuleNotFoundError: + return False + + # no need to (re-)compile + return hasattr(model_module, "getModel") + + +@log_execution_time('Importing PEtab model', logger) +def import_model_sbml( + sbml_model: Union[str, Path, 'libsbml.Model'] = None, + condition_table: Optional[Union[str, Path, pd.DataFrame]] = None, + observable_table: Optional[Union[str, Path, pd.DataFrame]] = None, + measurement_table: Optional[Union[str, Path, pd.DataFrame]] = None, + petab_problem: petab.Problem = None, + model_name: Optional[str] = None, + model_output_dir: Optional[Union[str, Path]] = None, + verbose: Optional[Union[bool, int]] = True, + allow_reinit_fixpar_initcond: bool = True, + validate: bool = True, + **kwargs) -> amici.SbmlImporter: + """ + Create AMICI model from PEtab problem + + :param sbml_model: + PEtab SBML model or SBML file name. + Deprecated, pass ``petab_problem`` instead. + + :param condition_table: + PEtab condition table. If provided, parameters from there will be + turned into AMICI constant parameters (i.e. parameters w.r.t. which + no sensitivities will be computed). + Deprecated, pass ``petab_problem`` instead. + + :param observable_table: + PEtab observable table. Deprecated, pass ``petab_problem`` instead. + + :param measurement_table: + PEtab measurement table. Deprecated, pass ``petab_problem`` instead. + + :param petab_problem: + PEtab problem. + + :param model_name: + Name of the generated model. If model file name was provided, + this defaults to the file name without extension, otherwise + the SBML model ID will be used. + + :param model_output_dir: + Directory to write the model code to. Will be created if doesn't + exist. Defaults to current directory. + + :param verbose: + Print/log extra information. + + :param allow_reinit_fixpar_initcond: + See :class:`amici.ode_export.ODEExporter`. Must be enabled if initial + states are to be reset after preequilibration. + + :param validate: + Whether to validate the PEtab problem + + :param kwargs: + Additional keyword arguments to be passed to + :meth:`amici.sbml_import.SbmlImporter.sbml2amici`. + + :return: + The created :class:`amici.sbml_import.SbmlImporter` instance. + """ + from petab.models.sbml_model import SbmlModel + + set_log_level(logger, verbose) + + logger.info("Importing model ...") + + if any([sbml_model, condition_table, observable_table, measurement_table]): + warn("The `sbml_model`, `condition_table`, `observable_table`, and " + "`measurement_table` arguments are deprecated and will be " + "removed in a future version. 
Use `petab_problem` instead.", + DeprecationWarning, stacklevel=2) + if petab_problem: + raise ValueError("Must not pass a `petab_problem` argument in " + "combination with any of `sbml_model`, " + "`condition_table`, `observable_table`, or " + "`measurement_table`.") + + petab_problem = petab.Problem( + model=SbmlModel(sbml_model) + if isinstance(sbml_model, libsbml.Model) + else SbmlModel.from_file(sbml_model), + condition_df=petab.get_condition_df(condition_table), + observable_df=petab.get_observable_df(observable_table), + ) + + if petab_problem.observable_df is None: + raise NotImplementedError("PEtab import without observables table " + "is currently not supported.") + + assert isinstance(petab_problem.model, SbmlModel) + + if validate: + logger.info("Validating PEtab problem ...") + petab.lint_problem(petab_problem) + + # Model name from SBML ID or filename + if model_name is None: + if not (model_name := petab_problem.model.sbml_model.getId()): + if not isinstance(sbml_model, (str, Path)): + raise ValueError("No `model_name` was provided and no model " + "ID was specified in the SBML model.") + model_name = os.path.splitext(os.path.split(sbml_model)[-1])[0] + + if model_output_dir is None: + model_output_dir = os.path.join( + os.getcwd(), f"{model_name}-amici{amici.__version__}" + ) + + logger.info(f"Model name is '{model_name}'.\n" + f"Writing model code to '{model_output_dir}'.") + + # Create a copy, because it will be modified by SbmlImporter + sbml_doc = petab_problem.model.sbml_model.getSBMLDocument().clone() + sbml_model = sbml_doc.getModel() + + show_model_info(sbml_model) + + sbml_importer = amici.SbmlImporter(sbml_model) + sbml_model = sbml_importer.sbml + + allow_n_noise_pars = \ + not petab.lint.observable_table_has_nontrivial_noise_formula( + petab_problem.observable_df + ) + if petab_problem.measurement_df is not None and \ + petab.lint.measurement_table_has_timepoint_specific_mappings( + petab_problem.measurement_df, + allow_scalar_numeric_noise_parameters=allow_n_noise_pars + ): + raise ValueError( + 'AMICI does not support importing models with timepoint specific ' + 'mappings for noise or observable parameters. Please flatten ' + 'the problem and try again.' + ) + + if petab_problem.observable_df is not None: + observables, noise_distrs, sigmas = \ + get_observation_model(petab_problem.observable_df) + else: + observables = noise_distrs = sigmas = None + + logger.info(f'Observables: {len(observables)}') + logger.info(f'Sigmas: {len(sigmas)}') + + if len(sigmas) != len(observables): + raise AssertionError( + f'Number of provided observables ({len(observables)}) and sigmas ' + f'({len(sigmas)}) do not match.') + + # TODO: adding extra output parameters is currently not supported, + # so we add any output parameters to the SBML model. 
+ # this should be changed to something more elegant + # + formulas = chain((val['formula'] for val in observables.values()), + sigmas.values()) + output_parameters = OrderedDict() + for formula in formulas: + # we want reproducible parameter ordering upon repeated import + free_syms = sorted(sp.sympify(formula).free_symbols, + key=lambda symbol: symbol.name) + for free_sym in free_syms: + sym = str(free_sym) + if sbml_model.getElementBySId(sym) is None and sym != 'time' \ + and sym not in observables: + output_parameters[sym] = None + logger.debug("Adding output parameters to model: " + f"{list(output_parameters.keys())}") + for par in output_parameters.keys(): + _add_global_parameter(sbml_model, par) + # + + # TODO: to parameterize initial states or compartment sizes, we currently + # need initial assignments. if they occur in the condition table, we + # create a new parameter initial_${startOrCompartmentID}. + # feels dirty and should be changed (see also #924) + # + + initial_states = [col for col in petab_problem.condition_df + if element_is_state(sbml_model, col)] + fixed_parameters = [] + if initial_states: + # add preequilibration indicator variable + # NOTE: would only be required if we actually have preequilibration + # adding it anyways. can be optimized-out later + if sbml_model.getParameter(PREEQ_INDICATOR_ID) is not None: + raise AssertionError("Model already has a parameter with ID " + f"{PREEQ_INDICATOR_ID}. Cannot handle " + "species and compartments in condition table " + "then.") + indicator = sbml_model.createParameter() + indicator.setId(PREEQ_INDICATOR_ID) + indicator.setName(PREEQ_INDICATOR_ID) + # Can only reset parameters after preequilibration if they are fixed. + fixed_parameters.append(PREEQ_INDICATOR_ID) + logger.debug("Adding preequilibration indicator " + f"constant {PREEQ_INDICATOR_ID}") + logger.debug(f"Adding initial assignments for {initial_states}") + for assignee_id in initial_states: + init_par_id_preeq = f"initial_{assignee_id}_preeq" + init_par_id_sim = f"initial_{assignee_id}_sim" + for init_par_id in [init_par_id_preeq, init_par_id_sim]: + if sbml_model.getElementBySId(init_par_id) is not None: + raise ValueError( + "Cannot create parameter for initial assignment " + f"for {assignee_id} because an entity named " + f"{init_par_id} exists already in the model.") + init_par = sbml_model.createParameter() + init_par.setId(init_par_id) + init_par.setName(init_par_id) + assignment = sbml_model.getInitialAssignment(assignee_id) + if assignment is None: + assignment = sbml_model.createInitialAssignment() + assignment.setSymbol(assignee_id) + else: + logger.debug('The SBML model has an initial assignment defined ' + f'for model entity {assignee_id}, but this entity ' + 'also has an initial value defined in the PEtab ' + 'condition table. 
The SBML initial assignment will ' + 'be overwritten to handle preequilibration and ' + 'initial values specified by the PEtab problem.') + formula = f'{PREEQ_INDICATOR_ID} * {init_par_id_preeq} ' \ + f'+ (1 - {PREEQ_INDICATOR_ID}) * {init_par_id_sim}' + math_ast = libsbml.parseL3Formula(formula) + assignment.setMath(math_ast) + # + + fixed_parameters.extend( + get_fixed_parameters( + petab_problem=petab_problem, + )) + + logger.debug(f"Fixed parameters are {fixed_parameters}") + logger.info(f"Overall fixed parameters: {len(fixed_parameters)}") + logger.info("Variable parameters: " + + str(len(sbml_model.getListOfParameters()) + - len(fixed_parameters))) + + # Create Python module from SBML model + sbml_importer.sbml2amici( + model_name=model_name, + output_dir=model_output_dir, + observables=observables, + constant_parameters=fixed_parameters, + sigmas=sigmas, + allow_reinit_fixpar_initcond=allow_reinit_fixpar_initcond, + noise_distributions=noise_distrs, + verbose=verbose, + **kwargs) + + return sbml_importer + + +# for backwards compatibility +import_model = import_model_sbml + + +def get_observation_model( + observable_df: pd.DataFrame, +) -> Tuple[Dict[str, Dict[str, str]], Dict[str, str], + Dict[str, Union[str, float]]]: + """ + Get observables, sigmas, and noise distributions from PEtab observation + table in a format suitable for + :meth:`amici.sbml_import.SbmlImporter.sbml2amici`. + + :param observable_df: + PEtab observables table + + :return: + Tuple of dicts with observables, noise distributions, and sigmas. + """ + + if observable_df is None: + return {}, {}, {} + + observables = {} + sigmas = {} + + nan_pat = r'^[nN]a[nN]$' + for _, observable in observable_df.iterrows(): + oid = str(observable.name) + # need to sanitize due to https://github.com/PEtab-dev/PEtab/issues/447 + name = re.sub(nan_pat, '', str(observable.get(OBSERVABLE_NAME, ''))) + formula_obs = re.sub(nan_pat, '', str(observable[OBSERVABLE_FORMULA])) + formula_noise = re.sub(nan_pat, '', str(observable[NOISE_FORMULA])) + observables[oid] = {'name': name, 'formula': formula_obs} + sigmas[oid] = formula_noise + + # PEtab does currently not allow observables in noiseFormula and AMICI + # cannot handle states in sigma expressions. Therefore, where possible, + # replace species occurring in error model definition by observableIds. + replacements = { + sp.sympify(observable['formula']): sp.Symbol(observable_id) + for observable_id, observable in observables.items() + } + for observable_id, formula in sigmas.items(): + repl = sp.sympify(formula).subs(replacements) + sigmas[observable_id] = str(repl) + + noise_distrs = petab_noise_distributions_to_amici(observable_df) + + return observables, noise_distrs, sigmas + + +def petab_noise_distributions_to_amici(observable_df: pd.DataFrame + ) -> Dict[str, str]: + """ + Map from the petab to the amici format of noise distribution + identifiers. 
+ + :param observable_df: + PEtab observable table + + :return: + Dictionary of observable_id => AMICI noise-distributions + """ + amici_distrs = {} + for _, observable in observable_df.iterrows(): + amici_val = '' + + if OBSERVABLE_TRANSFORMATION in observable \ + and isinstance(observable[OBSERVABLE_TRANSFORMATION], str) \ + and observable[OBSERVABLE_TRANSFORMATION]: + amici_val += observable[OBSERVABLE_TRANSFORMATION] + '-' + + if NOISE_DISTRIBUTION in observable \ + and isinstance(observable[NOISE_DISTRIBUTION], str) \ + and observable[NOISE_DISTRIBUTION]: + amici_val += observable[NOISE_DISTRIBUTION] + else: + amici_val += 'normal' + amici_distrs[observable.name] = amici_val + + return amici_distrs + + +def petab_scale_to_amici_scale(scale_str: str) -> int: + """Convert PEtab parameter scaling string to AMICI scaling integer""" + + if scale_str == petab.LIN: + return amici.ParameterScaling_none + if scale_str == petab.LOG: + return amici.ParameterScaling_ln + if scale_str == petab.LOG10: + return amici.ParameterScaling_log10 + + raise ValueError(f"Invalid parameter scale {scale_str}") + + +def show_model_info(sbml_model: 'libsbml.Model'): + """Log some model quantities""" + + logger.info(f'Species: {len(sbml_model.getListOfSpecies())}') + logger.info('Global parameters: ' + + str(len(sbml_model.getListOfParameters()))) + logger.info(f'Reactions: {len(sbml_model.getListOfReactions())}') + + +def element_is_state(sbml_model: libsbml.Model, sbml_id: str) -> bool: + """Does the element with ID `sbml_id` correspond to a state variable? + """ + if sbml_model.getCompartment(sbml_id) is not None: + return True + if sbml_model.getSpecies(sbml_id) is not None: + return True + if (rule := sbml_model.getRuleByVariable(sbml_id)) is not None \ + and rule.getTypeCode() == libsbml.SBML_RATE_RULE: + return True + + return False + + +def _parse_cli_args(): + """ + Parse command line arguments + + :return: + Parsed CLI arguments from :mod:`argparse`. 
+ """ + + parser = argparse.ArgumentParser( + description='Import PEtab-format model into AMICI.') + + # General options: + parser.add_argument('-v', '--verbose', dest='verbose', action='store_true', + help='More verbose output') + parser.add_argument('-o', '--output-dir', dest='model_output_dir', + help='Name of the model directory to create') + parser.add_argument('--no-compile', action='store_false', + dest='compile', + help='Only generate model code, do not compile') + parser.add_argument('--flatten', dest='flatten', default=False, + action='store_true', + help='Flatten measurement specific overrides of ' + 'observable and noise parameters') + parser.add_argument('--no-sensitivities', dest='generate_sensitivity_code', + default=True, action='store_false', + help='Skip generation of sensitivity code') + + # Call with set of files + parser.add_argument('-s', '--sbml', dest='sbml_file_name', + help='SBML model filename') + parser.add_argument('-m', '--measurements', dest='measurement_file_name', + help='Measurement table') + parser.add_argument('-c', '--conditions', dest='condition_file_name', + help='Conditions table') + parser.add_argument('-p', '--parameters', dest='parameter_file_name', + help='Parameter table') + parser.add_argument('-b', '--observables', dest='observable_file_name', + help='Observable table') + + parser.add_argument('-y', '--yaml', dest='yaml_file_name', + help='PEtab YAML problem filename') + + parser.add_argument('-n', '--model-name', dest='model_name', + help='Name of the python module generated for the ' + 'model') + + args = parser.parse_args() + + if not args.yaml_file_name \ + and not all((args.sbml_file_name, args.condition_file_name, + args.observable_file_name)): + parser.error('When not specifying a model name or YAML file, then ' + 'SBML, condition and observable file must be specified') + + return args + + +def main(): + """ + Command line interface to import a model in the PEtab + (https://github.com/PEtab-dev/PEtab/) format into AMICI. 
+ """ + args = _parse_cli_args() + + if args.yaml_file_name: + pp = petab.Problem.from_yaml(args.yaml_file_name) + else: + pp = petab.Problem.from_files( + sbml_file=args.sbml_file_name, + condition_file=args.condition_file_name, + measurement_file=args.measurement_file_name, + parameter_file=args.parameter_file_name, + observable_files=args.observable_file_name) + + # Check for valid PEtab before potentially modifying it + petab.lint_problem(pp) + + if args.flatten: + petab.flatten_timepoint_specific_output_overrides(pp) + + import_model(model_name=args.model_name, + sbml_model=pp.sbml_model, + condition_table=pp.condition_df, + observable_table=pp.observable_df, + measurement_table=pp.measurement_df, + model_output_dir=args.model_output_dir, + compile=args.compile, + generate_sensitivity_code=args.generate_sensitivity_code, + verbose=args.verbose, + validate=False) + + +if __name__ == '__main__': + main() diff --git a/python/sdist/amici/petab_import_pysb.py b/python/sdist/amici/petab_import_pysb.py deleted file mode 120000 index 01591fc62e..0000000000 --- a/python/sdist/amici/petab_import_pysb.py +++ /dev/null @@ -1 +0,0 @@ -../../amici/petab_import_pysb.py \ No newline at end of file diff --git a/python/sdist/amici/petab_import_pysb.py b/python/sdist/amici/petab_import_pysb.py new file mode 100644 index 0000000000..02e5aca038 --- /dev/null +++ b/python/sdist/amici/petab_import_pysb.py @@ -0,0 +1,388 @@ +""" +PySB-PEtab Import +----------------- +Import a model in the PySB-adapted :mod:`petab` +(https://github.com/PEtab-dev/PEtab) format into AMICI. +""" + +import logging +import os +from itertools import chain +from pathlib import Path +from typing import Dict, Iterable, Optional, Tuple, Union + +import libsbml +import petab +import pysb +import sympy as sp +from petab.C import (CONDITION_FILES, CONDITION_NAME, FORMAT_VERSION, + MEASUREMENT_FILES, NOISE_FORMULA, OBSERVABLE_FILES, + OBSERVABLE_FORMULA, PARAMETER_FILE, SBML_FILES, + VISUALIZATION_FILES) + +from . import petab_import +from .logging import get_logger, log_execution_time, set_log_level + +logger = get_logger(__name__, logging.WARNING) + + +class PysbPetabProblem(petab.Problem): + """Representation of a PySB-model-based PEtab problem + + This class extends :class:`petab.Problem` with a PySB model. + The model is augmented with the observation model based on the PEtab + observable table. + For now, a dummy SBML model is created which allows used the existing + SBML-PEtab API. + + :ivar pysb_model: + PySB model instance from of this PEtab problem. 
+ + """ + + def __init__(self, pysb_model: 'pysb.Model' = None, *args, **kwargs): + """ + Constructor + + :param pysb_model: PySB model instance for this PEtab problem + :param args: See :meth:`petab.Problem.__init__` + :param kwargs: See :meth:`petab.Problem.__init__` + """ + flatten = kwargs.pop('flatten', False) + super().__init__(*args, **kwargs) + if flatten: + petab.flatten_timepoint_specific_output_overrides(self) + + self.pysb_model: 'pysb.Model' = pysb_model + self._add_observation_model() + + if self.pysb_model is not None: + self.sbml_document, self.sbml_model = \ + create_dummy_sbml( + self.pysb_model, + observable_ids=self.observable_df.index.values + if self.observable_df is not None else None + ) + + def _add_observation_model(self): + """Extend PySB model by observation model as defined in the PEtab + observables table""" + + # add any required output parameters + local_syms = {sp.Symbol.__str__(comp): comp for comp in + self.pysb_model.components if + isinstance(comp, sp.Symbol)} + for formula in [*self.observable_df[OBSERVABLE_FORMULA], + *self.observable_df[NOISE_FORMULA]]: + sym = sp.sympify(formula, locals=local_syms) + for s in sym.free_symbols: + if not isinstance(s, pysb.Component): + p = pysb.Parameter(str(s), 1.0, _export=False) + self.pysb_model.add_component(p) + local_syms[sp.Symbol.__str__(p)] = p + + # add observables and sigmas to pysb model + for (observable_id, observable_formula, noise_formula) \ + in zip(self.observable_df.index, + self.observable_df[OBSERVABLE_FORMULA], + self.observable_df[NOISE_FORMULA]): + obs_symbol = sp.sympify(observable_formula, locals=local_syms) + if observable_id in self.pysb_model.expressions.keys(): + obs_expr = self.pysb_model.expressions[observable_id] + else: + obs_expr = pysb.Expression(observable_id, obs_symbol, + _export=False) + self.pysb_model.add_component(obs_expr) + local_syms[observable_id] = obs_expr + + sigma_id = f"{observable_id}_sigma" + sigma_symbol = sp.sympify( + noise_formula, + locals=local_syms + ) + sigma_expr = pysb.Expression(sigma_id, sigma_symbol, _export=False) + self.pysb_model.add_component(sigma_expr) + local_syms[sigma_id] = sigma_expr + + @staticmethod + def from_files( + condition_file: + Union[str, Path, Iterable[Union[str, Path]]] = None, + measurement_file: + Union[str, Path, Iterable[Union[str, Path]]] = None, + parameter_file: + Union[str, Path, Iterable[Union[str, Path]]] = None, + visualization_files: + Union[str, Path, Iterable[Union[str, Path]]] = None, + observable_files: + Union[str, Path, Iterable[Union[str, Path]]] = None, + pysb_model_file: Union[str, Path] = None, + flatten: bool = False + ) -> 'PysbPetabProblem': + """ + Factory method to load model and tables from files. 
+
+        :param condition_file:
+            PEtab condition table
+
+        :param measurement_file:
+            PEtab measurement table
+
+        :param parameter_file:
+            PEtab parameter table
+
+        :param visualization_files:
+            PEtab visualization tables
+
+        :param observable_files:
+            PEtab observables tables
+
+        :param pysb_model_file:
+            PySB model file
+
+        :param flatten:
+            Flatten the petab problem
+
+        :return:
+            PEtab problem
+        """
+
+        condition_df = measurement_df = parameter_df = visualization_df = None
+        observable_df = None
+
+        if condition_file:
+            condition_df = petab.conditions.get_condition_df(condition_file)
+
+        if measurement_file:
+            # If there are multiple tables, we will merge them
+            measurement_df = petab.core.concat_tables(
+                measurement_file, petab.measurements.get_measurement_df)
+
+        if parameter_file:
+            parameter_df = petab.parameters.get_parameter_df(parameter_file)
+
+        if visualization_files:
+            # If there are multiple tables, we will merge them
+            visualization_df = petab.core.concat_tables(
+                visualization_files, petab.core.get_visualization_df)
+
+        if observable_files:
+            # If there are multiple tables, we will merge them
+            observable_df = petab.core.concat_tables(
+                observable_files, petab.observables.get_observable_df)
+        from amici.pysb_import import pysb_model_from_path
+        return PysbPetabProblem(
+            pysb_model=pysb_model_from_path(
+                pysb_model_file=pysb_model_file),
+            condition_df=condition_df,
+            measurement_df=measurement_df,
+            parameter_df=parameter_df,
+            observable_df=observable_df,
+            visualization_df=visualization_df,
+            flatten=flatten
+        )
+
+    @staticmethod
+    def from_yaml(yaml_config: Union[Dict, Path, str],
+                  flatten: bool = False) -> 'PysbPetabProblem':
+        """
+        Factory method to load model and tables as specified by YAML file.
+
+        NOTE: The PySB model is currently expected in the YAML file under
+        ``sbml_files``.
+
+        :param yaml_config:
+            PEtab configuration as dictionary or YAML file name
+
+        :param flatten:
+            Flatten the petab problem
+
+        :return:
+            PEtab problem
+        """
+        from petab.yaml import (load_yaml, is_composite_problem,
+                                assert_single_condition_and_sbml_file)
+        if isinstance(yaml_config, (str, Path)):
+            path_prefix = os.path.dirname(yaml_config)
+            yaml_config = load_yaml(yaml_config)
+        else:
+            path_prefix = ""
+
+        if is_composite_problem(yaml_config):
+            raise ValueError('petab.Problem.from_yaml() can only be used for '
+                             'yaml files comprising a single model. '
+                             'Consider using '
+                             'petab.CompositeProblem.from_yaml() instead.')
+
+        if yaml_config[FORMAT_VERSION] != petab.__format_version__:
+            raise ValueError("Provided PEtab files are of unsupported version "
+                             f"{yaml_config[FORMAT_VERSION]}. Expected "
+                             f"{petab.__format_version__}.")
+
+        problem0 = yaml_config['problems'][0]
+
+        assert_single_condition_and_sbml_file(problem0)
+
+        if isinstance(yaml_config[PARAMETER_FILE], list):
+            parameter_file = [
+                os.path.join(path_prefix, f)
+                for f in yaml_config[PARAMETER_FILE]
+            ]
+        else:
+            parameter_file = os.path.join(
+                path_prefix, yaml_config[PARAMETER_FILE])
+
+        return PysbPetabProblem.from_files(
+            pysb_model_file=os.path.join(
+                path_prefix, problem0[SBML_FILES][0]),
+            measurement_file=[os.path.join(path_prefix, f)
+                              for f in problem0[MEASUREMENT_FILES]],
+            condition_file=os.path.join(
+                path_prefix, problem0[CONDITION_FILES][0]),
+            parameter_file=parameter_file,
+            visualization_files=[
+                os.path.join(path_prefix, f)
+                for f in problem0.get(VISUALIZATION_FILES, [])],
+            observable_files=[
+                os.path.join(path_prefix, f)
+                for f in problem0.get(OBSERVABLE_FILES, [])],
+            flatten=flatten
+        )
+
+
+def create_dummy_sbml(
+        pysb_model: 'pysb.Model',
+        observable_ids: Optional[Iterable[str]] = None
+) -> Tuple['libsbml.SBMLDocument', 'libsbml.Model']:
+    """Create a dummy SBML model to use PySB models with PEtab.
+
+    The model must at least contain the PEtab problem parameters and noise
+    parameters for observables.
+
+    :param pysb_model: PySB model
+    :param observable_ids: Observable IDs
+    :return: A dummy SBML document and model.
+    """
+
+    import libsbml
+
+    document = libsbml.SBMLDocument(3, 1)
+    dummy_sbml_model = document.createModel()
+    dummy_sbml_model.setTimeUnits("second")
+    dummy_sbml_model.setExtentUnits("mole")
+    dummy_sbml_model.setSubstanceUnits('mole')
+
+    # mandatory if there are species
+    c = dummy_sbml_model.createCompartment()
+    c.setId('dummy_compartment')
+    c.setConstant(False)
+
+    # parameters are required for parameter mapping
+    for parameter in pysb_model.parameters:
+        p = dummy_sbml_model.createParameter()
+        p.setId(parameter.name)
+        p.setConstant(True)
+        p.setValue(0.0)
+
+    # noise parameters are required for every observable
+    for observable_id in observable_ids:
+        p = dummy_sbml_model.createParameter()
+        p.setId(f"noiseParameter1_{observable_id}")
+        p.setConstant(True)
+        p.setValue(0.0)
+
+    # pysb observables and expressions are required in case they occur in
+    # the observableFormula or noiseFormula.
+    # as this code is only temporary and not performance-critical, we just add
+    # all of them. we just need an sbml entity with the same ID. sbml species
+    # seem to be the simplest, as parameters would interfere with parameter
+    # mapping later on
+    for component in chain(pysb_model.expressions, pysb_model.observables):
+        s = dummy_sbml_model.createSpecies()
+        s.setId(component.name)
+        s.setInitialAmount(0.0)
+        s.setHasOnlySubstanceUnits(False)
+        s.setBoundaryCondition(False)
+        s.setCompartment('dummy_compartment')
+        s.setConstant(False)
+
+    return document, dummy_sbml_model
+
+
+@log_execution_time('Importing PEtab model', logger)
+def import_model_pysb(
+        petab_problem: PysbPetabProblem,
+        model_output_dir: Optional[Union[str, Path]] = None,
+        verbose: Optional[Union[bool, int]] = True,
+        model_name: Optional[str] = None,
+        **kwargs
+) -> None:
+    """
+    Create AMICI model from PySB-PEtab problem
+
+    :param petab_problem:
+        PySB PEtab problem
+
+    :param model_output_dir:
+        Directory to write the model code to. Will be created if it doesn't
+        exist. Defaults to current directory.
+
+    :param verbose:
+        Print/log extra information.
+
+    :param model_name:
+        Name of the generated model module
+
+    :param kwargs:
+        Additional keyword arguments to be passed to
+        :meth:`amici.pysb_import.pysb2amici`.
+    """
+    set_log_level(logger, verbose)
+
+    logger.info("Importing model ...")
+
+    observable_table = petab_problem.observable_df
+    pysb_model = petab_problem.pysb_model
+
+    # For PySB, we only allow parameters in the condition table; those must
+    # be PySB model parameters (either natively, or output parameters from
+    # the measurement or condition table that have been added in
+    # PysbPetabProblem)
+    model_parameters = [p.name for p in pysb_model.parameters]
+    for x in petab_problem.condition_df.columns:
+        if x == CONDITION_NAME:
+            continue
+
+        if x not in model_parameters:
+            raise NotImplementedError(
+                "For PySB PEtab import, only model parameters, but no states "
+                "or compartments are allowed in the condition table. "
+                f"Offending column: {x}"
+            )
+
+    constant_parameters = petab_import.get_fixed_parameters(
+        petab_problem)
+
+    if observable_table is None:
+        observables = None
+        sigmas = None
+        noise_distrs = None
+    else:
+        observables = [expr.name for expr in pysb_model.expressions
+                       if expr.name in observable_table.index]
+
+        sigmas = {obs_id: f"{obs_id}_sigma" for obs_id in observables}
+
+        noise_distrs = petab_import.petab_noise_distributions_to_amici(
+            observable_table)
+
+    from amici.pysb_import import pysb2amici
+    pysb2amici(model=pysb_model,
+               output_dir=model_output_dir,
+               model_name=model_name,
+               verbose=True,
+               observables=observables,
+               sigmas=sigmas,
+               constant_parameters=constant_parameters,
+               noise_distributions=noise_distrs,
+               **kwargs)
diff --git a/python/sdist/amici/petab_objective.py b/python/sdist/amici/petab_objective.py
deleted file mode 120000
index 9d08244e11..0000000000
--- a/python/sdist/amici/petab_objective.py
+++ /dev/null
@@ -1 +0,0 @@
-../../amici/petab_objective.py
\ No newline at end of file
diff --git a/python/sdist/amici/petab_objective.py b/python/sdist/amici/petab_objective.py
new file mode 100644
index 0000000000..eb0f7ec6cf
--- /dev/null
+++ b/python/sdist/amici/petab_objective.py
@@ -0,0 +1,808 @@
+"""
+PEtab Objective
+---------------
+Functionality related to running simulations or evaluating the objective
+function as defined by a PEtab problem
+"""
+
+import copy
+import logging
+import numbers
+from typing import (List, Sequence, Optional, Dict, Tuple, Union, Any,
+                    Collection, Iterator)
+
+import amici
+from amici.sbml_import import get_species_initial
+import libsbml
+import numpy as np
+import pandas as pd
+import petab
+import sympy as sp
+from petab.C import *  # noqa: F403
+
+from . import AmiciModel, AmiciExpData
+from .logging import get_logger, log_execution_time
+from .petab_import import PREEQ_INDICATOR_ID, element_is_state
+from .parameter_mapping import (
+    fill_in_parameters, ParameterMappingForCondition, ParameterMapping)
+
+logger = get_logger(__name__)
+
+
+# string constant definitions
+LLH = 'llh'
+SLLH = 'sllh'
+FIM = 'fim'
+S2LLH = 's2llh'
+RES = 'res'
+SRES = 'sres'
+RDATAS = 'rdatas'
+
+
+@log_execution_time('Simulating PEtab model', logger)
+def simulate_petab(
+        petab_problem: petab.Problem,
+        amici_model: AmiciModel,
+        solver: Optional[amici.Solver] = None,
+        problem_parameters: Optional[Dict[str, float]] = None,
+        simulation_conditions: Union[pd.DataFrame, Dict] = None,
+        edatas: List[AmiciExpData] = None,
+        parameter_mapping: ParameterMapping = None,
+        scaled_parameters: Optional[bool] = False,
+        log_level: int = logging.WARNING,
+        num_threads: int = 1,
+        failfast: bool = True,
+) -> Dict[str, Any]:
+    """Simulate PEtab model.
+
+    :param petab_problem:
+        PEtab problem to work on.
+    :param amici_model:
+        AMICI Model assumed to be compatible with ``petab_problem``.
+    :param solver:
+        An AMICI solver. Will use default options if None.
+    :param problem_parameters:
+        Run simulation with these parameters. If None, PEtab
+        ``nominalValues`` will be used. To be provided as a dict mapping
+        PEtab problem parameter IDs to parameter values.
+    :param simulation_conditions:
+        Result of :py:func:`petab.get_simulation_conditions`. Can be provided
+        to save time if this has been obtained before.
+        Not required if ``edatas`` and ``parameter_mapping`` are provided.
+    :param edatas:
+        Experimental data. Parameters are inserted in-place for simulation.
+    :param parameter_mapping:
+        Optional precomputed PEtab parameter mapping for efficiency, as
+        generated by :py:func:`create_parameter_mapping`.
+    :param scaled_parameters:
+        If ``True``, ``problem_parameters`` are assumed to be on the scale
+        provided in the PEtab parameter table and will be unscaled.
+        If ``False``, they are assumed to be in linear scale.
+    :param log_level:
+        Log level, see :mod:`amici.logging` module.
+    :param num_threads:
+        Number of threads to use for simulating multiple conditions
+        (only used if compiled with OpenMP).
+    :param failfast:
+        If ``True``, return as soon as an integration failure is
+        encountered, skipping any remaining simulations.
+
+    :return:
+        Dictionary of
+
+        * cost function value (``LLH``),
+        * list of :class:`amici.amici.ReturnData` (``RDATAS``),
+
+        corresponding to the different simulation conditions.
+        For ordering of simulation conditions, see
+        :meth:`petab.Problem.get_simulation_conditions_from_measurement_df`.
+    """
+    logger.setLevel(log_level)
+
+    if solver is None:
+        solver = amici_model.getSolver()
+
+    # Get parameters
+    if problem_parameters is None:
+        # Use PEtab nominal values as default
+        problem_parameters = {t.Index: getattr(t, NOMINAL_VALUE) for t in
+                              petab_problem.parameter_df.itertuples()}
+        if scaled_parameters:
+            raise NotImplementedError(
+                "scaled_parameters=True in combination with "
+                "problem_parameters=None is currently not supported.")
+
+    # number of amici simulations will be number of unique
+    # (preequilibrationConditionId, simulationConditionId) pairs.
+    # Can be optimized by checking for identical condition vectors.
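+    # Below, any inputs not supplied by the caller are derived step by step:
+    # simulation conditions from the measurement table, then the parameter
+    # mapping, then the ExpData objects.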
+
+    if simulation_conditions is None and parameter_mapping is None \
+            and edatas is None:
+        simulation_conditions = \
+            petab_problem.get_simulation_conditions_from_measurement_df()
+
+    # Get parameter mapping
+    if parameter_mapping is None:
+        parameter_mapping = create_parameter_mapping(
+            petab_problem=petab_problem,
+            simulation_conditions=simulation_conditions,
+            scaled_parameters=scaled_parameters,
+            amici_model=amici_model)
+
+    # Get edatas
+    if edatas is None:
+        # Generate ExpData with all condition-specific information
+        edatas = create_edatas(
+            amici_model=amici_model,
+            petab_problem=petab_problem,
+            simulation_conditions=simulation_conditions)
+
+    # Fill parameters in ExpDatas (in-place)
+    fill_in_parameters(
+        edatas=edatas,
+        problem_parameters=problem_parameters,
+        scaled_parameters=scaled_parameters,
+        parameter_mapping=parameter_mapping,
+        amici_model=amici_model)
+
+    # Simulate
+    rdatas = amici.runAmiciSimulations(
+        amici_model, solver, edata_list=edatas,
+        num_threads=num_threads, failfast=failfast)
+
+    # Compute total llh
+    llh = sum(rdata['llh'] for rdata in rdatas)
+
+    # Log results
+    sim_cond = petab_problem.get_simulation_conditions_from_measurement_df()
+    for i, rdata in enumerate(rdatas):
+        sim_cond_id = "N/A" if sim_cond.empty else sim_cond.iloc[i, :].values
+        logger.debug(
+            f"Condition: {sim_cond_id}, status: {rdata['status']}, "
+            f"llh: {rdata['llh']}"
+        )
+
+    return {
+        LLH: llh,
+        RDATAS: rdatas
+    }
+
+
+def create_parameterized_edatas(
+        amici_model: AmiciModel,
+        petab_problem: petab.Problem,
+        problem_parameters: Dict[str, numbers.Number],
+        scaled_parameters: bool = False,
+        parameter_mapping: ParameterMapping = None,
+        simulation_conditions: Union[pd.DataFrame, Dict] = None,
+) -> List[amici.ExpData]:
+    """Create list of :class:`amici.amici.ExpData` objects with parameters
+    filled in.
+
+    :param amici_model:
+        AMICI Model assumed to be compatible with ``petab_problem``.
+    :param petab_problem:
+        PEtab problem to work on.
+    :param problem_parameters:
+        Run simulation with these parameters. If None, PEtab
+        ``nominalValues`` will be used. To be provided as a dict mapping
+        PEtab problem parameter IDs to parameter values.
+    :param scaled_parameters:
+        If ``True``, ``problem_parameters`` are assumed to be on the scale
+        provided in the PEtab parameter table and will be unscaled.
+        If ``False``, they are assumed to be in linear scale.
+    :param parameter_mapping:
+        Optional precomputed PEtab parameter mapping for efficiency, as
+        generated by :func:`create_parameter_mapping`.
+    :param simulation_conditions:
+        Result of :func:`petab.get_simulation_conditions`. Can be provided to
+        save time if this has been obtained before.
+
+    :return:
+        List with one :class:`amici.amici.ExpData` per simulation condition,
+        with filled in timepoints, data and parameters.
+    """
+    # number of amici simulations will be number of unique
+    # (preequilibrationConditionId, simulationConditionId) pairs.
+    # Can be optimized by checking for identical condition vectors.
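+    # Usage sketch (hypothetical IDs and values; ``problem_parameters`` maps
+    # PEtab parameter IDs to values):
+    #     edatas = create_parameterized_edatas(
+    #         amici_model, petab_problem,
+    #         problem_parameters={'k1': 0.1, 'k2': 2.0})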
+ if simulation_conditions is None: + simulation_conditions = \ + petab_problem.get_simulation_conditions_from_measurement_df() + + # Get parameter mapping + if parameter_mapping is None: + parameter_mapping = create_parameter_mapping( + petab_problem=petab_problem, + simulation_conditions=simulation_conditions, + scaled_parameters=scaled_parameters, + amici_model=amici_model) + + # Generate ExpData with all condition-specific information + edatas = create_edatas( + amici_model=amici_model, + petab_problem=petab_problem, + simulation_conditions=simulation_conditions) + + # Fill parameters in ExpDatas (in-place) + fill_in_parameters( + edatas=edatas, + problem_parameters=problem_parameters, + scaled_parameters=scaled_parameters, + parameter_mapping=parameter_mapping, + amici_model=amici_model) + + return edatas + + +def create_parameter_mapping( + petab_problem: petab.Problem, + simulation_conditions: Union[pd.DataFrame, List[Dict]], + scaled_parameters: bool, + amici_model: AmiciModel, + **parameter_mapping_kwargs, +) -> ParameterMapping: + """Generate AMICI specific parameter mapping. + + :param petab_problem: + PEtab problem + :param simulation_conditions: + Result of :func:`petab.get_simulation_conditions`. Can be provided to + save time if this has been obtained before. + :param scaled_parameters: + If ``True``, problem_parameters are assumed to be on the scale provided + in the PEtab parameter table and will be unscaled. If ``False``, they + are assumed to be in linear scale. + :param amici_model: + AMICI model. + :param parameter_mapping_kwargs: + Optional keyword arguments passed to + :func:`petab.get_optimization_to_simulation_parameter_mapping`. + To allow changing fixed PEtab problem parameters (``estimate=0``), + use ``fill_fixed_parameters=False``. + :return: + List of the parameter mappings. + """ + if simulation_conditions is None: + simulation_conditions = \ + petab_problem.get_simulation_conditions_from_measurement_df() + if isinstance(simulation_conditions, list): + simulation_conditions = pd.DataFrame(data=simulation_conditions) + + # Because AMICI globalizes all local parameters during model import, + # we need to do that here as well to prevent parameter mapping errors + # (PEtab does currently not care about SBML LocalParameters) + if petab_problem.sbml_document: + converter_config = libsbml.SBMLLocalParameterConverter() \ + .getDefaultProperties() + petab_problem.sbml_document.convert(converter_config) + else: + logger.debug("No petab_problem.sbml_document is set. Cannot convert " + "SBML LocalParameters. 
If the model contains "
+                      "LocalParameters, parameter mapping will fail.")
+
+    default_parameter_mapping_kwargs = {
+        "warn_unmapped": False,
+        "scaled_parameters": scaled_parameters,
+        "allow_timepoint_specific_numeric_noise_parameters":
+            not petab.lint.observable_table_has_nontrivial_noise_formula(
+                petab_problem.observable_df),
+    }
+    if parameter_mapping_kwargs is None:
+        parameter_mapping_kwargs = {}
+
+    prelim_parameter_mapping = \
+        petab.get_optimization_to_simulation_parameter_mapping(
+            condition_df=petab_problem.condition_df,
+            measurement_df=petab_problem.measurement_df,
+            parameter_df=petab_problem.parameter_df,
+            observable_df=petab_problem.observable_df,
+            model=petab_problem.model,
+            **dict(default_parameter_mapping_kwargs,
+                   **parameter_mapping_kwargs)
+        )
+
+    parameter_mapping = ParameterMapping()
+    for (_, condition), prelim_mapping_for_condition in \
+            zip(simulation_conditions.iterrows(), prelim_parameter_mapping):
+        mapping_for_condition = create_parameter_mapping_for_condition(
+            prelim_mapping_for_condition, condition, petab_problem,
+            amici_model)
+        parameter_mapping.append(mapping_for_condition)
+
+    return parameter_mapping
+
+
+def create_parameter_mapping_for_condition(
+        parameter_mapping_for_condition: petab.ParMappingDictQuadruple,
+        condition: Union[pd.Series, Dict],
+        petab_problem: petab.Problem,
+        amici_model: AmiciModel
+) -> ParameterMappingForCondition:
+    """Generate AMICI specific parameter mapping for condition.
+
+    :param parameter_mapping_for_condition:
+        Preliminary parameter mapping for condition.
+    :param condition:
+        :class:`pandas.DataFrame` row with ``preequilibrationConditionId``
+        and ``simulationConditionId``.
+    :param petab_problem:
+        Underlying PEtab problem.
+    :param amici_model:
+        AMICI model.
+
+    :return:
+        The parameter and parameter scale mappings, for fixed
+        preequilibration, fixed simulation, and variable simulation
+        parameters, and then the respective scalings.
+    """
+    (condition_map_preeq, condition_map_sim, condition_scale_map_preeq,
+     condition_scale_map_sim) = parameter_mapping_for_condition
+    logger.debug(f"PEtab mapping: {parameter_mapping_for_condition}")
+
+    if len(condition_map_preeq) != len(condition_scale_map_preeq) \
+            or len(condition_map_sim) != len(condition_scale_map_sim):
+        raise AssertionError("Number of parameters and number of parameter "
+                             "scales do not match.")
+    if len(condition_map_preeq) \
+            and len(condition_map_preeq) != len(condition_map_sim):
+        logger.debug(f"Preequilibration parameter map: {condition_map_preeq}")
+        logger.debug(f"Simulation parameter map: {condition_map_sim}")
+        raise AssertionError("Number of parameters for preequilibration "
+                             "and simulation do not match.")
+
+    ##########################################################################
+    # initial states
+    # Initial states have been set during model import based on the SBML
+    # model. If initial states were overwritten in the PEtab condition table,
+    # they are applied here.
+    # During model generation, parameters for initial concentrations and
+    # respective initial assignments have been created for the relevant
+    # species; here we add these parameters to the parameter mapping.
+    # In the absence of preequilibration this could also be handled via
+    # ExpData.x0, but in the case of preequilibration this would not allow
+    # for resetting initial states.
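+    # Sketch of the mechanism assumed here (set up during PEtab import):
+    # for a state x listed in the condition table, the initial assignment
+    # has the form
+    #     x(0) = preeq_indicator * initial_x_preeq
+    #            + (1 - preeq_indicator) * initial_x_sim
+    # so toggling the indicator parameter (see below) selects the
+    # preequilibration or the simulation value without going through
+    # ExpData.x0.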
+
+    states_in_condition_table = [
+        col for col in petab_problem.condition_df
+        if element_is_state(petab_problem.sbml_model, col)
+    ]
+    if states_in_condition_table:
+        # set indicator fixed parameter for preeq
+        # (we expect that this parameter was added during import and
+        # that it was not added by the user with a different meaning...)
+        if condition_map_preeq:
+            condition_map_preeq[PREEQ_INDICATOR_ID] = 1.0
+            condition_scale_map_preeq[PREEQ_INDICATOR_ID] = LIN
+
+        condition_map_sim[PREEQ_INDICATOR_ID] = 0.0
+        condition_scale_map_sim[PREEQ_INDICATOR_ID] = LIN
+
+        def _set_initial_state(condition_id, element_id, init_par_id,
+                               par_map, scale_map):
+            value = petab.to_float_if_float(
+                petab_problem.condition_df.loc[condition_id, element_id])
+            if pd.isna(value):
+                element = petab_problem.sbml_model.getElementBySId(element_id)
+                type_code = element.getTypeCode()
+                initial_assignment = petab_problem.sbml_model\
+                    .getInitialAssignmentBySymbol(element_id)
+                if initial_assignment:
+                    initial_assignment = sp.sympify(
+                        libsbml.formulaToL3String(initial_assignment.getMath())
+                    )
+                if type_code == libsbml.SBML_SPECIES:
+                    value = get_species_initial(element) \
+                        if initial_assignment is None else initial_assignment
+                elif type_code == libsbml.SBML_PARAMETER:
+                    value = element.getValue()\
+                        if initial_assignment is None else initial_assignment
+                elif type_code == libsbml.SBML_COMPARTMENT:
+                    value = element.getSize()\
+                        if initial_assignment is None else initial_assignment
+                else:
+                    raise NotImplementedError(
+                        f"Don't know how to handle {element_id} in the "
+                        "condition table.")
+
+                try:
+                    value = float(value)
+                except (ValueError, TypeError):
+                    if sp.nsimplify(value).is_Atom:
+                        # Get rid of multiplication with one
+                        value = sp.nsimplify(value)
+                    else:
+                        raise NotImplementedError(
+                            "Cannot handle non-trivial initial state "
+                            f"expression for {element_id}: {value}")
+                    # this should be a parameter ID
+                    value = str(value)
+                logger.debug(f'The species {element_id} has no initial value '
+                             f'defined for the condition {condition_id} in '
+                             'the PEtab conditions table. 
The initial value is ' + f'now set to {value}, which is the initial value ' + 'defined in the SBML model.') + par_map[init_par_id] = value + if isinstance(value, float): + # numeric initial state + scale_map[init_par_id] = petab.LIN + else: + # parametric initial state + scale_map[init_par_id] = \ + petab_problem.parameter_df[PARAMETER_SCALE]\ + .get(value, petab.LIN) + + for element_id in states_in_condition_table: + # for preequilibration + init_par_id = f'initial_{element_id}_preeq' + if condition.get(PREEQUILIBRATION_CONDITION_ID): + condition_id = condition[PREEQUILIBRATION_CONDITION_ID] + _set_initial_state( + condition_id, element_id, init_par_id, condition_map_preeq, + condition_scale_map_preeq) + else: + # need to set dummy value for preeq parameter anyways, as it + # is expected below (set to 0, not nan, because will be + # multiplied with indicator variable in initial assignment) + condition_map_sim[init_par_id] = 0.0 + condition_scale_map_sim[init_par_id] = LIN + + # for simulation + condition_id = condition[SIMULATION_CONDITION_ID] + init_par_id = f'initial_{element_id}_sim' + _set_initial_state( + condition_id, element_id, init_par_id, condition_map_sim, + condition_scale_map_sim) + + ########################################################################## + # separate fixed and variable AMICI parameters, because we may have + # different fixed parameters for preeq and sim condition, but we cannot + # have different variable parameters. without splitting, + # merge_preeq_and_sim_pars_condition below may fail. + # TODO: This can be done already in parameter mapping creation. + variable_par_ids = amici_model.getParameterIds() + fixed_par_ids = amici_model.getFixedParameterIds() + + condition_map_preeq_var, condition_map_preeq_fix = \ + subset_dict(condition_map_preeq, variable_par_ids, fixed_par_ids) + + condition_scale_map_preeq_var, condition_scale_map_preeq_fix = \ + subset_dict(condition_scale_map_preeq, variable_par_ids, fixed_par_ids) + + condition_map_sim_var, condition_map_sim_fix = \ + subset_dict(condition_map_sim, variable_par_ids, fixed_par_ids) + + condition_scale_map_sim_var, condition_scale_map_sim_fix = \ + subset_dict(condition_scale_map_sim, variable_par_ids, fixed_par_ids) + + logger.debug("Fixed parameters preequilibration: " + f"{condition_map_preeq_fix}") + logger.debug("Fixed parameters simulation: " + f"{condition_map_sim_fix}") + logger.debug("Variable parameters preequilibration: " + f"{condition_map_preeq_var}") + logger.debug("Variable parameters simulation: " + f"{condition_map_sim_var}") + + petab.merge_preeq_and_sim_pars_condition( + condition_map_preeq_var, condition_map_sim_var, + condition_scale_map_preeq_var, condition_scale_map_sim_var, + condition) + logger.debug(f"Merged: {condition_map_sim_var}") + + parameter_mapping_for_condition = ParameterMappingForCondition( + map_preeq_fix=condition_map_preeq_fix, + map_sim_fix=condition_map_sim_fix, + map_sim_var=condition_map_sim_var, + scale_map_preeq_fix=condition_scale_map_preeq_fix, + scale_map_sim_fix=condition_scale_map_sim_fix, + scale_map_sim_var=condition_scale_map_sim_var + ) + + return parameter_mapping_for_condition + + +def create_edatas( + amici_model: AmiciModel, + petab_problem: petab.Problem, + simulation_conditions: Union[pd.DataFrame, Dict] = None, +) -> List[amici.ExpData]: + """Create list of :class:`amici.amici.ExpData` objects for PEtab problem. + + :param amici_model: + AMICI model. + :param petab_problem: + Underlying PEtab problem. 
+
+    :param simulation_conditions:
+        Result of :func:`petab.get_simulation_conditions`. Can be provided to
+        save time if this has been obtained before.
+
+    :return:
+        List with one :class:`amici.amici.ExpData` per simulation condition,
+        with filled in timepoints and data.
+    """
+    if simulation_conditions is None:
+        simulation_conditions = \
+            petab_problem.get_simulation_conditions_from_measurement_df()
+
+    observable_ids = amici_model.getObservableIds()
+
+    measurement_groupvar = [petab.SIMULATION_CONDITION_ID]
+    if petab.PREEQUILIBRATION_CONDITION_ID in simulation_conditions:
+        measurement_groupvar.append(petab.PREEQUILIBRATION_CONDITION_ID)
+    measurement_dfs = dict(list(
+        petab_problem.measurement_df.groupby(measurement_groupvar)
+    ))
+
+    edatas = []
+    for _, condition in simulation_conditions.iterrows():
+        # Create amici.ExpData for each simulation
+        if petab.PREEQUILIBRATION_CONDITION_ID in condition:
+            measurement_index = (
+                condition.get(petab.SIMULATION_CONDITION_ID),
+                condition.get(petab.PREEQUILIBRATION_CONDITION_ID)
+            )
+        else:
+            measurement_index = condition.get(petab.SIMULATION_CONDITION_ID)
+        edata = create_edata_for_condition(
+            condition=condition,
+            amici_model=amici_model,
+            measurement_df=measurement_dfs[measurement_index],
+            petab_problem=petab_problem,
+            observable_ids=observable_ids,
+        )
+        edatas.append(edata)
+
+    return edatas
+
+
+def create_edata_for_condition(
+        condition: Union[Dict, pd.Series],
+        measurement_df: pd.DataFrame,
+        amici_model: AmiciModel,
+        petab_problem: petab.Problem,
+        observable_ids: List[str],
+) -> amici.ExpData:
+    """Get :class:`amici.amici.ExpData` for the given PEtab condition.
+
+    Sets timepoints, observed data and sigmas.
+
+    :param condition:
+        :class:`pandas.DataFrame` row with ``preequilibrationConditionId``
+        and ``simulationConditionId``.
+    :param measurement_df:
+        :class:`pandas.DataFrame` with measurements for the given condition.
+    :param amici_model:
+        AMICI model
+    :param petab_problem:
+        Underlying PEtab problem
+    :param observable_ids:
+        List of observable IDs
+
+    :return:
+        ExpData instance.
+ """ + if amici_model.nytrue != len(observable_ids): + raise AssertionError("Number of AMICI model observables does not " + "match number of PEtab observables.") + + # create an ExpData object + edata = amici.ExpData(amici_model) + edata.id = condition[SIMULATION_CONDITION_ID] + if condition.get(PREEQUILIBRATION_CONDITION_ID): + edata.id += "+" + condition.get(PREEQUILIBRATION_CONDITION_ID) + ########################################################################## + # enable initial parameters reinitialization + states_in_condition_table = [ + col for col in petab_problem.condition_df + if not pd.isna(petab_problem.condition_df.loc[ + condition[SIMULATION_CONDITION_ID], col]) + and element_is_state(petab_problem.sbml_model, col) + ] + if condition.get(PREEQUILIBRATION_CONDITION_ID) \ + and states_in_condition_table: + state_ids = amici_model.getStateIds() + state_idx_reinitalization = [state_ids.index(s) + for s in states_in_condition_table] + edata.reinitialization_state_idxs_sim = state_idx_reinitalization + logger.debug("Enabling state reinitialization for condition " + f"{condition.get(PREEQUILIBRATION_CONDITION_ID, '')} - " + f"{condition.get(SIMULATION_CONDITION_ID)} " + f"{states_in_condition_table}") + + ########################################################################## + # timepoints + + # find replicate numbers of time points + timepoints_w_reps = _get_timepoints_with_replicates( + df_for_condition=measurement_df) + edata.setTimepoints(timepoints_w_reps) + + ########################################################################## + # measurements and sigmas + y, sigma_y = _get_measurements_and_sigmas( + df_for_condition=measurement_df, timepoints_w_reps=timepoints_w_reps, + observable_ids=observable_ids) + edata.setObservedData(y.flatten()) + edata.setObservedDataStdDev(sigma_y.flatten()) + + return edata + + +def subset_dict(full: Dict[Any, Any], + *args: Collection[Any]) -> Iterator[Dict[Any, Any]]: + """Get subset of dictionary based on provided keys + + :param full: + Dictionary to subset + :param args: + Collections of keys to be contained in the different subsets + + :return: + subsetted dictionary + """ + for keys in args: + yield {key: val for (key, val) in full.items() if key in keys} + + +def _get_timepoints_with_replicates( + df_for_condition: pd.DataFrame) -> List[numbers.Number]: + """ + Get list of timepoints including replicate measurements + + :param df_for_condition: + PEtab measurement table subset for a single condition. + + :return: + Sorted list of timepoints, including multiple timepoints accounting + for replicate measurements. + """ + # create sorted list of all timepoints for which measurements exist + timepoints = sorted(df_for_condition[TIME].unique().astype(float)) + + # find replicate numbers of time points + timepoints_w_reps = [] + for time in timepoints: + # subselect for time + df_for_time = df_for_condition[ + df_for_condition.time.astype(float) == time + ] + # rep number is maximum over rep numbers for observables + n_reps = max(df_for_time.groupby( + [OBSERVABLE_ID, TIME]).size()) + # append time point n_rep times + timepoints_w_reps.extend([time] * n_reps) + + return timepoints_w_reps + + +def _get_measurements_and_sigmas( + df_for_condition: pd.DataFrame, + timepoints_w_reps: Sequence[numbers.Number], + observable_ids: Sequence[str], + ) -> Tuple[np.array, np.array]: + """ + Get measurements and sigmas + + Generate arrays with measurements and sigmas in AMICI format from a + PEtab measurement table subset for a single condition. 
+ + :param df_for_condition: + Subset of PEtab measurement table for one condition + + :param timepoints_w_reps: + Timepoints for which there exist measurements, including replicates + + :param observable_ids: + List of observable IDs for mapping IDs to indices. + + :return: + arrays for measurement and sigmas + """ + # prepare measurement matrix + y = np.full(shape=(len(timepoints_w_reps), len(observable_ids)), + fill_value=np.nan) + # prepare sigma matrix + sigma_y = y.copy() + + timepoints = sorted(df_for_condition[TIME].unique().astype(float)) + + for time in timepoints: + # subselect for time + df_for_time = df_for_condition[df_for_condition[TIME] == time] + time_ix_0 = timepoints_w_reps.index(time) + + # remember used time indices for each observable + time_ix_for_obs_ix = {} + + # iterate over measurements + for _, measurement in df_for_time.iterrows(): + # extract observable index + observable_ix = observable_ids.index(measurement[OBSERVABLE_ID]) + + # update time index for observable + if observable_ix in time_ix_for_obs_ix: + time_ix_for_obs_ix[observable_ix] += 1 + else: + time_ix_for_obs_ix[observable_ix] = time_ix_0 + + # fill observable and possibly noise parameter + y[time_ix_for_obs_ix[observable_ix], + observable_ix] = measurement[MEASUREMENT] + if isinstance(measurement.get(NOISE_PARAMETERS, None), + numbers.Number): + sigma_y[time_ix_for_obs_ix[observable_ix], + observable_ix] = measurement[NOISE_PARAMETERS] + return y, sigma_y + + +def rdatas_to_measurement_df( + rdatas: Sequence[amici.ReturnData], + model: AmiciModel, + measurement_df: pd.DataFrame) -> pd.DataFrame: + """ + Create a measurement dataframe in the PEtab format from the passed + ``rdatas`` and own information. + + :param rdatas: + A sequence of rdatas with the ordering of + :func:`petab.get_simulation_conditions`. + + :param model: + AMICI model used to generate ``rdatas``. + + :param measurement_df: + PEtab measurement table used to generate ``rdatas``. + + :return: + A dataframe built from the rdatas in the format of ``measurement_df``. + """ + simulation_conditions = petab.get_simulation_conditions( + measurement_df) + + observable_ids = model.getObservableIds() + rows = [] + # iterate over conditions + for (_, condition), rdata in zip(simulation_conditions.iterrows(), rdatas): + # current simulation matrix + y = rdata.y + # time array used in rdata + t = list(rdata.ts) + + # extract rows for condition + cur_measurement_df = petab.get_rows_for_condition( + measurement_df, condition) + + # iterate over entries for the given condition + # note: this way we only generate a dataframe entry for every + # row that existed in the original dataframe. if we want to + # e.g. have also timepoints non-existent in the original file, + # we need to instead iterate over the rdata['y'] entries + for _, row in cur_measurement_df.iterrows(): + # copy row + row_sim = copy.deepcopy(row) + + # extract simulated measurement value + timepoint_idx = t.index(row[TIME]) + observable_idx = observable_ids.index(row[OBSERVABLE_ID]) + measurement_sim = y[timepoint_idx, observable_idx] + + # change measurement entry + row_sim[MEASUREMENT] = measurement_sim + + rows.append(row_sim) + + return pd.DataFrame(rows) + + +def rdatas_to_simulation_df( + rdatas: Sequence[amici.ReturnData], + model: AmiciModel, + measurement_df: pd.DataFrame) -> pd.DataFrame: + """Create a PEtab simulation dataframe from + :class:`amici.amici.ReturnData` s. 
+ + See :func:`rdatas_to_measurement_df` for details, only that model outputs + will appear in column ``simulation`` instead of ``measurement``.""" + + df = rdatas_to_measurement_df(rdatas=rdatas, model=model, + measurement_df=measurement_df) + + return df.rename(columns={MEASUREMENT: SIMULATION}) diff --git a/python/sdist/amici/petab_simulate.py b/python/sdist/amici/petab_simulate.py deleted file mode 120000 index 350628c21e..0000000000 --- a/python/sdist/amici/petab_simulate.py +++ /dev/null @@ -1 +0,0 @@ -../../amici/petab_simulate.py \ No newline at end of file diff --git a/python/sdist/amici/petab_simulate.py b/python/sdist/amici/petab_simulate.py new file mode 100644 index 0000000000..71744ff51b --- /dev/null +++ b/python/sdist/amici/petab_simulate.py @@ -0,0 +1,110 @@ +""" +PEtab Simulate +-------------- +Functionality related to the use of AMICI for simulation with PEtab's +Simulator class. + +Use cases: + +- generate data for use with PEtab's plotting methods +- generate synthetic data +""" + +import inspect +import sys +from typing import Callable + +import pandas as pd + +from amici import SensitivityMethod_none +from amici import AmiciModel +from amici.petab_import import import_petab_problem +from amici.petab_objective import (simulate_petab, + rdatas_to_measurement_df, + RDATAS) +import petab + +AMICI_MODEL = 'amici_model' +AMICI_SOLVER = 'solver' +MODEL_NAME = 'model_name' +MODEL_OUTPUT_DIR = 'model_output_dir' + +PETAB_PROBLEM = 'petab_problem' + + +class PetabSimulator(petab.simulate.Simulator): + """Implementation of the PEtab `Simulator` class that uses AMICI.""" + def __init__(self, *args, amici_model: AmiciModel = None, **kwargs): + super().__init__(*args, **kwargs) + self.amici_model = amici_model + + def simulate_without_noise(self, **kwargs) -> pd.DataFrame: + """ + See :py:func:`petab.simulate.Simulator.simulate()` docstring. + + Additional keyword arguments can be supplied to specify arguments for + the AMICI PEtab import, simulate, and export methods. See the + docstrings for the respective methods for argument options: + - :py:func:`amici.petab_import.import_petab_problem`, and + - :py:func:`amici.petab_objective.simulate_petab`. + + Note that some arguments are expected to have already been specified + in the Simulator constructor (including the PEtab problem). + """ + if AMICI_MODEL in {*kwargs, *dir(self)} and ( + any(k in kwargs for k in + inspect.signature(import_petab_problem).parameters)): + print('Arguments related to the PEtab import are unused if ' + f'`{AMICI_MODEL}` is specified, or the ' + '`PetabSimulator.simulate()` method was previously called.') + + kwargs[PETAB_PROBLEM] = self.petab_problem + + # The AMICI model instance for the PEtab problem is saved in the state, + # such that it need not be supplied with each request for simulated + # data. Any user-supplied AMICI model will overwrite the model saved + # in the state. + if AMICI_MODEL not in kwargs: + if self.amici_model is None: + if MODEL_NAME not in kwargs: + kwargs[MODEL_NAME] = AMICI_MODEL + # If the model name is the name of a module that is already + # cached, it can cause issues during import. 
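+                    # (An already-imported module of the same name would
+                    # shadow the freshly built extension, so random digits
+                    # are appended until the name is unused.)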
+ while kwargs[MODEL_NAME] in sys.modules: + kwargs[MODEL_NAME] += str(self.rng.integers(10)) + if MODEL_OUTPUT_DIR not in kwargs: + kwargs[MODEL_OUTPUT_DIR] = self.working_dir + self.amici_model = subset_call(import_petab_problem, kwargs) + kwargs[AMICI_MODEL] = self.amici_model + self.amici_model = kwargs[AMICI_MODEL] + + if AMICI_SOLVER not in kwargs: + kwargs[AMICI_SOLVER] = self.amici_model.getSolver() + kwargs[AMICI_SOLVER].setSensitivityMethod( + SensitivityMethod_none) + + result = subset_call(simulate_petab, kwargs) + return rdatas_to_measurement_df(result[RDATAS], + self.amici_model, + self.petab_problem.measurement_df) + + +def subset_call(method: Callable, kwargs: dict): + """ + Helper function to call a method with the intersection of arguments in the + method signature and the supplied arguments. + + :param method: + The method to be called. + :param kwargs: + The argument superset as a dictionary, similar to `**kwargs` in method + signatures. + :return: + The output of `method`, called with the applicable arguments in + `kwargs`. + """ + method_args = inspect.signature(method).parameters + subset_kwargs = {k: v + for k, v in kwargs.items() + if k in method_args} + return method(**subset_kwargs) diff --git a/python/sdist/amici/plotting.py b/python/sdist/amici/plotting.py deleted file mode 120000 index 5195d2f4c7..0000000000 --- a/python/sdist/amici/plotting.py +++ /dev/null @@ -1 +0,0 @@ -../../amici/plotting.py \ No newline at end of file diff --git a/python/sdist/amici/plotting.py b/python/sdist/amici/plotting.py new file mode 100644 index 0000000000..d2917de9fe --- /dev/null +++ b/python/sdist/amici/plotting.py @@ -0,0 +1,90 @@ +""" +Plotting +-------- +Plotting related functions +""" +from . import ReturnDataView, Model + +import matplotlib.pyplot as plt +from matplotlib.axes import Axes +from typing import Optional, Iterable + + +def plotStateTrajectories( + rdata: ReturnDataView, + state_indices: Optional[Iterable[int]] = None, + ax: Optional[Axes] = None, + model: Model = None +) -> None: + """ + Plot state trajectories + + :param rdata: + AMICI simulation results as returned by + :func:`amici.amici.runAmiciSimulation` + + :param state_indices: + Indices of states for which trajectories are to be plotted + + :param ax: + matplotlib Axes instance to plot into + + :param model: + amici model instance + """ + if not ax: + fig, ax = plt.subplots() + if not state_indices: + state_indices = range(rdata['x'].shape[1]) + for ix in state_indices: + if model is None: + label = f'$x_{{{ix}}}$' + elif model.getStateNames()[ix]: + label = model.getStateNames()[ix] + else: + label = model.getStateIds()[ix] + ax.plot(rdata['t'], rdata['x'][:, ix], label=label) + ax.set_xlabel('$t$') + ax.set_ylabel('$x(t)$') + ax.legend() + ax.set_title('State trajectories') + + +def plotObservableTrajectories( + rdata: ReturnDataView, + observable_indices: Optional[Iterable[int]] = None, + ax: Optional[Axes] = None, + model: Model = None +) -> None: + """ + Plot observable trajectories + + :param rdata: + AMICI simulation results as returned by + :func:`amici.amici.runAmiciSimulation` + + :param observable_indices: + Indices of observables for which trajectories are to be plotted + + :param ax: + matplotlib Axes instance to plot into + + :param model: + amici model instance + """ + if not ax: + fig, ax = plt.subplots() + if not observable_indices: + observable_indices = range(rdata['y'].shape[1]) + for iy in observable_indices: + if model is None: + label = f'$y_{{{iy}}}$' + elif 
model.getObservableNames()[iy]: + label = model.getObservableNames()[iy] + else: + label = model.getObservableIds()[iy] + ax.plot(rdata['t'], rdata['y'][:, iy], label=label) + ax.set_xlabel('$t$') + ax.set_ylabel('$y(t)$') + ax.legend() + ax.set_title('Observable trajectories') diff --git a/python/sdist/amici/pysb_import.py b/python/sdist/amici/pysb_import.py deleted file mode 120000 index f3ad0bdb51..0000000000 --- a/python/sdist/amici/pysb_import.py +++ /dev/null @@ -1 +0,0 @@ -../../amici/pysb_import.py \ No newline at end of file diff --git a/python/sdist/amici/pysb_import.py b/python/sdist/amici/pysb_import.py new file mode 100644 index 0000000000..0929283877 --- /dev/null +++ b/python/sdist/amici/pysb_import.py @@ -0,0 +1,1415 @@ +""" +PySB Import +------------ +This module provides all necessary functionality to import a model specified +in the :class:`pysb.core.Model` format. +""" + +import itertools +import logging +import os +import sys +from pathlib import Path +from typing import (Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, + Union) + +import numpy as np +import pysb +import pysb.bng +import pysb.pattern +import sympy as sp + +from .import_utils import (_get_str_symbol_identifiers, + _parse_special_functions, + generate_measurement_symbol, + noise_distribution_to_cost_function, + noise_distribution_to_observable_transformation) +from .logging import get_logger, log_execution_time, set_log_level +from .ode_export import (Constant, Expression, LogLikelihoodY, ODEExporter, + ODEModel, Observable, Parameter, SigmaY, State) + +CL_Prototype = Dict[str, Dict[str, Any]] +ConservationLaw = Dict[str, Union[Dict, str, sp.Basic]] + +logger = get_logger(__name__, logging.ERROR) + + +def pysb2amici( + model: pysb.Model, + output_dir: Optional[Union[str, Path]] = None, + observables: List[str] = None, + constant_parameters: List[str] = None, + sigmas: Dict[str, str] = None, + noise_distributions: Optional[Dict[str, Union[str, Callable]]] = None, + verbose: Union[int, bool] = False, + assume_pow_positivity: bool = False, + compiler: str = None, + compute_conservation_laws: bool = True, + compile: bool = True, + simplify: Callable = lambda x: sp.powsimp(x, deep=True), + # Do not enable by default without testing. + # See https://github.com/AMICI-dev/AMICI/pull/1672 + cache_simplify: bool = False, + generate_sensitivity_code: bool = True, + model_name: Optional[str] = None, +): + r""" + Generate AMICI C++ files for the provided model. + + .. warning:: + **PySB models with Compartments** + + When importing a PySB model with ``pysb.Compartment``\ s, BioNetGen + scales reaction fluxes with the compartment size. Instead of using the + respective symbols, the compartment size Parameter or Expression is + evaluated when generating equations. This may lead to unexpected + results if the compartment size parameter is changed for AMICI + simulations. 
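+
+    A minimal usage sketch (hypothetical model and argument values)::
+
+        from amici.pysb_import import pysb2amici
+
+        pysb2amici(my_pysb_model, output_dir="my_model_dir",
+                   observables=["obs_total"],
+                   constant_parameters=["k_fixed"])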
+ + :param model: + pysb model, :attr:`pysb.Model.name` will determine the name of the + generated module + + :param output_dir: + see :meth:`amici.ode_export.ODEExporter.set_paths` + + :param observables: + list of :class:`pysb.core.Expression` or :class:`pysb.core.Observable` + names in the provided model that should be mapped to observables + + :param sigmas: + dict of :class:`pysb.core.Expression` names that should be mapped to + sigmas + + :param noise_distributions: + dict with names of observable Expressions as keys and a noise type + identifier, or a callable generating a custom noise formula string + (see :py:func:`amici.import_utils.noise_distribution_to_cost_function` + ). If nothing is passed for some observable id, a normal model is + assumed as default. + + :param constant_parameters: + list of :class:`pysb.core.Parameter` names that should be mapped as + fixed parameters + + :param verbose: verbosity level for logging, True/False default to + :attr:`logging.DEBUG`/:attr:`logging.ERROR` + + :param assume_pow_positivity: + if set to ``True``, a special pow function is used to avoid problems + with state variables that may become negative due to numerical + errors + + :param compiler: + distutils/setuptools compiler selection to build the python + extension + + :param compute_conservation_laws: + if set to ``True``, conservation laws are automatically computed and + applied such that the state-jacobian of the ODE right-hand-side has + full rank. This option should be set to ``True`` when using the Newton + algorithm to compute steadystates + + :param compile: + If ``True``, build the python module for the generated model. If false, + just generate the source code. + + :param simplify: + see :attr:`amici.ODEModel._simplify` + + :param cache_simplify: + see :func:`amici.ODEModel.__init__` + Note that there are possible issues with PySB models: + https://github.com/AMICI-dev/AMICI/pull/1672 + + :param generate_sensitivity_code: + if set to ``False``, code for sensitivity computation will not be + generated + + :param model_name: + Name for the generated model module. If None, :attr:`pysb.Model.name` + will be used. + """ + if observables is None: + observables = [] + if constant_parameters is None: + constant_parameters = [] + + if sigmas is None: + sigmas = {} + + model_name = model_name or model.name + + set_log_level(logger, verbose) + ode_model = ode_model_from_pysb_importer( + model, constant_parameters=constant_parameters, + observables=observables, sigmas=sigmas, + noise_distributions=noise_distributions, + compute_conservation_laws=compute_conservation_laws, + simplify=simplify, + cache_simplify=cache_simplify, + verbose=verbose, + ) + exporter = ODEExporter( + ode_model, + outdir=output_dir, + model_name=model_name, + verbose=verbose, + assume_pow_positivity=assume_pow_positivity, + compiler=compiler, + generate_sensitivity_code=generate_sensitivity_code + ) + exporter.generate_model_code() + + if compile: + exporter.compile_model() + + +@log_execution_time('creating ODE model', logger) +def ode_model_from_pysb_importer( + model: pysb.Model, + constant_parameters: List[str] = None, + observables: List[str] = None, + sigmas: Dict[str, str] = None, + noise_distributions: Optional[Dict[str, Union[str, Callable]]] = None, + compute_conservation_laws: bool = True, + simplify: Callable = sp.powsimp, + # Do not enable by default without testing. 
+ # See https://github.com/AMICI-dev/AMICI/pull/1672 + cache_simplify: bool = False, + verbose: Union[int, bool] = False, +) -> ODEModel: + """ + Creates an :class:`amici.ODEModel` instance from a :class:`pysb.Model` + instance. + + :param model: + see :func:`amici.pysb_import.pysb2amici` + + :param constant_parameters: + see :func:`amici.pysb_import.pysb2amici` + + :param observables: + see :func:`amici.pysb_import.pysb2amici` + + :param sigmas: + dict with names of observable Expressions as keys and names of sigma + Expressions as value sigma + + :param noise_distributions: + see :func:`amici.pysb_import.pysb2amici` + + :param compute_conservation_laws: + see :func:`amici.pysb_import.pysb2amici` + + :param simplify: + see :attr:`amici.ODEModel._simplify` + + :param cache_simplify: + see :func:`amici.ODEModel.__init__` + Note that there are possible issues with PySB models: + https://github.com/AMICI-dev/AMICI/pull/1672 + + :param verbose: verbosity level for logging, True/False default to + :attr:`logging.DEBUG`/:attr:`logging.ERROR` + + :return: + New ODEModel instance according to pysbModel + """ + + ode = ODEModel( + verbose=verbose, + simplify=simplify, + cache_simplify=cache_simplify, + ) + + if constant_parameters is None: + constant_parameters = [] + + if observables is None: + observables = [] + + if sigmas is None: + sigmas = {} + + pysb.bng.generate_equations(model, verbose=verbose) + + _process_pysb_species(model, ode) + _process_pysb_parameters(model, ode, constant_parameters) + if compute_conservation_laws: + _process_pysb_conservation_laws(model, ode) + _process_pysb_observables(model, ode, observables, sigmas, + noise_distributions) + _process_pysb_expressions(model, ode, observables, sigmas, + noise_distributions) + ode._has_quadratic_nllh = not noise_distributions or all( + noise_distr in ['normal', 'lin-normal', 'log-normal', 'log10-normal'] + for noise_distr in noise_distributions.values() + ) + + _process_stoichiometric_matrix(model, ode, constant_parameters) + + ode.generate_basic_variables() + + return ode + + +@log_execution_time('processing PySB stoich. 
matrix', logger) +def _process_stoichiometric_matrix(pysb_model: pysb.Model, + ode_model: ODEModel, + constant_parameters: List[str]) -> None: + + """ + Exploits the PySB stoichiometric matrix to generate xdot derivatives + + :param pysb_model: + pysb model instance + + :param ode_model: + ODEModel instance + + :param constant_parameters: + list of constant parameters + """ + + x = ode_model.sym('x') + w = list(ode_model.sym('w')) + p = list(ode_model.sym('p')) + x_rdata = list(ode_model.sym('x_rdata')) + + n_x = len(x) + n_w = len(w) + n_p = len(p) + n_r = len(pysb_model.reactions) + + solver_index = ode_model.get_solver_indices() + dflux_dx_dict = {} + dflux_dw_dict = {} + dflux_dp_dict = {} + + w_idx = dict() + p_idx = dict() + wx_idx = dict() + + def get_cached_index(symbol, sarray, index_cache): + idx = index_cache.get(symbol, None) + if idx is not None: + return idx + idx = sarray.index(symbol) + index_cache[symbol] = idx + return idx + + for ir, rxn in enumerate(pysb_model.reactions): + for ix in np.unique(rxn['reactants']): + idx = solver_index.get(ix, None) + if idx is not None: + # species + values = dflux_dx_dict + else: + # conservation law + idx = get_cached_index(x_rdata[ix], w, wx_idx) + values = dflux_dw_dict + + values[(ir, idx)] = sp.diff(rxn['rate'], x_rdata[ix]) + + # typically <= 3 free symbols in rate, we already account for + # species above so we only need to account for propensity, which + # can only be a parameter or expression + for fs in rxn['rate'].free_symbols: + # dw + if isinstance(fs, pysb.Expression): + var = w + idx_cache = w_idx + values = dflux_dw_dict + # dp + elif isinstance(fs, pysb.Parameter): + if fs.name in constant_parameters: + continue + var = p + idx_cache = p_idx + values = dflux_dp_dict + else: + continue + + idx = get_cached_index(fs, var, idx_cache) + values[(ir, idx)] = sp.diff(rxn['rate'], fs) + + dflux_dx = sp.ImmutableSparseMatrix(n_r, n_x, dflux_dx_dict) + dflux_dw = sp.ImmutableSparseMatrix(n_r, n_w, dflux_dw_dict) + dflux_dp = sp.ImmutableSparseMatrix(n_r, n_p, dflux_dp_dict) + + # use dok format to convert numeric csc to sparse symbolic + S = sp.ImmutableSparseMatrix( + n_x, n_r, # don't use shape here as we are eliminating rows + pysb_model.stoichiometry_matrix[ + np.asarray(list(solver_index.keys())),: + ].todok() + ) + # don't use `.dot` since it's awfully slow + ode_model._eqs['dxdotdx_explicit'] = S*dflux_dx + ode_model._eqs['dxdotdw'] = S*dflux_dw + ode_model._eqs['dxdotdp_explicit'] = S*dflux_dp + + +@log_execution_time('processing PySB species', logger) +def _process_pysb_species(pysb_model: pysb.Model, + ode_model: ODEModel) -> None: + """ + Converts pysb Species into States and adds them to the ODEModel instance + + :param pysb_model: + pysb model instance + + :param ode_model: + ODEModel instance + """ + xdot = sp.Matrix(pysb_model.odes) + + for ix, specie in enumerate(pysb_model.species): + init = sp.sympify('0.0') + for ic in pysb_model.odes.model.initials: + if pysb.pattern.match_complex_pattern( + ic.pattern, specie, exact=True): + # we don't want to allow expressions in initial conditions + if ic.value in pysb_model.expressions: + init = pysb_model.expressions[ic.value.name].expand_expr() + else: + init = ic.value + + ode_model.add_component( + State( + sp.Symbol(f'__s{ix}'), + f'{specie}', + init, + xdot[ix] + ) + ) + logger.debug(f'Finished Processing PySB species ') + + +@log_execution_time('processing PySB parameters', logger) +def _process_pysb_parameters(pysb_model: pysb.Model, + ode_model: ODEModel, + 
constant_parameters: List[str]) -> None: + """ + Converts pysb parameters into Parameters or Constants and adds them to + the ODEModel instance + + :param pysb_model: + pysb model + + :param constant_parameters: + list of Parameters that should be constants + + :param ode_model: + ODEModel instance + """ + for par in pysb_model.parameters: + if par.name in constant_parameters: + comp = Constant + else: + comp = Parameter + + ode_model.add_component( + comp(par, f'{par.name}', par.value) + ) + + +@log_execution_time('processing PySB expressions', logger) +def _process_pysb_expressions( + pysb_model: pysb.Model, + ode_model: ODEModel, + observables: List[str], + sigmas: Dict[str, str], + noise_distributions: Optional[Dict[str, Union[str, Callable]]] = None, +) -> None: + r""" + Converts pysb expressions/observables into Observables (with + corresponding standard deviation SigmaY and LogLikelihoodY) or + Expressions and adds them to the ODEModel instance + + :param pysb_model: + pysb model + + :param observables: + list of names of :class`pysb.Expression`\ s or + :class:`pysb.Observable`\ s that are to be mapped to ODEModel + observables + + :param sigmas: + dict with names of observable pysb.Expressions/pysb.Observables + names as keys and names of sigma pysb.Expressions as values + + :param noise_distributions: + see :func:`amici.pysb_import.pysb2amici` + + :param ode_model: + ODEModel instance + """ + # we no longer expand expressions here. pysb/bng guarantees that + # they are ordered according to their dependency and we can + # evaluate them sequentially without reordering. Important to make + # sure that observables are processed first though. + + # we use _constant and _dynamic functions to get access to derived + # expressions that are otherwise only accessible as private attribute + for expr in pysb_model.expressions_constant(include_derived=True)\ + | pysb_model.expressions_dynamic(include_derived=True): + if any( + isinstance(symbol, pysb.Tag) + for symbol in expr.expand_expr().free_symbols + ): + # we only need explicit instantiations of expressions with tags, + # which are defined in the derived expressions. The abstract + # expressions are not needed and lead to compilation errors so + # we skip them. 
+ continue + _add_expression(expr, expr.name, expr.expr, + pysb_model, ode_model, observables, sigmas, + noise_distributions) + + +def _add_expression( + sym: sp.Symbol, + name: str, + expr: sp.Basic, + pysb_model: pysb.Model, + ode_model: ODEModel, + observables: List[str], + sigmas: Dict[str, str], + noise_distributions: Optional[Dict[str, Union[str, Callable]]] = None, +): + """ + Adds expressions to the ODE model given and adds observables/sigmas if + appropriate + + :param sym: + symbol how the expression is referenced in the model + + :param name: + name of the expression + + :param expr: + symbolic expression that the symbol refers to + + :param pysb_model: + see :py:func:`_process_pysb_expressions` + + :param observables: + see :py:func:`_process_pysb_expressions` + + :param sigmas: + see :py:func:`_process_pysb_expressions` + + :param noise_distributions: + see :py:func:`amici.pysb_import.pysb2amici` + + :param ode_model: + see :py:func:`_process_pysb_expressions` + """ + ode_model.add_component( + Expression(sym, name, _parse_special_functions(expr)) + ) + + if name in observables: + noise_dist = noise_distributions.get(name, 'normal') \ + if noise_distributions else 'normal' + + y = sp.Symbol(f'{name}') + trafo = noise_distribution_to_observable_transformation(noise_dist) + obs = Observable(y, name, sym, transformation=trafo) + ode_model.add_component(obs) + + sigma_name, sigma_value = _get_sigma_name_and_value( + pysb_model, name, sigmas + ) + + sigma = sp.Symbol(sigma_name) + ode_model.add_component(SigmaY(sigma, f'{sigma_name}', sigma_value)) + + + cost_fun_str = noise_distribution_to_cost_function(noise_dist)(name) + my = generate_measurement_symbol(obs.get_id()) + cost_fun_expr = sp.sympify(cost_fun_str, + locals=dict(zip( + _get_str_symbol_identifiers(name), + (y, my, sigma)))) + ode_model.add_component( + LogLikelihoodY( + sp.Symbol(f'llh_{name}'), + f'llh_{name}', + cost_fun_expr + ) + ) + + +def _get_sigma_name_and_value( + pysb_model: pysb.Model, + obs_name: str, + sigmas: Dict[str, str]) -> Tuple[str, sp.Basic]: + """ + Tries to extract standard deviation symbolic identifier and formula + for a given observable name from the pysb model and if no specification is + available sets default values + + :param pysb_model: + pysb model + + :param obs_name: + name of the observable + + :param sigmas: + dict of :class:`pysb.core.Expression` names that should be mapped to + sigmas + + :return: + tuple containing symbolic identifier and formula for the specified + observable + """ + if obs_name in sigmas: + sigma_name = sigmas[obs_name] + try: + # find corresponding Expression instance + sigma_expr = next(x for x in pysb_model.expressions + if x.name == sigma_name) + except StopIteration: + raise ValueError(f'value of sigma {obs_name} is not a ' + f'valid expression.') + sigma_value = sigma_expr.expand_expr() + else: + sigma_name = f'sigma_{obs_name}' + sigma_value = sp.sympify(1.0) + + return sigma_name, sigma_value + + +@log_execution_time('processing PySB observables', logger) +def _process_pysb_observables( + pysb_model: pysb.Model, + ode_model: ODEModel, + observables: List[str], + sigmas: Dict[str, str], + noise_distributions: Optional[Dict[str, Union[str, Callable]]] = None, +) -> None: + """ + Converts :class:`pysb.core.Observable` into + :class:`ODEModel.Expressions` and adds them to the ODEModel instance + + :param pysb_model: + pysb model + + :param ode_model: + ODEModel instance + + :param observables: + list of names of pysb.Expressions or pysb.Observables that are 
to be + mapped to ODEModel observables + + :param sigmas: + dict with names of observable pysb.Expressions/pysb.Observables + names as keys and names of sigma pysb.Expressions as values + + :param noise_distributions: + see :func:`amici.pysb_import.pysb2amici` + """ + # only add those pysb observables that occur in the added + # Observables as expressions + for obs in pysb_model.observables: + _add_expression(obs, obs.name, obs.expand_obs(), + pysb_model, ode_model, observables, sigmas, + noise_distributions) + + +@log_execution_time('computing PySB conservation laws', logger) +def _process_pysb_conservation_laws(pysb_model: pysb.Model, + ode_model: ODEModel) -> None: + """ + Removes species according to conservation laws to ensure that the + jacobian has full rank + + :param pysb_model: + pysb model + + :param ode_model: + ODEModel instance + """ + + monomers_without_conservation_law = set() + for rule in pysb_model.rules: + monomers_without_conservation_law |= \ + _get_unconserved_monomers(rule, pysb_model) + + monomers_without_conservation_law |= \ + _compute_monomers_with_fixed_initial_conditions(pysb_model) + + cl_prototypes = _generate_cl_prototypes( + monomers_without_conservation_law, pysb_model, ode_model + ) + conservation_laws = _construct_conservation_from_prototypes( + cl_prototypes, pysb_model + ) + _add_conservation_for_constant_species(ode_model, conservation_laws) + + _flatten_conservation_laws(conservation_laws) + + for cl in conservation_laws: + ode_model.add_conservation_law(**cl) + + +def _compute_monomers_with_fixed_initial_conditions( + pysb_model: pysb.Model) -> Set[str]: + """ + Computes the set of monomers in a model with species that have fixed + initial conditions + + :param pysb_model: pysb model + + :return: + set of monomer names with fixed initial conditions + """ + monomers_with_fixed_initial_conditions = set() + + for monomer in pysb_model.monomers: + # check if monomer has an initial condition that is fixed (means + # that corresponding state is constant and all conservation + # laws are broken) + if any([ + ic.fixed # true or false + for ic in pysb_model.initials + if monomer.name in extract_monomers(ic.pattern) + ]): + monomers_with_fixed_initial_conditions |= {monomer.name} + + return monomers_with_fixed_initial_conditions + + +def _generate_cl_prototypes(excluded_monomers: Iterable[str], + pysb_model: pysb.Model, + ode_model: ODEModel) -> CL_Prototype: + """ + Constructs a dict that contains preprocessed information for the + construction of conservation laws + + :param excluded_monomers: + list of monomer names for which no prototypes + should be computed + + :param pysb_model: + pysb model + + :param ode_model: + ODEModel instance + + :return: + dict('monomer.name':{'possible_indices': ..., 'target_indices': ...} + """ + cl_prototypes = dict() + + _compute_possible_indices(cl_prototypes, pysb_model, ode_model, + excluded_monomers) + _compute_dependency_idx(cl_prototypes) + _compute_target_index(cl_prototypes, ode_model) + + return cl_prototypes + + +def _compute_possible_indices(cl_prototypes: CL_Prototype, + pysb_model: pysb.Model, + ode_model: ODEModel, + excluded_monomers: Iterable[str]) -> None: + """ + Computes viable choices for target_index, ie species that could be + removed and replaced by an algebraic expression according to the + conservation law + + :param cl_prototypes: + dict in which possible indices will be written + + :param pysb_model: + pysb model + + :param ode_model: + ODEModel instance + + :param excluded_monomers: + monomers 
for which no conservation laws will be
+        computed
+    """
+    for monomer in pysb_model.monomers:
+        if monomer.name not in excluded_monomers:
+            compartments = [
+                str(mp.compartment)  # string based comparison as
+                # compartments are not hashable
+                for cp in pysb_model.species
+                for mp in cp.monomer_patterns
+                if mp.monomer.name == monomer.name
+            ]
+
+            if len(set(compartments)) > 1:
+                raise ValueError('Conservation laws involving species in '
+                                 'multiple compartments are currently not '
+                                 'supported! Please run pysb2amici with '
+                                 'compute_conservation_laws=False')
+                # TODO: implement this, multiply species by the volume of
+                # their respective compartment and allow total_cl to depend
+                # on parameters + constants and update the respective symbolic
+                # derivative accordingly
+
+            prototype = dict()
+            prototype['possible_indices'] = [
+                ix
+                for ix, specie in enumerate(pysb_model.species)
+                if monomer.name in extract_monomers(specie)
+                and not ode_model.state_is_constant(ix)
+            ]
+
+            prototype['species_count'] = len(
+                prototype['possible_indices']
+            )
+
+            if prototype['possible_indices']:
+                cl_prototypes[monomer.name] = prototype
+
+
+def _compute_dependency_idx(cl_prototypes: CL_Prototype) -> None:
+    """
+    Compute connecting species; this allows us to efficiently compute
+    whether a conservation law would induce a cyclic dependency.
+    Adds a 'dependency_idx' field to the prototype dict, which itself is a
+    dict where keys correspond to indices that, when used as target index,
+    yield dependencies on the conservation laws of the monomers in the
+    respective values.
+
+    :param cl_prototypes:
+        dict in which dependency indices will be written
+    """
+    for monomer_i, prototype_i in cl_prototypes.items():
+        if 'dependency_idx' not in prototype_i:
+            prototype_i['dependency_idx'] = dict()
+
+        for monomer_j, prototype_j in cl_prototypes.items():
+            if monomer_i == monomer_j:
+                continue
+
+            if 'dependency_idx' not in prototype_j:
+                prototype_j['dependency_idx'] = dict()
+
+            idx_overlap = set(prototype_i['possible_indices']).intersection(
+                set(prototype_j['possible_indices'])
+            )
+            if len(idx_overlap) == 0:
+                continue
+
+            for idx in idx_overlap:
+                if idx not in prototype_i['dependency_idx']:
+                    prototype_i['dependency_idx'][idx] = set()
+
+                if idx not in prototype_j['dependency_idx']:
+                    prototype_j['dependency_idx'][idx] = set()
+
+                prototype_i['dependency_idx'][idx] |= {monomer_j}
+                prototype_j['dependency_idx'][idx] |= {monomer_i}
+
+
+def _compute_target_index(cl_prototypes: CL_Prototype,
+                          ode_model: ODEModel) -> None:
+    """
+    Computes the target index for every monomer
+
+    :param cl_prototypes:
+        dict that contains possible indices for every monomer
+
+    :param ode_model:
+        ODEModel instance
+    """
+    possible_indices = list(set(itertools.chain(*[
+        cl_prototypes[monomer]['possible_indices']
+        for monomer in cl_prototypes
+    ])))
+
+    # Note: currently this function is supposed to also count appearances in
+    # expressions. However, expressions are currently still empty, as they
+    # are only populated from conservation laws at a later stage. If there
+    # are many state-heavy expressions in the model (which should not be the
+    # case for mass-action kinetics), this may lead to suboptimal results
+    # and could be improved.
+ # As this would require substantial code shuffling, this will only be + # fixed if this becomes an actual problem + appearance_counts = ode_model.get_appearance_counts(possible_indices) + + # in this initial guess we ignore the cost of having cyclic dependencies + # between conservation laws + for monomer in cl_prototypes: + prototype = cl_prototypes[monomer] + # extract monomer specific appearance counts + prototype['appearance_counts'] = \ + [ + appearance_counts[possible_indices.index(idx)] + for idx in prototype['possible_indices'] + ] + # select target index as possible index with minimal appearance count + if len(prototype['appearance_counts']) == 0: + raise RuntimeError(f'Failed to compute conservation law for ' + f'monomer {monomer}') + + idx = np.argmin(prototype['appearance_counts']) + + # remove entries from possible indices and appearance counts so we + # do not consider them again in later iterations + prototype['target_index'] = prototype['possible_indices'].pop(idx) + prototype['appearance_count'] = prototype['appearance_counts'].pop(idx) + + # this is only an approximation as the effective species count + # of other conservation laws may also be affected by the chosen + # target index. As long as the number of unique monomers in + # multimers has a low upper bound and the species count does not + # vary too much across conservation laws, this approximation + # should be fine + prototype['fillin'] = \ + prototype['appearance_count'] * prototype['species_count'] + + # we might end up with the same index for multiple monomers, so loop until + # we have a set of unique target indices + while not _cl_prototypes_are_valid(cl_prototypes): + _greedy_target_index_update(cl_prototypes) + + +def _cl_prototypes_are_valid(cl_prototypes: CL_Prototype) -> bool: + """ + Checks consistency of cl_prototypes by asserting that target indices + are unique and there are no cyclic dependencies + + :param cl_prototypes: + dict that contains dependency and target indexes for + every monomer + """ + # target indices are unique + if len(cl_prototypes) != len(set(_get_target_indices(cl_prototypes))): + return False + # conservation law dependencies are cycle free + if any( + _cl_has_cycle(monomer, cl_prototypes) + for monomer in cl_prototypes + ): + return False + + return True + + +def _cl_has_cycle(monomer: str, cl_prototypes: CL_Prototype) -> bool: + """ + Checks whether monomer has a conservation law that is part of a + cyclic dependency + + :param monomer: + name of monomer for which conservation law is to be checked + + :param cl_prototypes: + dict that contains dependency and target indexes for every monomer + + :return: + boolean indicating whether the conservation law is cyclic + """ + + prototype = cl_prototypes[monomer] + + if prototype['target_index'] not in prototype['dependency_idx']: + return False + + visited = [monomer] + root = monomer + return any( + _is_in_cycle( + connecting_monomer, + cl_prototypes, + visited, + root + ) + for connecting_monomer in prototype['dependency_idx'][ + prototype['target_index'] + ] + ) + + +def _is_in_cycle(monomer: str, + cl_prototypes: CL_Prototype, + visited: List[str], + root: str) -> bool: + """ + Recursively checks for cycles in conservation law dependencies via + Depth First Search + + :param monomer: + current location in cl dependency graph + + :param cl_prototypes: + dict that contains dependency and target indexes for + every monomer + + :param visited: + history of visited monomers with conservation laws + + :param root: + monomer at which 
the cycle search was started
+
+    :return:
+        boolean indicating whether the specified monomer is part of a cyclic
+        conservation law
+
+    """
+    if monomer == root:
+        return True  # we found a cycle and root is part of it
+
+    if monomer in visited:
+        return False  # we found a cycle but root is not part of it
+
+    visited.append(monomer)
+
+    prototype = cl_prototypes[monomer]
+
+    if prototype['target_index'] not in prototype['dependency_idx']:
+        return False
+
+    return any(
+        _is_in_cycle(
+            connecting_monomer,
+            cl_prototypes,
+            visited,
+            root
+        )
+        for connecting_monomer in prototype['dependency_idx'][
+            prototype['target_index']
+        ]
+    )
+
+
+def _greedy_target_index_update(cl_prototypes: CL_Prototype) -> None:
+    """
+    Computes unique target indices for conservation laws from possible
+    indices such that the expected fill-in in symbolic derivatives is
+    minimized
+
+    :param cl_prototypes:
+        dict that contains possible indices and non-unique target indices
+        for every monomer
+    """
+
+    target_indices = _get_target_indices(cl_prototypes)
+
+    for monomer, prototype in cl_prototypes.items():
+        if target_indices.count(prototype['target_index']) > 1 or \
+                _cl_has_cycle(monomer, cl_prototypes):
+            # compute how much fill-in the next best target_index would yield
+
+            # we exclude already existing target indices to avoid that
+            # updating the target index removes uniqueness from already
+            # unique target indices; this may slightly reduce the chances of
+            # finding a solution but prevents infinite loops
+            for target_index in list(set(target_indices)):
+                try:
+                    local_idx = prototype['possible_indices'].index(
+                        target_index
+                    )
+                except ValueError:
+                    local_idx = None
+
+                # compare against None, as 0 is a valid index here
+                if local_idx is not None:
+                    del prototype['possible_indices'][local_idx]
+                    del prototype['appearance_counts'][local_idx]
+
+            if len(prototype['possible_indices']) == 0:
+                prototype['diff_fillin'] = -1
+                continue
+
+            idx = np.argmin(prototype['appearance_counts'])
+
+            prototype['local_index'] = idx
+            prototype['alternate_target_index'] = \
+                prototype['possible_indices'][idx]
+            prototype['alternate_appearance_count'] = \
+                prototype['appearance_counts'][idx]
+
+            prototype['alternate_fillin'] = \
+                prototype['alternate_appearance_count'] \
+                * prototype['species_count']
+
+            prototype['diff_fillin'] = \
+                prototype['alternate_fillin'] - prototype['fillin']
+        else:
+            prototype['diff_fillin'] = -1
+
+    if all(
+        prototype['diff_fillin'] == -1
+        for prototype in cl_prototypes.values()
+    ):
+        raise RuntimeError('Could not compute a valid set of conservation '
+                           'laws for this model!')
+
+    # this puts prototypes with high diff_fillin last
+    cl_prototypes = sorted(
+        cl_prototypes.items(), key=lambda kv: kv[1]['diff_fillin']
+    )
+    cl_prototypes = {
+        proto[0]: proto[1]
+        for proto in cl_prototypes
+    }
+
+    for monomer in cl_prototypes:
+        prototype = cl_prototypes[monomer]
+        # we check that we
+        # A) have an alternative index computed, i.e. that the monomer
+        # originally had a non-unique target_index
+        # B) that the target_index still is not unique or part of a cyclic
+        # dependency. Due to the sorting, this will always be the monomer
+        # with the highest diff_fillin (note that the target index counts
+        # are recomputed on the fly)
+
+        if prototype['diff_fillin'] > -1 \
+                and (
+                    _get_target_indices(cl_prototypes).count(
+                        prototype['target_index']
+                    ) > 1
+                    or _cl_has_cycle(monomer, cl_prototypes)
+                ):
+            prototype['fillin'] = prototype['alternate_fillin']
+            prototype['target_index'] = prototype['alternate_target_index']
+            prototype['appearance_count'] = \
+                prototype['alternate_appearance_count']
+
+            del prototype['possible_indices'][prototype['local_index']]
+            del prototype['appearance_counts'][prototype['local_index']]
+
+
+def _get_target_indices(
+        cl_prototypes: CL_Prototype) -> List[int]:
+    """
+    Computes the list of target indices for the current conservation law
+    prototypes
+
+    :param cl_prototypes:
+        dict that contains target indices for every monomer
+
+    :return:
+        list of target indices
+    """
+    return [
+        prototype['target_index'] for prototype in cl_prototypes.values()
+    ]
+
+
+def _construct_conservation_from_prototypes(
+        cl_prototypes: CL_Prototype,
+        pysb_model: pysb.Model
+) -> List[ConservationLaw]:
+    """
+    Computes the algebraic expression for the total amount of a given
+    monomer
+
+    :param cl_prototypes:
+        see return of :func:`_generate_cl_prototypes`
+
+    :param pysb_model:
+        pysb model
+
+    :return:
+        list of dicts describing conservation laws
+    """
+    conservation_laws = []
+    for monomer_name in cl_prototypes:
+        target_index = cl_prototypes[monomer_name]['target_index']
+        coefficients = dict()
+
+        for ix, specie in enumerate(pysb_model.species):
+            count = extract_monomers(specie).count(monomer_name)
+            if count > 0:
+                coefficients[sp.Symbol(f'__s{ix}')] = count
+
+        conservation_laws.append({
+            'state': sp.Symbol(f'__s{target_index}'),
+            'total_abundance': sp.Symbol(f'tcl__s{target_index}'),
+            'coefficients': coefficients,
+        })
+
+    return conservation_laws
+
+
+def _add_conservation_for_constant_species(
+        ode_model: ODEModel,
+        conservation_laws: List[ConservationLaw]
+) -> None:
+    """
+    Adds conservation laws (constant total abundance) for all constant
+    species
+
+    :param ode_model:
+        ODEModel instance to which the conservation laws will be added
+
+    :param conservation_laws:
+        see return of :func:`_construct_conservation_from_prototypes`
+
+    """
+
+    for ix in range(ode_model.num_states_rdata()):
+        if ode_model.state_is_constant(ix):
+            conservation_laws.append({
+                'state': sp.Symbol(f'__s{ix}'),
+                'total_abundance': sp.Symbol(f'tcl__s{ix}'),
+                'coefficients': {sp.Symbol(f'__s{ix}'): 1.0}
+            })
+
+
+def _flatten_conservation_laws(
+        conservation_laws: List[ConservationLaw]) -> None:
+    """
+    Flatten the conservation laws such that the state expressions no longer
+    depend on any states that are replaced by conservation laws
+
+    :param conservation_laws:
+        see return of :func:`_construct_conservation_from_prototypes`
+    """
+    conservation_law_subs = \
+        _get_conservation_law_subs(conservation_laws)
+
+    while conservation_law_subs:
+        for cl in conservation_laws:
+            # only recompute the substitutions if we changed something
+            if any(
+                _apply_conservation_law_sub(cl, sub)
+                for sub in conservation_law_subs
+            ):
+                conservation_law_subs = \
+                    _get_conservation_law_subs(conservation_laws)
+
+
+def _apply_conservation_law_sub(
+        cl: ConservationLaw,
+        sub: Tuple[sp.Symbol, Dict[sp.Symbol, sp.Expr]]
+) -> bool:
+    """
+    Applies a substitution to a conservation law by eliminating the
+    coefficient of the substituted state and updating the coefficients of
+    the remaining states accordingly
+
+    :param cl:
+        conservation law
+
+    :param sub:
+        substitution to apply, tuple of (state to be replaced,
+        coefficients of the conservation law that replaces it)
+
+    :return: boolean flag indicating whether the substitution was applied
+    """
+    if not _state_in_cl_formula(sub[0], cl):
+        return False
+
+    coeff = cl['coefficients'].pop(sub[0], 0.0)
+    # x_j = T/b_j - sum_{i≠j}(x_i * b_i) / b_j
+    # don't need to account for totals here as we can simply
+    # absorb that into the new total
+    for k, v in sub[1].items():
+        if k == sub[0]:
+            continue
+        update = - coeff * v / sub[1][sub[0]]
+
+        if k in cl['coefficients']:
+            cl['coefficients'][k] += update
+        else:
+            cl['coefficients'][k] = update
+
+    return True
+
+
+def _state_in_cl_formula(
+        state: sp.Symbol, cl: ConservationLaw
+) -> bool:
+    """
+    Checks whether the state appears in the formula of the provided
+    conservation law
+
+    :param state:
+        state
+
+    :param cl:
+        conservation law
+
+    :return:
+        boolean indicator
+    """
+    if cl['state'] == state:
+        return False
+
+    return cl['coefficients'].get(state, 0.0) != 0.0
+
+
+def _get_conservation_law_subs(
+        conservation_laws: List[ConservationLaw]
+) -> List[Tuple[sp.Symbol, Dict[sp.Symbol, sp.Expr]]]:
+    """
+    Computes a list of (state, coefficients) tuples for conservation laws
+    that still appear in other conservation laws
+
+    :param conservation_laws:
+        see return of :func:`_construct_conservation_from_prototypes`
+
+    :return:
+        list of tuples containing substitution rules to be used with sympy
+        subs
+    """
+    return [
+        (cl['state'], cl['coefficients']) for cl in conservation_laws
+        if any(
+            _state_in_cl_formula(cl['state'], other_cl)
+            for other_cl in conservation_laws
+        )
+    ]
+
+
+def has_fixed_parameter_ic(specie: pysb.core.ComplexPattern,
+                           pysb_model: pysb.Model,
+                           ode_model: ODEModel) -> bool:
+    """
+    Wrapper to interface
+    :meth:`ode_export.ODEModel.state_has_fixed_parameter_initial_condition`
+    from pysb species/model arguments
+
+    :param specie:
+        pysb species
+
+    :param pysb_model:
+        pysb model
+
+    :param ode_model:
+        ODE model
+
+    :return:
+        ``False`` if the species does not have an initial condition at all.
+        Otherwise the return value of
+        :meth:`ode_export.ODEModel.state_has_fixed_parameter_initial_condition`
+    """
+    # ComplexPatterns are not hashable, so we have to compare by string
+    ic_index = next(
+        (
+            ic
+            for ic, condition in enumerate(pysb_model.initials)
+            if pysb.pattern.match_complex_pattern(condition[0],
+                                                  specie, exact=True)
+        ),
+        None
+    )
+    if ic_index is None:
+        return False
+    else:
+        return ode_model.state_has_fixed_parameter_initial_condition(
+            ic_index
+        )
+
+
+def extract_monomers(
+        complex_patterns: Union[pysb.ComplexPattern,
+                                List[pysb.ComplexPattern]]
+) -> List[str]:
+    """
+    Constructs a list of monomer names contained in complex patterns.
+    Multiplicity of names corresponds to the stoichiometry in the complex.
+
+    :param complex_patterns:
+        (list of) complex pattern(s)
+
+    :return:
+        list of monomer names
+    """
+    if not isinstance(complex_patterns, list):
+        complex_patterns = [complex_patterns]
+    return [
+        mp.monomer.name
+        for cp in complex_patterns
+        if cp is not None
+        for mp in cp.monomer_patterns
+    ]
+
+
+def _get_unconserved_monomers(rule: pysb.Rule,
+                              pysb_model: pysb.Model) -> Set[str]:
+    """
+    Constructs the set of monomer names for which the specified rule changes
+    the stoichiometry of the monomer in the specified model.
+ + :param rule: + the pysb rule + + :param pysb_model: + pysb model + + :return: + set of monomer names for which the stoichiometry is not conserved + """ + unconserved_monomers = set() + + if not rule.delete_molecules \ + and len(rule.product_pattern.complex_patterns) == 0: + # if delete_molecules is not True but we have a degradation rule, + # we have to actually go through the reactions that are created by + # the rule + for reaction in [r for r in pysb_model.reactions + if rule.name in r['rule']]: + unconserved_monomers |= _get_changed_stoichiometries( + [pysb_model.species[ix] for ix in reaction['reactants']], + [pysb_model.species[ix] for ix in reaction['products']] + ) + else: + # otherwise we can simply extract all information for the rule + # itself, which is computationally much more efficient + unconserved_monomers |= _get_changed_stoichiometries( + rule.reactant_pattern.complex_patterns, + rule.product_pattern.complex_patterns + ) + + return unconserved_monomers + + +def _get_changed_stoichiometries( + reactants: Union[pysb.ComplexPattern, List[pysb.ComplexPattern]], + products: Union[pysb.ComplexPattern, List[pysb.ComplexPattern]] +) -> Set[str]: + """ + Constructs the set of monomer names which have different + stoichiometries in reactants and products. + + :param reactants: + (list of) complex pattern(s) + :param products: + (list of) complex pattern(s) + + :returns: + set of monomer name for which the stoichiometry changed + """ + + changed_stoichiometries = set() + + reactant_monomers = extract_monomers( + reactants + ) + + product_monomers = extract_monomers( + products + ) + + for monomer in set(reactant_monomers + product_monomers): + if reactant_monomers.count(monomer) != product_monomers.count(monomer): + changed_stoichiometries.add(monomer) + + return changed_stoichiometries + + +def pysb_model_from_path(pysb_model_file: Union[str, Path]) -> pysb.Model: + """Load a pysb model module and return the :class:`pysb.Model` instance + + :param pysb_model_file: Full or relative path to the PySB model module + :return: The pysb Model instance + """ + + pysb_model_module_name = \ + os.path.splitext(os.path.split(pysb_model_file)[-1])[0] + + import importlib.util + spec = importlib.util.spec_from_file_location( + pysb_model_module_name, pysb_model_file) + module = importlib.util.module_from_spec(spec) + sys.modules[pysb_model_module_name] = module + spec.loader.exec_module(module) + + return module.model diff --git a/python/sdist/amici/sbml_import.py b/python/sdist/amici/sbml_import.py deleted file mode 120000 index d5ebad6b67..0000000000 --- a/python/sdist/amici/sbml_import.py +++ /dev/null @@ -1 +0,0 @@ -../../amici/sbml_import.py \ No newline at end of file diff --git a/python/sdist/amici/sbml_import.py b/python/sdist/amici/sbml_import.py new file mode 100644 index 0000000000..e1f35245e4 --- /dev/null +++ b/python/sdist/amici/sbml_import.py @@ -0,0 +1,2365 @@ +""" +SBML Import +----------- +This module provides all necessary functionality to import a model specified +in the `Systems Biology Markup Language (SBML) `_. +""" +import copy +import itertools as itt +import logging +import math +import os +import re +import warnings +from pathlib import Path +from typing import (Any, Callable, Dict, Iterable, List, Optional, Tuple, + Union) + +import libsbml as sbml +import sympy as sp + +from . 
import has_clibs +from .constants import SymbolId +from .import_utils import (RESERVED_SYMBOLS, + _check_unsupported_functions, + _get_str_symbol_identifiers, + _parse_special_functions, + generate_measurement_symbol, + generate_regularization_symbol, + noise_distribution_to_cost_function, + noise_distribution_to_observable_transformation, + smart_subs, smart_subs_dict, toposort_symbols) +from .logging import get_logger, log_execution_time, set_log_level +from .ode_export import ( + ODEExporter, ODEModel, symbol_with_assumptions, _default_simplify +) + + +class SBMLException(Exception): + pass + + +SymbolicFormula = Dict[sp.Symbol, sp.Expr] + + +default_symbols = { + symbol: {} for symbol in SymbolId +} + +ConservationLaw = Dict[str, Union[str, sp.Expr]] + +logger = get_logger(__name__, logging.ERROR) + + +class SbmlImporter: + """ + Class to generate AMICI C++ files for a model provided in the Systems + Biology Markup Language (SBML). + + :ivar show_sbml_warnings: + indicates whether libSBML warnings should be + displayed + + :ivar symbols: + dict carrying symbolic definitions + + :ivar sbml_reader: + + The libSBML sbml reader + + .. warning:: + Not storing this may result in a segfault. + + :ivar sbml_doc: + document carrying the sbml definition + + .. warning:: + Not storing this may result in a segfault. + + :ivar sbml: + SBML model to import + + :ivar compartments: + dict of compartment ids and compartment volumes + + :ivar stoichiometric_matrix: + stoichiometric matrix of the model + + :ivar flux_vector: + reaction kinetic laws + + :ivar flux_ids: + identifiers for elements of flux_vector + + :ivar _local_symbols: + model symbols for sympy to consider during sympification + see `locals`argument in `sympy.sympify` + + :ivar species_assignment_rules: + Assignment rules for species. + Key is symbolic identifier and value is assignment value + + :ivar compartment_assignment_rules: + Assignment rules for compartments. + Key is symbolic identifier and value is assignment value + + :ivar parameter_assignment_rules: + assignment rules for parameters, these parameters are not permissible + for sensitivity analysis + + :ivar initial_assignments: + initial assignments for parameters, these parameters are not + permissible for sensitivity analysis + + :ivar sbml_parser_settings: + sets behaviour of SBML Formula parsing + + """ + + def __init__(self, + sbml_source: Union[str, Path, sbml.Model], + show_sbml_warnings: bool = False, + from_file: bool = True) -> None: + """ + Create a new Model instance. + + :param sbml_source: + Either a path to SBML file where the model is specified, + or a model string as created by sbml.sbmlWriter( + ).writeSBMLToString() or an instance of `libsbml.Model`. + + :param show_sbml_warnings: + Indicates whether libSBML warnings should be displayed. 
+ + :param from_file: + Whether `sbml_source` is a file name (True, default), or an SBML + string + """ + if isinstance(sbml_source, sbml.Model): + self.sbml_doc: sbml.Document = sbml_source.getSBMLDocument() + else: + self.sbml_reader: sbml.SBMLReader = sbml.SBMLReader() + if from_file: + sbml_doc = self.sbml_reader.readSBMLFromFile(str(sbml_source)) + else: + sbml_doc = self.sbml_reader.readSBMLFromString(sbml_source) + self.sbml_doc = sbml_doc + + self.show_sbml_warnings: bool = show_sbml_warnings + + # process document + self._process_document() + + self.sbml: sbml.Model = self.sbml_doc.getModel() + + # Long and short names for model components + self.symbols: Dict[SymbolId, Dict[sp.Symbol, Dict[str, Any]]] = {} + + self._local_symbols: Dict[str, Union[sp.Expr, sp.Function]] = {} + self.compartments: SymbolicFormula = {} + self.compartment_assignment_rules: SymbolicFormula = {} + self.species_assignment_rules: SymbolicFormula = {} + self.parameter_assignment_rules: SymbolicFormula = {} + self.initial_assignments: SymbolicFormula = {} + + self._reset_symbols() + + # http://sbml.org/Software/libSBML/5.18.0/docs/python-api/classlibsbml_1_1_l3_parser_settings.html#abcfedd34efd3cae2081ba8f42ea43f52 + # all defaults except disable unit parsing + self.sbml_parser_settings = sbml.L3ParserSettings( + self.sbml, sbml.L3P_PARSE_LOG_AS_LOG10, + sbml.L3P_EXPAND_UNARY_MINUS, sbml.L3P_NO_UNITS, + sbml.L3P_AVOGADRO_IS_CSYMBOL, + sbml.L3P_COMPARE_BUILTINS_CASE_INSENSITIVE, None, + sbml.L3P_MODULO_IS_PIECEWISE + ) + + def _process_document(self) -> None: + """ + Validate and simplify document. + """ + # Ensure we got a valid SBML model, otherwise further processing + # might lead to undefined results + self.sbml_doc.validateSBML() + _check_lib_sbml_errors(self.sbml_doc, self.show_sbml_warnings) + + # apply several model simplifications that make our life substantially + # easier + if self.sbml_doc.getModel().getNumFunctionDefinitions(): + convert_config = sbml.SBMLFunctionDefinitionConverter()\ + .getDefaultProperties() + self.sbml_doc.convert(convert_config) + + convert_config = sbml.SBMLLocalParameterConverter().\ + getDefaultProperties() + self.sbml_doc.convert(convert_config) + + # If any of the above calls produces an error, this will be added to + # the SBMLError log in the sbml document. Thus, it is sufficient to + # check the error log just once after all conversion/validation calls. 
+        _check_lib_sbml_errors(self.sbml_doc, self.show_sbml_warnings)
+
+    def _reset_symbols(self) -> None:
+        """
+        Reset the symbols attribute to default values
+        """
+        self.symbols = copy.deepcopy(default_symbols)
+        self._local_symbols = {}
+
+    def sbml2amici(
+            self,
+            model_name: str,
+            output_dir: Union[str, Path] = None,
+            observables: Dict[str, Dict[str, str]] = None,
+            event_observables: Dict[str, Dict[str, str]] = None,
+            constant_parameters: Iterable[str] = None,
+            sigmas: Dict[str, Union[str, float]] = None,
+            event_sigmas: Dict[str, Union[str, float]] = None,
+            noise_distributions: Dict[str, Union[str, Callable]] = None,
+            event_noise_distributions: Dict[str, Union[str, Callable]] = None,
+            verbose: Union[int, bool] = logging.ERROR,
+            assume_pow_positivity: bool = False,
+            compiler: str = None,
+            allow_reinit_fixpar_initcond: bool = True,
+            compile: bool = True,
+            compute_conservation_laws: bool = True,
+            simplify: Optional[Callable] = _default_simplify,
+            cache_simplify: bool = False,
+            log_as_log10: bool = True,
+            generate_sensitivity_code: bool = True,
+    ) -> None:
+        """
+        Generate and compile AMICI C++ files for the model provided to the
+        constructor.
+
+        The resulting model can be imported as a regular Python module (if
+        `compile=True`), or used from Matlab or C++ as described in the
+        documentation of the respective AMICI interface.
+
+        Note that this generates model ODEs for changes in concentrations,
+        not amounts, unless the `hasOnlySubstanceUnits` attribute has been
+        defined for a particular species.
+
+        Sensitivity analysis for local parameters is enabled by creating
+        global parameters _{reactionId}_{localParameterName}.
+
+        :param model_name:
+            name of the model/model directory
+
+        :param output_dir:
+            see :meth:`amici.ode_export.ODEExporter.set_paths`
+
+        :param observables:
+            dictionary(observableId: {'name': observableName (optional),
+            'formula': formulaString}) to be added to the model
+
+        :param event_observables:
+            dictionary(eventObservableId: {'name': eventObservableName
+            (optional), 'event': eventId, 'formula': formulaString}) to be
+            added to the model
+
+        :param constant_parameters:
+            list of SBML Ids identifying constant parameters
+
+        :param sigmas:
+            dictionary(observableId: sigma value or (existing) parameter
+            name)
+
+        :param event_sigmas:
+            dictionary(eventObservableId: sigma value or (existing)
+            parameter name)
+
+        :param noise_distributions:
+            dictionary(observableId: noise type).
+            If nothing is passed for some observable id, a normal model is
+            assumed as default. Either pass a noise type identifier, or a
+            callable generating a custom noise string.
+
+        :param event_noise_distributions:
+            dictionary(eventObservableId: noise type).
+            If nothing is passed for some observable id, a normal model is
+            assumed as default. Either pass a noise type identifier, or a
+            callable generating a custom noise string.
+
+        :param verbose:
+            verbosity level for logging, ``True``/``False`` default to
+            ``logging.DEBUG``/``logging.ERROR``
+
+        :param assume_pow_positivity:
+            if set to ``True``, a special pow function is
+            used to avoid problems with state variables that may become
+            negative due to numerical errors
+
+        :param compiler:
+            distutils/setuptools compiler selection to build the
+            python extension
+
+        :param allow_reinit_fixpar_initcond:
+            see :class:`amici.ode_export.ODEExporter`
+
+        :param compile:
+            If ``True``, compile the generated Python package,
+            if ``False``, just generate code.
+ + :param compute_conservation_laws: + if set to ``True``, conservation laws are automatically computed + and applied such that the state-jacobian of the ODE + right-hand-side has full rank. This option should be set to + ``True`` when using the Newton algorithm to compute steadystate + sensitivities. + Conservation laws for constant species are enabled by default. + Support for conservation laws for non-constant species is + experimental and may be enabled by setting an environment variable + ``AMICI_EXPERIMENTAL_SBML_NONCONST_CLS`` to either ``demartino`` + to use the algorithm proposed by De Martino et al. (2014) + https://doi.org/10.1371/journal.pone.0100750, or to any other value + to use the deterministic algorithm implemented in + ``conserved_moieties2.py``. In some cases, the ``demartino`` may + run for a very long time. This has been observed for example in the + case of stoichiometric coefficients with many significant digits. + + :param simplify: + see :attr:`ODEModel._simplify` + + :param cache_simplify: + see :func:`amici.ODEModel.__init__` + + :param log_as_log10: + If ``True``, log in the SBML model will be parsed as ``log10`` + (default), if ``False``, log will be parsed as natural logarithm + ``ln`` + + :param generate_sensitivity_code: + If ``False``, the code required for sensitivity computation will + not be generated + """ + set_log_level(logger, verbose) + + ode_model = self._build_ode_model( + observables=observables, + event_observables=event_observables, + constant_parameters=constant_parameters, + sigmas=sigmas, + event_sigmas=event_sigmas, + noise_distributions=noise_distributions, + event_noise_distributions=event_noise_distributions, + verbose=verbose, + compute_conservation_laws=compute_conservation_laws, + simplify=simplify, + cache_simplify=cache_simplify, + log_as_log10=log_as_log10, + ) + + exporter = ODEExporter( + ode_model, + model_name=model_name, + outdir=output_dir, + verbose=verbose, + assume_pow_positivity=assume_pow_positivity, + compiler=compiler, + allow_reinit_fixpar_initcond=allow_reinit_fixpar_initcond, + generate_sensitivity_code=generate_sensitivity_code + ) + exporter.generate_model_code() + + if compile: + if not has_clibs: + warnings.warn('AMICI C++ extensions have not been built. ' + 'Generated model code, but unable to compile.') + exporter.compile_model() + + def _build_ode_model( + self, + observables: Dict[str, Dict[str, str]] = None, + event_observables: Dict[str, Dict[str, str]] = None, + constant_parameters: Iterable[str] = None, + sigmas: Dict[str, Union[str, float]] = None, + event_sigmas: Dict[str, Union[str, float]] = None, + noise_distributions: Dict[str, Union[str, Callable]] = None, + event_noise_distributions: Dict[str, Union[str, Callable]] = None, + verbose: Union[int, bool] = logging.ERROR, + compute_conservation_laws: bool = True, + simplify: Optional[Callable] = _default_simplify, + cache_simplify: bool = False, + log_as_log10: bool = True, + ) -> ODEModel: + """Generate an ODEModel from this SBML model. + + See :py:func:`sbml2amici` for parameters. 
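+
+        :return:
+            the generated ODE model instance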
+ """ + constant_parameters = list(constant_parameters) \ + if constant_parameters else [] + + if sigmas is None: + sigmas = {} + + if event_sigmas is None: + event_sigmas = {} + + if noise_distributions is None: + noise_distributions = {} + + if event_noise_distributions is None: + event_noise_distributions = {} + + self._reset_symbols() + self.sbml_parser_settings.setParseLog( + sbml.L3P_PARSE_LOG_AS_LOG10 if log_as_log10 else + sbml.L3P_PARSE_LOG_AS_LN + ) + self._process_sbml(constant_parameters) + if self.symbols.get(SymbolId.EVENT, False): + if compute_conservation_laws: + logger.warning( + 'Conservation laws are currently not supported for models ' + 'with events, and will be turned off.' + ) + compute_conservation_laws = False + + self._process_observables( + observables, + sigmas, + noise_distributions + ) + self._process_event_observables( + event_observables, + event_sigmas, + event_noise_distributions + ) + self._replace_compartments_with_volumes() + + self._clean_reserved_symbols() + self._process_time() + + ode_model = ODEModel( + verbose=verbose, + simplify=simplify, + cache_simplify=cache_simplify, + ) + ode_model.import_from_sbml_importer( + self, compute_cls=compute_conservation_laws) + return ode_model + + @log_execution_time('importing SBML', logger) + def _process_sbml(self, constant_parameters: List[str] = None) -> None: + """ + Read parameters, species, reactions, and so on from SBML model + + :param constant_parameters: + SBML Ids identifying constant parameters + """ + self.check_support() + self._gather_locals() + self._process_parameters(constant_parameters) + self._process_compartments() + self._process_species() + self._process_reactions() + self._process_rules() + self._process_initial_assignments() + self._process_species_references() + self._process_events() + + def check_support(self) -> None: + """ + Check whether all required SBML features are supported. + Also ensures that the SBML contains at least one reaction, or rate + rule, or assignment rule, to produce change in the system over time. + """ + + # Check for required but unsupported SBML extensions + if self.sbml_doc.getLevel() != 3 \ + and hasattr(self.sbml, 'all_elements_from_plugins') \ + and self.sbml.all_elements_from_plugins.getSize(): + raise SBMLException('SBML extensions are currently not supported!') + + if self.sbml_doc.getLevel() == 3: + # the "required" attribute is only available in SBML Level 3 + for i_plugin in range(self.sbml.getNumPlugins()): + plugin = self.sbml.getPlugin(i_plugin) + if plugin.getPackageName() in ('layout',): + # 'layout' plugin does not have the 'required' attribute + continue + if hasattr(plugin, 'getRequired') and not plugin.getRequired(): + # if not "required", this has no impact on model + # simulation, and we can safely ignore it + continue + # Check if there are extension elements. 
If not, we can safely
+                # ignore the enabled package
+                if plugin.getListOfAllElements():
+                    raise SBMLException(
+                        f'Required SBML extension {plugin.getPackageName()} '
+                        f'is currently not supported!')
+
+        if any(not rule.isAssignment() and not isinstance(
+                self.sbml.getElementBySId(rule.getVariable()),
+                (sbml.Compartment, sbml.Species, sbml.Parameter)
+        ) for rule in self.sbml.getListOfRules()):
+            raise SBMLException('Algebraic rules are currently not '
+                                'supported, and rate rules are only '
+                                'supported for species, compartments, '
+                                'and parameters.')
+
+        if any(not (rule.isAssignment() or rule.isRate())
+               and isinstance(
+                   self.sbml.getElementBySId(rule.getVariable()),
+                   (sbml.Compartment, sbml.Species, sbml.Parameter)
+               ) for rule in self.sbml.getListOfRules()):
+            raise SBMLException('Only assignment and rate rules are '
+                                'currently supported for compartments, '
+                                'species, and parameters!')
+
+        if any(r.getFast() for r in self.sbml.getListOfReactions()):
+            raise SBMLException('Fast reactions are currently not supported!')
+
+        # Check events for unsupported functionality
+        self.check_event_support()
+
+    def check_event_support(self) -> None:
+        """
+        Check possible events in the model, as AMICI currently does not
+        support
+
+        * delays in events
+        * priorities of events
+        * events fired at initial time
+
+        Furthermore, event triggers are optional (e.g., if an event is fired
+        at initial time, no trigger function is necessary).
+        In this case, a warning is issued that the event will have no effect.
+        """
+        for event in self.sbml.getListOfEvents():
+            event_id = event.getId()
+            # Check for delays in events
+            delay = event.getDelay()
+            if delay is not None:
+                try:
+                    delay_time = float(self._sympy_from_sbml_math(delay))
+                    if delay_time != 0:
+                        raise ValueError
+                # `TypeError` would be raised in the above `float(...)`
+                # if the delay is not a fixed time
+                except (TypeError, ValueError):
+                    raise SBMLException('Events with execution delays are '
+                                        'currently not supported in AMICI.')
+            # Check for priorities
+            if event.getPriority() is not None:
+                raise SBMLException(f'Event {event_id} has a priority '
+                                    'specified. This is currently not '
+                                    'supported in AMICI.')
+
+            # check trigger
+            trigger_sbml = event.getTrigger()
+            if trigger_sbml is None:
+                logger.warning(f'Event {event_id} has no trigger, '
+                               'so will be skipped.')
+                continue
+            if trigger_sbml.getMath() is None:
+                logger.warning(f'Event {event_id} trigger has no trigger '
+                               'expression, so a dummy trigger will be set.')
+
+            if not trigger_sbml.getPersistent():
+                raise SBMLException(
+                    f'Event {event_id} has a non-persistent trigger. '
+                    'This is currently not supported in AMICI.'
+                )
+
+    @log_execution_time('gathering local SBML symbols', logger)
+    def _gather_locals(self) -> None:
+        """
+        Populate self.local_symbols with all model entities.
+
+        This is later used during sympification to avoid sympy builtins
+        shadowing model entities as well as to avoid possibly costly
+        symbolic substitutions
+        """
+        self._gather_base_locals()
+        self._gather_dependent_locals()
+
+    def _gather_base_locals(self):
+        """
+        Populate self.local_symbols with pure symbol definitions that do not
+        depend on any other symbol.
+        """
+
+        special_symbols_and_funs = {
+            # oo is sympy infinity
+            'INF': sp.oo,
+            'NaN': sp.nan,
+            'rem': sp.Mod,
+            'time': symbol_with_assumptions('time'),
+            # SBML L3 explicitly defines this value, which is not equal
+            # to the most recent SI definition.
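+            # (since the 2019 SI redefinition, the exact value is
+            # 6.02214076e23)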
+ 'avogadro': sp.Float(6.02214179e23), + 'exponentiale': sp.E, + } + for s, v in special_symbols_and_funs.items(): + self.add_local_symbol(s, v) + + for c in itt.chain(self.sbml.getListOfSpecies(), + self.sbml.getListOfParameters(), + self.sbml.getListOfCompartments()): + if not c.isSetId(): + continue + + self.add_local_symbol(c.getId(), _get_identifier_symbol(c)) + + for x_ref in _get_list_of_species_references(self.sbml): + if not x_ref.isSetId(): + continue + if x_ref.isSetStoichiometry() and not \ + self.is_assignment_rule_target(x_ref): + value = sp.Float(x_ref.getStoichiometry()) + else: + value = _get_identifier_symbol(x_ref) + + ia_sym = self._get_element_initial_assignment(x_ref.getId()) + if ia_sym is not None: + value = ia_sym + + self.add_local_symbol(x_ref.getId(), value) + + for r in self.sbml.getListOfReactions(): + for e in itt.chain(r.getListOfReactants(), r.getListOfProducts()): + if isinstance(e, sbml.SpeciesReference): + continue + + if not (e.isSetId() and e.isSetStoichiometry()) or \ + self.is_assignment_rule_target(e): + continue + + self.add_local_symbol(e.getId(), + sp.Float(e.getStoichiometry())) + + def _gather_dependent_locals(self): + """ + Populate self.local_symbols with symbol definitions that may depend on + other symbol definitions. + """ + for r in self.sbml.getListOfReactions(): + if not r.isSetId(): + continue + self.add_local_symbol( + r.getId(), + self._sympy_from_sbml_math(r.getKineticLaw()) + ) + + def add_local_symbol(self, key: str, value: sp.Expr): + """ + Add local symbols with some sanity checking for duplication which + would indicate redefinition of internals, which SBML permits, + but we don't. + + :param key: + local symbol key + + :param value: + local symbol value + """ + if key in self._local_symbols.keys(): + raise SBMLException( + f'AMICI tried to add a local symbol {key} with value {value}, ' + f'but {key} was already instantiated with ' + f'{self._local_symbols[key]}. This means that there ' + f'are multiple SBML elements with SId {key}, which is ' + f'invalid SBML. This can be fixed by renaming ' + f'the elements with SId {key}.' + ) + if key in {'True', 'False', 'true', 'false', 'pi'}: + raise SBMLException( + f'AMICI tried to add a local symbol {key} with value {value}, ' + f'but {key} is a reserved symbol in AMICI. This can be fixed ' + f'by renaming the element with SId {key}.' + ) + self._local_symbols[key] = value + + @log_execution_time('processing SBML compartments', logger) + def _process_compartments(self) -> None: + """ + Get compartment information, stoichiometric matrix and fluxes from + SBML model. + """ + compartments = self.sbml.getListOfCompartments() + self.compartments = {} + for comp in compartments: + init = sp.Float(1.0) + + if comp.isSetVolume(): + init = self._sympy_from_sbml_math(comp.getVolume()) + + ia_sym = self._get_element_initial_assignment(comp.getId()) + if ia_sym is not None: + init = ia_sym + + self.compartments[_get_identifier_symbol(comp)] = init + + @log_execution_time('processing SBML species', logger) + def _process_species(self) -> None: + """ + Get species information from SBML model. 
+ """ + if self.sbml.isSetConversionFactor(): + conversion_factor = symbol_with_assumptions( + self.sbml.getConversionFactor() + ) + else: + conversion_factor = 1 + + for s in self.sbml.getListOfSpecies(): + if self.is_assignment_rule_target(s): + continue + self.symbols[SymbolId.SPECIES][_get_identifier_symbol(s)] = { + 'name': s.getName() if s.isSetName() else s.getId(), + 'compartment': _get_species_compartment_symbol(s), + 'constant': s.getConstant() or s.getBoundaryCondition(), + 'amount': s.getHasOnlySubstanceUnits(), + 'conversion_factor': symbol_with_assumptions( + s.getConversionFactor() + ) + if s.isSetConversionFactor() + else conversion_factor, + 'index': len(self.symbols[SymbolId.SPECIES]), + } + + self._convert_event_assignment_parameter_targets_to_species() + self._process_species_initial() + self._process_rate_rules() + + @log_execution_time('processing SBML species initials', logger) + def _process_species_initial(self): + """ + Extract initial values and initial assignments from species + """ + for species_variable in self.sbml.getListOfSpecies(): + initial = get_species_initial(species_variable) + + species_id = _get_identifier_symbol(species_variable) + # If species_id is a target of an AssignmentRule, species will be + # None, but we don't have to account for the initial definition + # of the species itself and SBML doesn't permit AssignmentRule + # targets to have InitialAssignments. + species = self.symbols[SymbolId.SPECIES].get(species_id, None) + + ia_initial = self._get_element_initial_assignment( + species_variable.getId() + ) + if ia_initial is not None: + if species and species['amount'] \ + and 'compartment' in species: + ia_initial *= self.compartments.get( + species['compartment'], species['compartment'] + ) + initial = ia_initial + if species: + species['init'] = initial + + # don't assign this since they need to stay in order + sorted_species = toposort_symbols(self.symbols[SymbolId.SPECIES], + 'init') + for species in self.symbols[SymbolId.SPECIES].values(): + species['init'] = smart_subs_dict(species['init'], + sorted_species, + 'init') + + @log_execution_time('processing SBML rate rules', logger) + def _process_rate_rules(self): + """ + Process rate rules for species, compartments and parameters. + Compartments and parameters with rate rules are implemented as species. + Note that, in the case of species, rate rules may describe the change + in amount, not concentration, of a species. + """ + rules = self.sbml.getListOfRules() + # compartments with rules are replaced with constants in the relevant + # equations during the _replace_in_all_expressions call inside + # _process_rules + for rule in rules: + if rule.getTypeCode() != sbml.SBML_RATE_RULE: + continue + + variable = symbol_with_assumptions(rule.getVariable()) + formula = self._sympy_from_sbml_math(rule) + if formula is None: + continue + + # Species rules are processed first, to avoid processing + # compartments twice (as compartments with rate rules are + # implemented as species). 
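+            # resolve a potential initial assignment for the rule target up
+            # front; self.initial_assignments is not populated yet at this
+            # point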
+ ia_init = self._get_element_initial_assignment(rule.getVariable()) + if variable in self.symbols[SymbolId.SPECIES]: + init = self.symbols[SymbolId.SPECIES][variable]['init'] + name = None + + if variable in self.compartments: + init = self.compartments[variable] + name = str(variable) + del self.compartments[variable] + + elif variable in self.symbols[SymbolId.PARAMETER]: + init = self._sympy_from_sbml_math( + self.symbols[SymbolId.PARAMETER][variable]['value'], + ) + name = self.symbols[SymbolId.PARAMETER][variable]['name'] + del self.symbols[SymbolId.PARAMETER][variable] + + # parameter with initial assignment, cannot use + # self.initial_assignments as it is not filled at this + # point + elif ia_init is not None: + init = ia_init + par = self.sbml.getElementBySId(rule.getVariable()) + name = par.getName() if par.isSetName() else par.getId() + + self.add_d_dt(formula, variable, init, name) + + def add_d_dt( + self, + d_dt: sp.Expr, + variable: sp.Symbol, + variable0: Union[float, sp.Expr], + name: str, + ) -> None: + """ + Creates or modifies species, to implement rate rules for + compartments and species, respectively. + + :param d_dt: + The rate rule (or, right-hand side of an ODE). + + :param variable: + The subject of the rate rule. + + :param variable0: + The initial value of the variable. + + :param name: + Species name, only applicable if this function generates a new + species + """ + if variable in self.symbols[SymbolId.SPECIES]: + # only update dt if species was already generated + self.symbols[SymbolId.SPECIES][variable]['dt'] = d_dt + else: + # update initial values + for species_id, species in self.symbols[SymbolId.SPECIES].items(): + variable0 = smart_subs(variable0, species_id, species['init']) + + for species in self.symbols[SymbolId.SPECIES].values(): + species['init'] = smart_subs(species['init'], + variable, variable0) + + # add compartment/parameter species + self.symbols[SymbolId.SPECIES][variable] = { + 'name': name, + 'init': variable0, + 'amount': False, + 'conversion_factor': 1.0, + 'constant': False, + 'index': len(self.symbols[SymbolId.SPECIES]), + 'dt': d_dt, + } + + @log_execution_time('processing SBML parameters', logger) + def _process_parameters(self, + constant_parameters: List[str] = None) -> None: + """ + Get parameter information from SBML model. + + :param constant_parameters: + SBML Ids identifying constant parameters + """ + + if constant_parameters is None: + constant_parameters = [] + + # Ensure specified constant parameters exist in the model + for parameter in constant_parameters: + if not self.sbml.getParameter(parameter): + raise KeyError('Cannot make %s a constant parameter: ' + 'Parameter does not exist.' % parameter) + + fixed_parameters = [ + parameter + for parameter in self.sbml.getListOfParameters() + if parameter.getId() in constant_parameters + ] + for parameter in fixed_parameters: + if self._get_element_initial_assignment(parameter.getId()) is not \ + None or self.is_assignment_rule_target(parameter) or \ + self.is_rate_rule_target(parameter): + raise SBMLException( + f'Cannot turn parameter {parameter.getId()} into a ' + 'constant/fixed parameter since it either has an ' + 'initial assignment or is the target of an assignment or ' + 'rate rule.' 
+ ) + + parameters = [ + parameter for parameter + in self.sbml.getListOfParameters() + if parameter.getId() not in constant_parameters + and self._get_element_initial_assignment(parameter.getId()) is None + and not self.is_assignment_rule_target(parameter) + ] + + loop_settings = { + SymbolId.PARAMETER: {'var': parameters, 'name': 'parameter'}, + SymbolId.FIXED_PARAMETER: {'var': fixed_parameters, + 'name': 'fixed_parameter'} + } + + for partype, settings in loop_settings.items(): + for par in settings['var']: + self.symbols[partype][_get_identifier_symbol(par)] = { + 'name': par.getName() if par.isSetName() else par.getId(), + 'value': par.getValue() + } + + @log_execution_time('processing SBML reactions', logger) + def _process_reactions(self): + """ + Get reactions from SBML model. + """ + reactions = self.sbml.getListOfReactions() + # nr (number of reactions) should have a minimum length of 1. This is + # to ensure that, if there are no reactions, the stoichiometric matrix + # and flux vector multiply to a zero vector with dimensions (nx, 1). + nr = max(1, len(reactions)) + nx = len(self.symbols[SymbolId.SPECIES]) + # stoichiometric matrix + self.stoichiometric_matrix = sp.SparseMatrix(sp.zeros(nx, nr)) + self.flux_vector = sp.zeros(nr, 1) + # Use reaction IDs as IDs for flux expressions (note that prior to SBML + # level 3 version 2 the ID attribute was not mandatory and may be + # unset) + self.flux_ids = [ + f"flux_{reaction.getId()}" if reaction.isSetId() + else f"flux_r{reaction_idx}" + for reaction_idx, reaction in enumerate(reactions) + ] or ['flux_r0'] + + reaction_ids = [ + reaction.getId() for reaction in reactions + if reaction.isSetId() + ] + + for reaction_index, reaction in enumerate(reactions): + for element_list, sign in [(reaction.getListOfReactants(), -1), + (reaction.getListOfProducts(), 1)]: + for element in element_list: + stoichiometry = self._get_element_stoichiometry( + element + ) + sbml_species = self.sbml.getSpecies(element.getSpecies()) + if self.is_assignment_rule_target(sbml_species): + continue + species_id = _get_identifier_symbol(sbml_species) + species = self.symbols[SymbolId.SPECIES][species_id] + + if species['constant']: + continue + + # Division by species compartment size (to find the + # rate of change in species concentration) now occurs + # in the `dx_dt` method in "ode_export.py", which also + # accounts for possibly variable compartments. + self.stoichiometric_matrix[species['index'], + reaction_index] += \ + sign * stoichiometry * species['conversion_factor'] + if reaction.isSetId(): + sym_math = self._local_symbols[reaction.getId()] + else: + sym_math = self._sympy_from_sbml_math(reaction.getKineticLaw()) + + self.flux_vector[reaction_index] = sym_math + if any( + str(symbol) in reaction_ids + for symbol in self.flux_vector[reaction_index].free_symbols + ): + raise SBMLException( + 'Kinetic laws involving reaction ids are currently' + ' not supported!' + ) + + @log_execution_time('processing SBML rules', logger) + def _process_rules(self) -> None: + """ + Process Rules defined in the SBML model. 
+ """ + for rule in self.sbml.getListOfRules(): + # rate rules are processed in _process_species + if rule.getTypeCode() == sbml.SBML_RATE_RULE: + continue + + sbml_var = self.sbml.getElementBySId(rule.getVariable()) + sym_id = symbol_with_assumptions(rule.getVariable()) + formula = self._sympy_from_sbml_math(rule) + if formula is None: + continue + + if isinstance(sbml_var, sbml.Species): + self.species_assignment_rules[sym_id] = formula + + elif isinstance(sbml_var, sbml.Compartment): + self.compartment_assignment_rules[sym_id] = formula + self.compartments[sym_id] = formula + + elif isinstance(sbml_var, sbml.Parameter): + self.parameter_assignment_rules[sym_id] = formula + + self.symbols[SymbolId.EXPRESSION][sym_id] = { + 'name': str(sym_id), + 'value': formula + } + + self.symbols[SymbolId.EXPRESSION] = toposort_symbols( + self.symbols[SymbolId.EXPRESSION], 'value' + ) + + # expressions must not occur in definition of x0 + for species in self.symbols[SymbolId.SPECIES].values(): + species['init'] = self._make_initial( + smart_subs_dict(species['init'], + self.symbols[SymbolId.EXPRESSION], + 'value') + ) + + def _process_time(self) -> None: + """ + Convert time_symbol into cpp variable. + """ + sbml_time_symbol = symbol_with_assumptions('time') + amici_time_symbol = symbol_with_assumptions('t') + self.amici_time_symbol = amici_time_symbol + + self._replace_in_all_expressions(sbml_time_symbol, amici_time_symbol) + + def _convert_event_assignment_parameter_targets_to_species(self): + """ + Convert parameters that are targets of event assignments to species. + + This is for the convenience of only implementing event assignments for + "species". + """ + parameter_targets = \ + _collect_event_assignment_parameter_targets(self.sbml) + for parameter_target in parameter_targets: + # Parameter rate rules already exist as species. + if parameter_target in self.symbols[SymbolId.SPECIES]: + continue + if parameter_target in self.parameter_assignment_rules: + raise SBMLException( + 'AMICI does not currently support models with SBML events ' + 'that affect parameters that are also the target of ' + 'assignment rules.' + ) + parameter_def = None + for symbol_id in {SymbolId.PARAMETER, SymbolId.FIXED_PARAMETER}: + if parameter_target in self.symbols[symbol_id]: + # `parameter_target` should only exist in one of the + # `symbol_id` dictionaries. + if parameter_def is not None: + raise AssertionError( + 'Unexpected error. The parameter target of an ' + 'event assignment was processed twice.' + ) + parameter_def = \ + self.symbols[symbol_id].pop(parameter_target) + if parameter_def is None: + # this happens for parameters that have initial assignments + # or are assignment rule targets + par = self.sbml.getElementBySId(str(parameter_target)) + ia_init = self._get_element_initial_assignment( + par.getId() + ) + parameter_def = { + 'name': par.getName() if par.isSetName() else par.getId(), + 'value': par.getValue() if ia_init is None else ia_init + } + # Fixed parameters are added as species such that they can be + # targets of events. 
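+            # The converted parameter becomes a state with constant dynamics
+            # ('dt' is zero); only event assignments can change its value.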
+            self.symbols[SymbolId.SPECIES][parameter_target] = {
+                'name': parameter_def['name'],
+                'init': sp.Float(parameter_def['value']),
+                # 'compartment': None,  # can ignore for amounts
+                'constant': False,
+                'amount': True,
+                # 'conversion_factor': 1.0,  # can be ignored
+                'index': len(self.symbols[SymbolId.SPECIES]),
+                'dt': sp.Float(0),
+            }
+
+    @log_execution_time('processing SBML events', logger)
+    def _process_events(self) -> None:
+        """Process SBML events."""
+        events = self.sbml.getListOfEvents()
+
+        def get_empty_bolus_value() -> sp.Float:
+            """
+            Used in the event update vector for species that are not affected
+            by the event.
+            """
+            return sp.Symbol('AMICI_EMPTY_BOLUS')
+
+        # Used to update species concentrations when an event affects a
+        # compartment.
+        concentration_species_by_compartment = {
+            symbol_with_assumptions(c.getId()): []
+            for c in self.sbml.getListOfCompartments()
+        }
+        for species, species_def in self.symbols[SymbolId.SPECIES].items():
+            if (
+                # Species is a concentration
+                not species_def.get('amount', True) and
+                # Species has a compartment
+                'compartment' in species_def
+            ):
+                concentration_species_by_compartment[
+                    species_def['compartment']
+                ].append(species)
+
+        for ievent, event in enumerate(events):
+            # get the event id (which is optional unfortunately)
+            event_id = event.getId()
+            if event_id is None or event_id == '':
+                event_id = f'event_{ievent}'
+            event_sym = sp.Symbol(event_id)
+
+            # get and parse the trigger function
+            trigger_sbml = event.getTrigger()
+            trigger_sym = self._sympy_from_sbml_math(trigger_sbml)
+            trigger = _parse_event_trigger(trigger_sym)
+
+            # Currently, all event assignment targets must exist in
+            # self.symbols[SymbolId.SPECIES]
+            state_vector = list(self.symbols[SymbolId.SPECIES].keys())
+
+            # parse the boluses / event assignments
+            bolus = [get_empty_bolus_value() for _ in state_vector]
+            event_assignments = event.getListOfEventAssignments()
+            compartment_event_assignments = set()
+            for event_assignment in event_assignments:
+                variable_sym = \
+                    symbol_with_assumptions(event_assignment.getVariable())
+                if event_assignment.getMath() is None:
+                    # Ignore event assignments with no change in value.
+                    continue
+                formula = self._sympy_from_sbml_math(event_assignment)
+                try:
+                    # Try to find the species in the state vector.
+                    index = state_vector.index(variable_sym)
+                    bolus[index] = formula
+                except ValueError:
+                    raise SBMLException(
+                        'Could not process event assignment for '
+                        f'{str(variable_sym)}. AMICI currently only allows '
+                        'event assignments to species, parameters, or '
+                        'compartments with rate rules.'
+                    )
+                try:
+                    # Try working with the formula now to detect errors
+                    # here instead of at multiple points downstream.
+                    _ = formula - variable_sym
+                except TypeError:
+                    raise SBMLException(
+                        'Could not process event assignment for '
+                        f'{str(variable_sym)}. AMICI only allows symbolic '
+                        'expressions as event assignments.'
+                    )
+                if variable_sym in concentration_species_by_compartment:
+                    compartment_event_assignments.add(variable_sym)
+
+                for comp, assignment in \
+                        self.compartment_assignment_rules.items():
+                    if variable_sym not in assignment.free_symbols:
+                        continue
+                    compartment_event_assignments.add(comp)
+
+            # Update the concentration of species with concentration units
+            # in compartments that were affected by the event assignments.
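+            # The species amount is assumed to be conserved under the volume
+            # change, so the updated concentration is the old amount divided
+            # by the new volume.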
+            for compartment_sym in compartment_event_assignments:
+                for species_sym in concentration_species_by_compartment[
+                        compartment_sym
+                ]:
+                    # If the species was not affected by an event assignment,
+                    # then its old value is rescaled.
+                    if (
+                        bolus[state_vector.index(species_sym)]
+                        == get_empty_bolus_value()
+                    ):
+                        species_value = species_sym
+                    # else the species was affected by an event assignment,
+                    # and the assigned value is rescaled further.
+                    else:
+                        species_value = bolus[state_vector.index(species_sym)]
+                    # New species value is old amount / new volume.
+                    bolus[state_vector.index(species_sym)] = (
+                        species_value * compartment_sym / formula
+                    )
+
+            # Subtract the current species value from each species with an
+            # update, as the bolus will be added on to the current species
+            # value during simulation.
+            for index in range(len(bolus)):
+                if bolus[index] != get_empty_bolus_value():
+                    bolus[index] -= state_vector[index]
+                bolus[index] = bolus[index].subs(get_empty_bolus_value(),
+                                                 sp.Float(0.0))
+
+            self.symbols[SymbolId.EVENT][event_sym] = {
+                'name': event_id,
+                'value': trigger,
+                'state_update': sp.MutableDenseMatrix(bolus),
+                'initial_value':
+                    trigger_sbml.getInitialValue() if trigger_sbml is not None
+                    else True,
+            }
+
+    @log_execution_time('processing SBML observables', logger)
+    def _process_observables(
+            self,
+            observables: Union[Dict[str, Dict[str, str]], None],
+            sigmas: Dict[str, Union[str, float]],
+            noise_distributions: Dict[str, str]
+    ) -> None:
+        """
+        Perform symbolic computations required for observable and objective
+        function evaluation.
+
+        :param observables:
+            dictionary(observableId: {'name': observableName (optional),
+            'formula': formulaString})
+            to be added to the model
+
+        :param sigmas:
+            dictionary(observableId: sigma value or (existing)
+            parameter name)
+
+        :param noise_distributions:
+            dictionary(observableId: noise type)
+            See :py:func:`sbml2amici`.
+        """
+
+        _validate_observables(observables, sigmas, noise_distributions,
+                              events=False)
+
+        # add user-provided observables or make all species, and compartments
+        # with assignment rules, observable
+        if observables:
+            # gather local symbols before parsing observable and sigma formulas
+            for obs in observables.keys():
+                self.add_local_symbol(obs, symbol_with_assumptions(obs))
+
+            self.symbols[SymbolId.OBSERVABLE] = {
+                symbol_with_assumptions(obs): {
+                    'name': definition.get('name', f'y{iobs}'),
+                    'value': self._sympy_from_sbml_math(
+                        definition['formula']
+                    ),
+                    'transformation':
+                        noise_distribution_to_observable_transformation(
+                            noise_distributions.get(obs, 'normal')
+                        )
+                }
+                for iobs, (obs, definition) in enumerate(observables.items())
+            }
+            # check for nesting of observables (unsupported)
+            observable_syms = set(self.symbols[SymbolId.OBSERVABLE].keys())
+            for obs in self.symbols[SymbolId.OBSERVABLE].values():
+                if any(sym in observable_syms
+                       for sym in obs['value'].free_symbols):
+                    raise ValueError(
+                        "Nested observables are not supported, "
+                        f"but observable `{obs['name']} = {obs['value']}` "
+                        "references another observable."
+                    )
+        elif observables is None:
+            self._generate_default_observables()
+
+        _check_symbol_nesting(self.symbols[SymbolId.OBSERVABLE],
+                              'observable')
+
+        self._process_log_likelihood(sigmas, noise_distributions)
+
+    @log_execution_time('processing SBML event observables', logger)
+    def _process_event_observables(
+            self,
+            event_observables: Dict[str, Dict[str, str]],
+            event_sigmas: Dict[str, Union[str, float]],
+            event_noise_distributions: Dict[str, str]
+    ) -> None:
+        """
+        Perform symbolic computations required for event observable and
+        objective function evaluation.
+
+        :param event_observables:
+            See :py:func:`sbml2amici`.
+
+        :param event_sigmas:
+            See :py:func:`sbml2amici`.
+
+        :param event_noise_distributions:
+            See :py:func:`sbml2amici`.
+        """
+        if event_observables is None:
+            return
+
+        _validate_observables(event_observables, event_sigmas,
+                              event_noise_distributions,
+                              events=True)
+
+        # gather local symbols before parsing observable and sigma formulas
+        for obs, definition in event_observables.items():
+            self.add_local_symbol(obs, symbol_with_assumptions(obs))
+            # check corresponding event exists
+            if sp.Symbol(definition['event']) not in \
+                    self.symbols[SymbolId.EVENT]:
+                raise ValueError(
+                    'Could not find an event with the event identifier '
+                    f'{definition["event"]} for the event observable with '
+                    f'name {definition["name"]}.'
+                )
+
+        self.symbols[SymbolId.EVENT_OBSERVABLE] = {
+            symbol_with_assumptions(obs): {
+                'name': definition.get('name', f'z{iobs}'),
+                'value': self._sympy_from_sbml_math(
+                    definition['formula']
+                ),
+                'event': sp.Symbol(definition.get('event')),
+                'transformation':
+                    noise_distribution_to_observable_transformation(
+                        event_noise_distributions.get(obs, 'normal')
+                    )
+            }
+            for iobs, (obs, definition) in
+            enumerate(event_observables.items())
+        }
+
+        wrong_t = sp.Symbol('t')
+        for eo in self.symbols[SymbolId.EVENT_OBSERVABLE].values():
+            if eo['value'].has(wrong_t):
+                warnings.warn(f'Event observable {eo["name"]} uses `t` in '
+                              'its formula, which is not the time variable. '
+                              'For the time variable, please use `time` '
+                              'instead!')
+
+        # check for nesting of observables (unsupported)
+        _check_symbol_nesting(self.symbols[SymbolId.EVENT_OBSERVABLE],
+                              'eventObservable')
+
+        self._process_log_likelihood(event_sigmas, event_noise_distributions,
+                                     events=True)
+        self._process_log_likelihood(event_sigmas, event_noise_distributions,
+                                     events=True, event_reg=True)
+
+    def _generate_default_observables(self):
+        """
+        Generate default observables from species, compartments and
+        (initial) assignment rules.
+        """
+        self.symbols[SymbolId.OBSERVABLE] = {
+            symbol_with_assumptions(f'y{species_id}'): {
+                'name': specie['name'],
+                'value': species_id
+            }
+            for species_id, specie
+            in self.symbols[SymbolId.SPECIES].items()
+        }
+
+        for variable, formula in itt.chain(
+                self.parameter_assignment_rules.items(),
+                self.initial_assignments.items(),
+                self.compartment_assignment_rules.items(),
+                self.species_assignment_rules.items(),
+                self.compartments.items()
+        ):
+            symbol = symbol_with_assumptions(f'y{variable}')
+            # Assignment rules take precedence over compartment volume
+            # definitions, so they need to be evaluated first.
+            # Species assignment rules always overwrite.
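+            # I.e. (sketch of the condition below): keep an existing `y<var>`
+            # entry, unless `var` is the target of a species assignment rule,
+            # in which case the rule formula replaces it.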
+            if symbol in self.symbols[SymbolId.OBSERVABLE] \
+                    and variable not in self.species_assignment_rules:
+                continue
+            self.symbols[SymbolId.OBSERVABLE][symbol] = {
+                'name': str(variable), 'value': formula
+            }
+
+    def _process_log_likelihood(self,
+                                sigmas: Dict[str, Union[str, float]],
+                                noise_distributions: Dict[str, str],
+                                events: bool = False,
+                                event_reg: bool = False):
+        """
+        Perform symbolic computations required for objective function
+        evaluation.
+
+        :param sigmas:
+            See :py:func:`SBMLImporter._process_observables`
+
+        :param noise_distributions:
+            See :py:func:`SBMLImporter._process_observables`
+
+        :param events:
+            indicates whether the passed definitions are for observables
+            (False) or for event observables (True).
+
+        :param event_reg:
+            indicates whether log-likelihood definitions should be processed
+            for event observable regularization (Jrz). If this is activated,
+            measurements are substituted by 0 and the observable by the
+            respective regularization symbol.
+        """
+
+        if events:
+            if event_reg:
+                obs_symbol = SymbolId.EVENT_OBSERVABLE
+                sigma_symbol = SymbolId.SIGMAZ
+                llh_symbol = SymbolId.LLHRZ
+            else:
+                obs_symbol = SymbolId.EVENT_OBSERVABLE
+                sigma_symbol = SymbolId.SIGMAZ
+                llh_symbol = SymbolId.LLHZ
+        else:
+            assert not event_reg
+            obs_symbol = SymbolId.OBSERVABLE
+            sigma_symbol = SymbolId.SIGMAY
+            llh_symbol = SymbolId.LLHY
+
+        for obs_id, obs in self.symbols[obs_symbol].items():
+            obs['measurement_symbol'] = generate_measurement_symbol(obs_id)
+            if event_reg:
+                obs['reg_symbol'] = generate_regularization_symbol(obs_id)
+
+        if not event_reg:
+            self.symbols[sigma_symbol] = {
+                symbol_with_assumptions(f'sigma_{obs_id}'): {
+                    'name': f'sigma_{obs["name"]}',
+                    'value': self._sympy_from_sbml_math(
+                        sigmas.get(str(obs_id), '1.0')
+                    )
+                }
+                for obs_id, obs in self.symbols[obs_symbol].items()
+            }
+
+        self.symbols[llh_symbol] = {}
+        for (obs_id, obs), (sigma_id, sigma) in zip(
+                self.symbols[obs_symbol].items(),
+                self.symbols[sigma_symbol].items()
+        ):
+            symbol = symbol_with_assumptions(f'J{obs_id}')
+            dist = noise_distributions.get(str(obs_id), 'normal')
+            cost_fun = noise_distribution_to_cost_function(dist)(obs_id)
+            value = sp.sympify(cost_fun, locals=dict(zip(
+                _get_str_symbol_identifiers(obs_id),
+                (obs_id, obs['measurement_symbol'], sigma_id)
+            )))
+            if event_reg:
+                value = value.subs(obs['measurement_symbol'], 0.0)
+                value = value.subs(obs_id, obs['reg_symbol'])
+            self.symbols[llh_symbol][symbol] = {
+                'name': f'J{obs["name"]}',
+                'value': value,
+                'dist': dist,
+            }
+
+    @log_execution_time('processing SBML initial assignments', logger)
+    def _process_initial_assignments(self):
+        """
+        Accounts for initial assignments of parameters and species
+        references. Initial assignments for species and compartments are
+        processed in :py:func:`amici.SBMLImporter._process_initial_species`
+        and :py:func:`amici.SBMLImporter._process_compartments`,
+        respectively.
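+        Initial assignments that depend on each other are sorted
+        topologically and flattened before being substituted into all other
+        model expressions.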
+ """ + for ia in self.sbml.getListOfInitialAssignments(): + identifier = _get_identifier_symbol(ia) + if identifier in itt.chain(self.symbols[SymbolId.SPECIES], + self.compartments): + continue + + sym_math = self._get_element_initial_assignment(ia.getId()) + if sym_math is None: + continue + + sym_math = self._make_initial(smart_subs_dict( + sym_math, self.symbols[SymbolId.EXPRESSION], 'value' + )) + self.initial_assignments[_get_identifier_symbol(ia)] = sym_math + + # sort and flatten + self.initial_assignments = toposort_symbols(self.initial_assignments) + for ia_id, ia in self.initial_assignments.items(): + self.initial_assignments[ia_id] = smart_subs_dict( + ia, self.initial_assignments + ) + + for identifier, sym_math in list(self.initial_assignments.items()): + self._replace_in_all_expressions(identifier, sym_math) + + @log_execution_time('processing SBML species references', logger) + def _process_species_references(self): + """ + Replaces species references that define anything but stoichiometries. + + Species references for stoichiometries are processed in + :py:func:`amici.SBMLImporter._process_reactions`. + """ + # doesnt look like there is a better way to get hold of those lists: + species_references = _get_list_of_species_references(self.sbml) + for species_reference in species_references: + if hasattr(species_reference, 'getStoichiometryMath') and \ + species_reference.getStoichiometryMath() is not None: + raise SBMLException('StoichiometryMath is currently not ' + 'supported for species references.') + if species_reference.getId() == '': + continue + + stoich = self._get_element_stoichiometry(species_reference) + self._replace_in_all_expressions( + _get_identifier_symbol(species_reference), + self._sympy_from_sbml_math(stoich) + ) + + def _make_initial(self, sym_math: Union[sp.Expr, None, float] + ) -> Union[sp.Expr, None, float]: + """ + Transforms an expression to its value at the initial time point by + replacing species by their initial values. + + :param sym_math: + symbolic expression + :return: + transformed expression + """ + + if not isinstance(sym_math, sp.Expr): + return sym_math + + for species_id, species in self.symbols[SymbolId.SPECIES].items(): + if 'init' in species: + sym_math = smart_subs(sym_math, species_id, species['init']) + + sym_math = smart_subs(sym_math, self._local_symbols['time'], + sp.Float(0)) + + return sym_math + + def process_conservation_laws(self, ode_model) -> None: + """ + Find conservation laws in reactions and species. + + :param ode_model: + ODEModel object with basic definitions + """ + conservation_laws = [] + + # Create conservation laws for constant species + species_solver = _add_conservation_for_constant_species( + ode_model, conservation_laws + ) + # Non-constant species processed here + if "AMICI_EXPERIMENTAL_SBML_NONCONST_CLS" in os.environ \ + or "GITHUB_ACTIONS" in os.environ: + species_solver = list(set( + self._add_conservation_for_non_constant_species( + ode_model, conservation_laws)) & set(species_solver)) + + # Check, whether species_solver is empty now. 
As currently, AMICI + # cannot handle ODEs without species, CLs must be switched off in this + # case + if not len(species_solver): + conservation_laws = [] + species_solver = list(range(ode_model.num_states_rdata())) + + # prune out species from stoichiometry and + self.stoichiometric_matrix = \ + self.stoichiometric_matrix[species_solver, :] + + # add the found CLs to the ode_model + for cl in conservation_laws: + ode_model.add_conservation_law(**cl) + + def _get_conservation_laws_demartino( + self, + ode_model: ODEModel, + ) -> List[Tuple[int, List[int], List[float]]]: + """Identify conservation laws based on algorithm by DeMartino et al. + (see conserved_moieties.py). + + :param ode_model: Model for which to compute conserved quantities + :returns: List of one tuple per conservation law, each containing: + (0) the index of the (solver-)species to eliminate, + (1) (solver-)indices of all species engaged in the conserved + quantity (including the eliminated one) + (2) coefficients for the species in (1) + """ + from .conserved_quantities_demartino \ + import compute_moiety_conservation_laws + + try: + stoichiometric_list = [ + float(entry) for entry in self.stoichiometric_matrix.T.flat() + ] + except TypeError: + # Due to the numerical algorithm currently used to identify + # conserved quantities, we can't have symbols in the + # stoichiometric matrix + warnings.warn("Conservation laws for non-constant species in " + "combination with parameterized stoichiometric " + "coefficients are not currently supported " + "and will be turned off.") + return [] + + if any(rule.getTypeCode() == sbml.SBML_RATE_RULE + for rule in self.sbml.getListOfRules()): + # see SBML semantic test suite, case 33 for an example + warnings.warn("Conservation laws for non-constant species in " + "models with RateRules are not currently supported " + "and will be turned off.") + return [] + + cls_state_idxs, cls_coefficients = compute_moiety_conservation_laws( + stoichiometric_list, *self.stoichiometric_matrix.shape, + rng_seed=32, + species_names=[str(x.get_id()) for x in ode_model._states] + ) + + # Sparsify conserved quantities + # ``compute_moiety_conservation_laws`` identifies conserved quantities + # with positive coefficients. The resulting system is, therefore, + # often non-sparse. This leads to circular dependencies in the + # state expressions of eliminated states. The identified conserved + # quantities are linearly independent. We can construct `A` as in + # `A * x0 = total_cl` and bring it to reduced row echelon form. The + # pivot species are the ones to be eliminated. The resulting state + # expressions are sparse and void of any circular dependencies. 
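+        # Hypothetical example: for conserved quantities x1 + x2 and
+        # x1 + x2 + x3, `A` has rows [1, 1, 0] and [1, 1, 1]; its reduced
+        # row echelon form has rows [1, 1, 0] and [0, 0, 1] with pivots x1
+        # and x3, so x1 and x3 are eliminated and neither state expression
+        # references the other eliminated state.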
+ A = sp.zeros(len(cls_coefficients), len(ode_model._states)) + for i_cl, (cl, coefficients) in enumerate(zip(cls_state_idxs, + cls_coefficients)): + for i, c in zip(cl, coefficients): + A[i_cl, i] = sp.Rational(c) + rref, pivots = A.rref() + + raw_cls = [] + for i_cl, target_state_model_idx in enumerate(pivots): + # collect values for species engaged in the current CL + state_idxs = [i for i, coeff in enumerate(rref[i_cl, :]) + if coeff] + coefficients = [coeff for coeff in rref[i_cl, :] if coeff] + raw_cls.append((target_state_model_idx, state_idxs, + coefficients),) + return raw_cls + + def _get_conservation_laws_rref( + self + ) -> List[Tuple[int, List[int], List[float]]]: + """Identify conservation laws based on left nullspace of the + stoichiometric matrix, computed through (numeric) Gaussian elimination + + :returns: List of one tuple per conservation law, each containing: + (0) the index of the (solver-)species to eliminate, + (1) (solver-)indices of all species engaged in the conserved + quantity (including the eliminated one) + (2) coefficients for the species in (1) + """ + import numpy as np + from numpy.linalg import matrix_rank + from .conserved_quantities_rref import nullspace_by_rref, rref + + try: + S = np.asarray(self.stoichiometric_matrix, dtype=float) + except TypeError: + # Due to the numerical algorithm currently used to identify + # conserved quantities, we can't have symbols in the + # stoichiometric matrix + warnings.warn("Conservation laws for non-constant species in " + "combination with parameterized stoichiometric " + "coefficients are not currently supported " + "and will be turned off.") + return [] + + if any(rule.getTypeCode() == sbml.SBML_RATE_RULE + for rule in self.sbml.getListOfRules()): + # see SBML semantic test suite, case 33 for an example + warnings.warn("Conservation laws for non-constant species in " + "models with RateRules are not currently supported " + "and will be turned off.") + return [] + + # Determine rank via SVD + rank = matrix_rank(S) if S.shape[0] else 0 + if rank == S.shape[0]: + return [] + kernel = nullspace_by_rref(S.T) + # Check dimensions - due to numerical errors, nullspace_by_rref may + # fail in certain situations + if kernel.shape[0] != S.shape[0] - rank: + raise AssertionError( + "Failed to determine all conserved quantities " + f"(found {kernel.shape[0]}, expected {S.shape[0] - rank}). " + "Try another algorithm, disable detection of conservation " + "laws, or submit a bug report along with the model." 
+            )
+        kernel = rref(kernel)
+        raw_cls = []
+        for row in kernel:
+            state_idxs = [i for i, coeff in enumerate(row) if coeff]
+            coefficients = [coeff for coeff in row if coeff]
+            raw_cls.append((state_idxs[0], state_idxs, coefficients))
+
+        return raw_cls
+
+    def _add_conservation_for_non_constant_species(
+            self,
+            ode_model: ODEModel,
+            conservation_laws: List[ConservationLaw]
+    ) -> List[int]:
+        """Add non-constant species to conservation laws
+
+        :param ode_model:
+            ODEModel object with basic definitions
+        :param conservation_laws:
+            List of already known conservation laws
+        :returns:
+            List of species indices which later remain in the ODE solver
+        """
+        # indices of retained species
+        species_solver = list(range(ode_model.num_states_rdata()))
+
+        algorithm = os.environ.get("AMICI_EXPERIMENTAL_SBML_NONCONST_CLS", "")
+        if algorithm.lower() == "demartino":
+            raw_cls = self._get_conservation_laws_demartino(ode_model)
+        else:
+            raw_cls = self._get_conservation_laws_rref()
+
+        if not raw_cls:
+            # no conservation laws identified
+            return species_solver
+
+        species_to_be_removed = {x[0] for x in raw_cls}
+
+        # keep new conservation laws separate until we know everything worked
+        new_conservation_laws = []
+        # previously removed constant species
+        eliminated_state_ids = {cl['state'] for cl in conservation_laws}
+
+        all_state_ids = [x.get_id() for x in ode_model._states]
+        all_compartment_sizes = [
+            sp.Integer(1)
+            if self.symbols[SymbolId.SPECIES][state_id]['amount']
+            else self.compartments[
+                self.symbols[SymbolId.SPECIES][state_id]['compartment']
+            ]
+            for state_id in all_state_ids
+        ]
+
+        # iterate over the list of conservation laws and create symbolic
+        # expressions
+        for target_state_model_idx, state_idxs, coefficients in raw_cls:
+            if all_state_ids[target_state_model_idx] in eliminated_state_ids:
+                # constant state, already eliminated
+                continue
+            # collect values for species engaged in the current CL
+            state_ids = [all_state_ids[i_state] for i_state in state_idxs]
+            compartment_sizes = [all_compartment_sizes[i] for i in state_idxs]
+
+            target_state_id = all_state_ids[target_state_model_idx]
+            total_abundance = symbol_with_assumptions(f'tcl_{target_state_id}')
+
+            new_conservation_laws.append({
+                'state': target_state_id,
+                'total_abundance': total_abundance,
+                'coefficients': {
+                    state_id: coeff * compartment
+                    for state_id, coeff, compartment
+                    in zip(state_ids, coefficients, compartment_sizes)
+                },
+            })
+            species_to_be_removed.add(target_state_model_idx)
+
+        conservation_laws.extend(new_conservation_laws)
+
+        # list of species that are not determined by conservation laws
+        return [ix for ix in species_solver if ix not in species_to_be_removed]
+
+    def _replace_compartments_with_volumes(self):
+        """
+        Replaces compartment symbols in expressions with their respective
+        (possibly variable) volumes.
+        """
+        for comp, vol in self.compartments.items():
+            if comp in self.symbols[SymbolId.SPECIES]:
+                # for compartments with rate rules, the volume is only the
+                # initial volume
+                for species in self.symbols[SymbolId.SPECIES].values():
+                    if isinstance(species['init'], sp.Expr):
+                        species['init'] = smart_subs(species['init'],
+                                                     comp, vol)
+                continue
+            self._replace_in_all_expressions(comp, vol)
+
+    def _replace_in_all_expressions(self,
+                                    old: sp.Symbol,
+                                    new: sp.Expr,
+                                    replace_identifiers=False) -> None:
+        """
+        Replace 'old' by 'new' in all symbolic expressions.
+
+        :param old:
+            symbolic variable to be replaced
+
+        :param new:
+            replacement symbolic variable
+
+        :param replace_identifiers:
+            whether to also replace 'old' where it is used as a dictionary
+            key or symbol identifier, not only inside expressions
+        """
+        fields = [
+            'stoichiometric_matrix', 'flux_vector',
+        ]
+        for field in fields:
+            if field in dir(self):
+                self.__setattr__(field, smart_subs(
+                    self.__getattribute__(field), old, new
+                ))
+
+        dictfields = [
+            'compartment_assignment_rules', 'parameter_assignment_rules',
+            'initial_assignments'
+        ]
+        for dictfield in dictfields:
+            d = getattr(self, dictfield)
+
+            # replace identifiers
+            if old in d and replace_identifiers:
+                d[new] = d[old]
+                del d[old]
+
+            if dictfield == 'initial_assignments':
+                tmp_new = self._make_initial(new)
+            else:
+                tmp_new = new
+
+            # replace values
+            for k in d:
+                d[k] = smart_subs(d[k], old, tmp_new)
+
+        # replace in identifiers
+        if replace_identifiers:
+            for symbol in [SymbolId.EXPRESSION, SymbolId.SPECIES]:
+                # completely recreate the dict to keep ordering consistent
+                if old not in self.symbols[symbol]:
+                    continue
+                self.symbols[symbol] = {
+                    smart_subs(k, old, new): v
+                    for k, v in self.symbols[symbol].items()
+                }
+
+            for symbol in [SymbolId.OBSERVABLE, SymbolId.LLHY,
+                           SymbolId.SIGMAY]:
+                if old not in self.symbols[symbol]:
+                    continue
+                self.symbols[symbol][new] = self.symbols[symbol][old]
+                del self.symbols[symbol][old]
+
+        # replace in values
+        for symbol in [SymbolId.OBSERVABLE, SymbolId.LLHY, SymbolId.LLHZ,
+                       SymbolId.SIGMAY, SymbolId.SIGMAZ, SymbolId.EXPRESSION,
+                       SymbolId.EVENT, SymbolId.EVENT_OBSERVABLE]:
+            if not self.symbols.get(symbol, None):
+                continue
+            for element in self.symbols[symbol].values():
+                element['value'] = smart_subs(element['value'], old, new)
+
+        # replace in event state updates (boluses)
+        if self.symbols.get(SymbolId.EVENT, False):
+            for event in self.symbols[SymbolId.EVENT].values():
+                for index in range(len(event['state_update'])):
+                    event['state_update'][index] = \
+                        smart_subs(event['state_update'][index], old, new)
+
+        if SymbolId.SPECIES in self.symbols:
+            for species in self.symbols[SymbolId.SPECIES].values():
+                species['init'] = smart_subs(species['init'],
+                                             old, self._make_initial(new))
+
+                fields = ['dt']
+                if replace_identifiers:
+                    fields.append('compartment')
+
+                for field in fields:
+                    if field in species:
+                        species[field] = smart_subs(species[field], old, new)
+
+        # Initial compartment volume may also be specified with an assignment
+        # rule (at the end of the _process_species method), hence needs to be
+        # processed here too.
+        self.compartments = {smart_subs(c, old, new) if replace_identifiers
+                             else c:
+                             smart_subs(v, old, self._make_initial(new))
+                             for c, v in self.compartments.items()}
+
+    def _clean_reserved_symbols(self) -> None:
+        """
+        Remove all reserved symbols from self.symbols
+        """
+        for sym in RESERVED_SYMBOLS:
+            old_symbol = symbol_with_assumptions(sym)
+            new_symbol = symbol_with_assumptions(f'amici_{sym}')
+            self._replace_in_all_expressions(old_symbol, new_symbol,
+                                             replace_identifiers=True)
+            for symbols_ids, symbols in self.symbols.items():
+                if old_symbol in symbols:
+                    # reconstitute the whole dict in order to keep the ordering
+                    self.symbols[symbols_ids] = {
+                        new_symbol if k is old_symbol else k: v
+                        for k, v in symbols.items()
+                    }
+
+    def _sympy_from_sbml_math(self, var_or_math: Union[sbml.SBase, str]
+                              ) -> Union[sp.Expr, float, None]:
+        """
+        Sympify math expressions of SBML variables with all sanity checks
+        and transformations
+
+        :param var_or_math:
+            SBML variable that has a getMath() function, or a math string
+        :return:
+            sympified symbolic expression
+        """
+        if isinstance(var_or_math, sbml.SBase):
+            math_string = sbml.formulaToL3StringWithSettings(
+                var_or_math.getMath(),
+                self.sbml_parser_settings
+            )
+            ele_name = var_or_math.element_name
+        else:
+            math_string = var_or_math
+            ele_name = 'string'
+        math_string = replace_logx(math_string)
+        try:
+            try:
+                formula = sp.sympify(_parse_logical_operators(
+                    math_string
+                ), locals=self._local_symbols)
+            except TypeError as err:
+                if str(err) == 'BooleanAtom not allowed in this context.':
+                    formula = sp.sympify(_parse_logical_operators(
+                        math_string
+                    ), locals={'true': sp.Float(1.0), 'false': sp.Float(0.0),
+                               **self._local_symbols})
+                else:
+                    raise
+        except (sp.SympifyError, TypeError, ZeroDivisionError) as err:
+            raise SBMLException(f'{ele_name} "{math_string}" '
+                                'contains an unsupported expression: '
+                                f'{err}.')
+
+        if isinstance(formula, sp.Expr):
+            formula = _parse_special_functions_sbml(formula)
+            _check_unsupported_functions_sbml(formula,
+                                              expression_type=ele_name)
+        return formula
+
+    def _get_element_initial_assignment(self,
+                                        element_id: str) -> Union[sp.Expr,
+                                                                  None]:
+        """
+        Extract the value of an SBML variable according to its initial
+        assignment
+
+        :param element_id:
+            SBML variable ID
+        :return:
+            symbolic expression for the initial assignment, or ``None``
+            if the variable has no initial assignment
+        """
+        assignment = self.sbml.getInitialAssignment(
+            element_id
+        )
+        if assignment is None:
+            return None
+        sym = self._sympy_from_sbml_math(assignment)
+        # this is an initial assignment so we need to use
+        # initial conditions
+        sym = self._make_initial(sym)
+        return sym
+
+    def _get_element_stoichiometry(self,
+                                   ele: sbml.SBase) -> sp.Expr:
+        """
+        Computes the stoichiometry of a reactant or product of a reaction
+
+        :param ele:
+            reactant or product
+        :return:
+            symbolic variable that defines stoichiometry
+        """
+        if ele.isSetId():
+            sym = self._get_element_initial_assignment(ele.getId())
+            if sym is not None:
+                return sym
+
+            if self.is_assignment_rule_target(ele):
+                return _get_identifier_symbol(ele)
+
+        if ele.isSetStoichiometry():
+            stoichiometry: float = ele.getStoichiometry()
+            return sp.Integer(stoichiometry) if stoichiometry.is_integer() \
+                else sp.Float(stoichiometry)
+
+        return sp.Integer(1)
+
+    def is_assignment_rule_target(self, element: sbml.SBase) -> bool:
+        """
+        Checks if an element is the target of a valid assignment rule in the
+        model.
+
+        :param element:
+            SBML variable
+
+        :return:
+            True if the element is the target of a valid assignment rule,
+            False otherwise
+        """
+        a = self.sbml.getAssignmentRuleByVariable(element.getId())
+        return a is not None and self._sympy_from_sbml_math(a) is not None
+
+    def is_rate_rule_target(self, element: sbml.SBase) -> bool:
+        """
+        Checks if an element is the target of a valid rate rule in the
+        model.
+
+        :param element:
+            SBML variable
+
+        :return:
+            True if the element is the target of a valid rate rule,
+            False otherwise
+        """
+        a = self.sbml.getRateRuleByVariable(element.getId())
+        return a is not None and self._sympy_from_sbml_math(a) is not None
+
+
+def _check_lib_sbml_errors(sbml_doc: sbml.SBMLDocument,
+                           show_warnings: bool = False) -> None:
+    """
+    Checks the error log of the given SBML document.
+
+    :param sbml_doc:
+        SBML document
+
+    :param show_warnings:
+        display SBML warnings
+    """
+    num_warning = sbml_doc.getNumErrors(sbml.LIBSBML_SEV_WARNING)
+    num_error = sbml_doc.getNumErrors(sbml.LIBSBML_SEV_ERROR)
+    num_fatal = sbml_doc.getNumErrors(sbml.LIBSBML_SEV_FATAL)
+
+    if num_warning + num_error + num_fatal:
+        for i_error in range(sbml_doc.getNumErrors()):
+            error = sbml_doc.getError(i_error)
+            # we ignore any info messages for now
+            if error.getSeverity() >= sbml.LIBSBML_SEV_ERROR \
+                    or (show_warnings and
+                        error.getSeverity() >= sbml.LIBSBML_SEV_WARNING):
+                logger.error(f'libSBML {error.getCategoryAsString()} '
+                             f'({error.getSeverityAsString()}):'
+                             f' {error.getMessage()}')
+
+    if num_error + num_fatal:
+        raise SBMLException(
+            'SBML Document failed to load (see error messages above)'
+        )
+
+
+def _parse_event_trigger(trigger: sp.Expr) -> sp.Expr:
+    """
+    Recursively translates a boolean trigger function into a real-valued
+    root function
+
+    :param trigger:
+        boolean trigger expression
+    :return: real-valued root function expression
+    """
+    # Events can be defined without trigger, i.e., the event will never fire.
+    # In this case, set a dummy trigger:
+    if trigger is None:
+        return sp.Float(1.0)
+    if trigger.is_Relational:
+        root = trigger.args[0] - trigger.args[1]
+        _check_unsupported_functions_sbml(root, 'sympy.Expression')
+
+        # convert relational expressions into trigger functions
+        if isinstance(trigger, (sp.core.relational.LessThan,
+                                sp.core.relational.StrictLessThan)):
+            # y < x or y <= x
+            return -root
+        if isinstance(trigger, (sp.core.relational.GreaterThan,
+                                sp.core.relational.StrictGreaterThan)):
+            # y >= x or y > x
+            return root
+
+    # or(x,y): any of {x,y} is > 0: sp.Max(x, y)
+    if isinstance(trigger, sp.Or):
+        return sp.Max(*[_parse_event_trigger(arg) for arg in trigger.args])
+    # and(x,y): all out of {x,y} are > 0: sp.Min(x, y)
+    if isinstance(trigger, sp.And):
+        return sp.Min(*[_parse_event_trigger(arg) for arg in trigger.args])
+
+    raise SBMLException(
+        'AMICI can not parse piecewise/event trigger functions with argument '
+        f'{trigger}.'
+    )
+
+
+def _parse_logical_operators(math_str: Union[str, float, None]
+                             ) -> Union[str, float, None]:
+    """
+    Parses a math string in order to replace logical operators by a form
+    parsable for sympy
+
+    :param math_str:
+        str with mathematical expression
+    :return:
+        parsed math_str
+    """
+    if not isinstance(math_str, str):
+        return math_str
+
+    if ' xor(' in math_str or ' Xor(' in math_str:
+        raise SBMLException('Xor is currently not supported as logical '
+                            'operation.')
+
+    return (math_str.replace('&&', '&')).replace('||', '|')
+
+
+def assignmentRules2observables(sbml_model: sbml.Model,
+                                filter_function: Callable = lambda *_: True):
+    """
+    Turn assignment rules into observables.
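+
+    For instance, for a hypothetical model containing a parameter ``obs1``
+    that is the target of an assignment rule ``obs1 = 2 * x1``, this returns
+    ``{'obs1': {'name': 'obs1', 'formula': '2 * x1'}}`` and removes both the
+    rule and the parameter ``obs1`` from the model.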
+
+    :param sbml_model:
+        Model to operate on
+
+    :param filter_function:
+        Callback function taking assignment variable as input and returning
+        ``True``/``False`` to indicate if the respective rule should be
+        turned into an observable.
+
+    :return:
+        A dictionary(observableId: {
+        'name': observableName,
+        'formula': formulaString
+        })
+    """
+    observables = {}
+    for p in sbml_model.getListOfParameters():
+        parameter_id = p.getId()
+        if filter_function(p):
+            observables[parameter_id] = {
+                'name': p.getName() if p.isSetName() else p.getId(),
+                'formula': sbml_model.getAssignmentRuleByVariable(
+                    parameter_id
+                ).getFormula()
+            }
+
+    for parameter_id in observables:
+        sbml_model.removeRuleByVariable(parameter_id)
+        sbml_model.removeParameter(parameter_id)
+
+    return observables
+
+
+def _add_conservation_for_constant_species(
+        ode_model: ODEModel,
+        conservation_laws: List[ConservationLaw]
+) -> List[int]:
+    """
+    Adds constant species to conservation laws
+
+    :param ode_model:
+        ODEModel object with basic definitions
+
+    :param conservation_laws:
+        List of already known conservation laws
+
+    :returns:
+        List of species indices which remain later in the ODE solver
+    """
+
+    # decide which species to keep in stoichiometry
+    species_solver = list(range(ode_model.num_states_rdata()))
+
+    # iterate over species, find constant ones
+    for ix in reversed(range(ode_model.num_states_rdata())):
+        if ode_model.state_is_constant(ix):
+            # don't use sym('x') here since conservation laws need to be
+            # added before symbols are generated
+            target_state = ode_model._states[ix].get_id()
+            total_abundance = symbol_with_assumptions(f'tcl_{target_state}')
+            conservation_laws.append({
+                'state': target_state,
+                'total_abundance': total_abundance,
+                'coefficients': {target_state: 1.0},
+            })
+            # mark species to delete from stoichiometric matrix
+            species_solver.pop(ix)
+
+    return species_solver
+
+
+def _get_species_compartment_symbol(species: sbml.Species) -> sp.Symbol:
+    """
+    Generate compartment symbol for the compartment of a specific species.
+    This function will always return the same unique python object for a
+    given species name.
+
+    :param species:
+        SBML species
+    :return:
+        compartment symbol
+    """
+    return symbol_with_assumptions(species.getCompartment())
+
+
+def _get_identifier_symbol(var: sbml.SBase) -> sp.Symbol:
+    """
+    Generate identifier symbol for an SBML variable.
+    This function will always return the same unique python object for a
+    given entity.
+
+    :param var:
+        SBML variable
+    :return:
+        identifier symbol
+    """
+    return symbol_with_assumptions(var.getId())
+
+
+def get_species_initial(species: sbml.Species) -> sp.Expr:
+    """
+    Extract the initial concentration from a given species
+
+    :param species:
+        SBML species
+
+    :return:
+        initial species concentration
+    """
+    if species.isSetInitialConcentration():
+        conc = species.getInitialConcentration()
+        if species.getHasOnlySubstanceUnits():
+            return sp.Float(conc) * _get_species_compartment_symbol(species)
+        else:
+            return sp.Float(conc)
+
+    if species.isSetInitialAmount():
+        amt = species.getInitialAmount()
+        if math.isnan(amt):
+            return sp.Float(0.0)
+
+        if species.getHasOnlySubstanceUnits():
+            return sp.Float(amt)
+        else:
+            return sp.Float(amt) / _get_species_compartment_symbol(species)
+
+    return sp.Float(0.0)
+
+
+def _get_list_of_species_references(sbml_model: sbml.Model) \
+        -> List[sbml.SpeciesReference]:
+    """
+    Extracts the list of species references, as SBML doesn't provide a native
+    function for this.
+ + :param sbml_model: + SBML model instance + + :return: + ListOfSpeciesReferences + """ + return [ + reference + for element in sbml_model.all_elements + if isinstance(element, sbml.ListOfSpeciesReferences) + for reference in element + ] + + +def replace_logx(math_str: Union[str, float, None]) -> Union[str, float, None]: + """ + Replace logX(.) by log(., X) since sympy cannot parse the former + + :param math_str: + string for sympification + + :return: + sympifiable string + """ + if not isinstance(math_str, str): + return math_str + + return re.sub( + r'(^|\W)log(\d+)\(', r'\g<1>1/ln(\2)*ln(', math_str + ) + + +def _collect_event_assignment_parameter_targets(sbml_model: sbml.Model): + targets = set() + sbml_parameters = sbml_model.getListOfParameters() + sbml_parameter_ids = [p.getId() for p in sbml_parameters] + for event in sbml_model.getListOfEvents(): + for event_assignment in event.getListOfEventAssignments(): + target_id = event_assignment.getVariable() + if target_id in sbml_parameter_ids: + targets.add(_get_identifier_symbol( + sbml_parameters[sbml_parameter_ids.index(target_id)] + )) + return targets + + +def _check_unsupported_functions_sbml(sym: sp.Expr, + expression_type: str, + full_sym: Optional[sp.Expr] = None): + try: + _check_unsupported_functions(sym, expression_type, full_sym) + except RuntimeError as err: + raise SBMLException(str(err)) + + +def _parse_special_functions_sbml(sym: sp.Expr, + toplevel: bool = True) -> sp.Expr: + try: + return _parse_special_functions(sym, toplevel) + except RuntimeError as err: + raise SBMLException(str(err)) + + +def _validate_observables( + observables: Union[Dict[str, Dict[str, str]], None], + sigmas: Dict[str, Union[str, float]], + noise_distributions: Dict[str, str], + events: bool = False +) -> None: + + if observables is None or not observables: + return + + # Ensure no non-existing observableIds have been specified + # (no problem here, but usually an upstream bug) + unknown_ids = set(sigmas.keys()) - set(observables.keys()) + if unknown_ids: + raise ValueError( + f"Sigma provided for unknown " + f"{'eventO' if events else 'o'}bservableIds: " + f"{unknown_ids}.") + + # Ensure no non-existing observableIds have been specified + # (no problem here, but usually an upstream bug) + unknown_ids = set(noise_distributions.keys()) - \ + set(observables.keys()) + if unknown_ids: + raise ValueError( + f"Noise distribution provided for unknown " + f"{'eventO' if events else 'o'}bservableIds: " + f"{unknown_ids}.") + + +def _check_symbol_nesting(symbols: Dict[sp.Symbol, Dict[str, sp.Expr]], + symbol_type: str): + observable_syms = set(symbols.keys()) + for obs in symbols.values(): + if any(sym in observable_syms + for sym in obs['value'].free_symbols): + raise ValueError( + "Nested observables are not supported, " + f"but {symbol_type} `{obs['name']} = {obs['value']}` " + "references another observable." 
+ ) diff --git a/python/sdist/amici/setup.template.py b/python/sdist/amici/setup.template.py deleted file mode 120000 index 9258160253..0000000000 --- a/python/sdist/amici/setup.template.py +++ /dev/null @@ -1 +0,0 @@ -../../amici/setup.template.py \ No newline at end of file diff --git a/python/sdist/amici/setup.template.py b/python/sdist/amici/setup.template.py new file mode 100644 index 0000000000..9e5297be62 --- /dev/null +++ b/python/sdist/amici/setup.template.py @@ -0,0 +1,178 @@ +"""AMICI model package setup""" + + +import contextlib +import os +import sys +from typing import List + +from amici import amici_path, hdf5_enabled, compiledWithOpenMP +from amici.custom_commands import (set_compiler_specific_extension_options, + compile_parallel) +from amici.setuptools import (get_blas_config, + get_hdf5_config, + add_coverage_flags_if_required, + add_debug_flags_if_required, + add_openmp_flags, + ) +from setuptools import find_packages, setup, Extension +from setuptools.command.build_ext import build_ext + + +class ModelBuildExt(build_ext): + """Custom build_ext""" + + def build_extension(self, ext): + # Work-around for compiler-specific build options + set_compiler_specific_extension_options( + ext, self.compiler.compiler_type) + + + # Monkey-patch compiler instance method for parallel compilation + # except for Windows, where this seems to be incompatible with + # providing swig files. Not investigated further... + if sys.platform != 'win32': + import setuptools._distutils.ccompiler + self.compiler.compile = compile_parallel.__get__( + self.compiler, setuptools._distutils.ccompiler.CCompiler) + + print(f"Building model extension in {os.getcwd()}") + + build_ext.build_extension(self, ext) + + def find_swig(self) -> str: + """Find SWIG executable + + Overrides horribly outdated distutils function.""" + + from amici.swig import find_swig + return find_swig() + + +def get_model_sources() -> List[str]: + """Get list of source files for the amici base library""" + import glob + model_sources = glob.glob('*.cpp') + with contextlib.suppress(ValueError): + model_sources.remove('main.cpp') + return model_sources + + +def get_amici_libs() -> List[str]: + """ + Get list of libraries for the amici base library + """ + return ['amici', 'sundials', 'suitesparse'] + + +def get_extension() -> Extension: + """Get setuptools extension object for this AMICI model package""" + + cxx_flags = [] + linker_flags = [] + + if compiledWithOpenMP(): + # Only build model with OpenMP support if AMICI base packages was built + # that way + add_openmp_flags(cxx_flags=cxx_flags, ldflags=linker_flags) + + add_coverage_flags_if_required(cxx_flags, linker_flags) + add_debug_flags_if_required(cxx_flags, linker_flags) + + h5pkgcfg = get_hdf5_config() + + blaspkgcfg = get_blas_config() + linker_flags.extend(blaspkgcfg.get('extra_link_args', [])) + + libraries = [*get_amici_libs(), *blaspkgcfg['libraries']] + if hdf5_enabled: + libraries.extend(['hdf5_hl_cpp', 'hdf5_hl', 'hdf5_cpp', 'hdf5']) + + sources = [os.path.join("swig", "TPL_MODELNAME.i"), *get_model_sources()] + + # compiler and linker flags for libamici + if 'AMICI_CXXFLAGS' in os.environ: + cxx_flags.extend(os.environ['AMICI_CXXFLAGS'].split(' ')) + if 'AMICI_LDFLAGS' in os.environ: + linker_flags.extend(os.environ['AMICI_LDFLAGS'].split(' ')) + + ext_include_dirs = [ + os.getcwd(), + os.path.join(amici_path, 'include'), + os.path.join(amici_path, "ThirdParty", "gsl"), + os.path.join(amici_path, "ThirdParty", "sundials", "include"), + os.path.join(amici_path, 
"ThirdParty", "SuiteSparse", "include"), + *h5pkgcfg['include_dirs'], + *blaspkgcfg['include_dirs'] + ] + + ext_library_dirs = [ + *h5pkgcfg['library_dirs'], + *blaspkgcfg['library_dirs'], + os.path.join(amici_path, 'libs') + ] + + # Build shared object + ext = Extension( + 'TPL_MODELNAME._TPL_MODELNAME', + sources=sources, + include_dirs=ext_include_dirs, + libraries=libraries, + library_dirs=ext_library_dirs, + swig_opts=[ + '-c++', '-modern', '-outdir', 'TPL_MODELNAME', + '-I%s' % os.path.join(amici_path, 'swig'), + '-I%s' % os.path.join(amici_path, 'include'), + ], + extra_compile_args=cxx_flags, + extra_link_args=linker_flags + ) + + # see `set_compiler_specific_extension_options` + ext.extra_compile_args_mingw32 = ['-std=c++14'] + ext.extra_compile_args_unix = ['-std=c++14'] + ext.extra_compile_args_msvc = ['/std:c++14'] + + return ext + + +# Change working directory to setup.py location +os.chdir(os.path.dirname(os.path.abspath(__file__))) + +MODEL_EXT = get_extension() + +CLASSIFIERS = [ + 'Development Status :: 3 - Alpha', + 'Intended Audience :: Science/Research', + 'Operating System :: POSIX :: Linux', + 'Operating System :: MacOS :: MacOS X', + 'Programming Language :: Python', + 'Programming Language :: C++', + 'Topic :: Scientific/Engineering :: Bio-Informatics', +] + +CMDCLASS = { + # For parallel compilation and custom swig finder + 'build_ext': ModelBuildExt, +} + +# Install +setup( + name='TPL_MODELNAME', + cmdclass=CMDCLASS, + version='TPL_PACKAGE_VERSION', + description='AMICI-generated module for model TPL_MODELNAME', + url='https://github.com/AMICI-dev/AMICI', + author='model-author-todo', + author_email='model-author-todo', + # license = 'BSD', + ext_modules=[MODEL_EXT], + packages=find_packages(), + install_requires=['amici==TPL_AMICI_VERSION'], + extras_require={'wurlitzer': ['wurlitzer']}, + python_requires='>=3.8', + package_data={}, + zip_safe=False, + include_package_data=True, + classifiers=CLASSIFIERS, +) diff --git a/python/sdist/amici/setuptools.py b/python/sdist/amici/setuptools.py deleted file mode 120000 index 9fc45c94ba..0000000000 --- a/python/sdist/amici/setuptools.py +++ /dev/null @@ -1 +0,0 @@ -../../amici/setuptools.py \ No newline at end of file diff --git a/python/sdist/amici/setuptools.py b/python/sdist/amici/setuptools.py new file mode 100644 index 0000000000..7cfcf61fe1 --- /dev/null +++ b/python/sdist/amici/setuptools.py @@ -0,0 +1,284 @@ +""" +setuptools +---------- +Helper functions for AMICI core and module package preparation +""" + +import os +import sys +import shlex +import subprocess + +from .swig import find_swig, get_swig_version + +try: + import pkgconfig # optional + + # pkgconfig python module might be installed without pkg-config binary + # being available + pkgconfig.exists('somePackageName') +except (ModuleNotFoundError, EnvironmentError): + pkgconfig = None + +from typing import Dict, List, Union, Tuple, Any + +PackageInfo = Dict[str, List[Union[str, Tuple[str, Any]]]] + + +def get_blas_config() -> PackageInfo: + """ + Find CBLAS-compatible BLAS + + :return: + blas related package information + """ + + blaspkgcfg = {'include_dirs': [], + 'library_dirs': [], + 'libraries': [], + 'define_macros': [], + 'extra_compile_args': [], + 'extra_link_args': [] + } + + # Check environment variables + if 'BLAS_CFLAGS' in os.environ: + blaspkgcfg['extra_compile_args'].extend( + shlex.split(os.environ['BLAS_CFLAGS']) + ) + + if 'BLAS_LIBS' in os.environ: + blaspkgcfg['extra_link_args'].extend( + shlex.split(os.environ['BLAS_LIBS']) 
+ ) + + if 'BLAS_CFLAGS' in os.environ or 'BLAS_LIBS' in os.environ: + # If options have been provided by the user, we don't try to detect + # anything by ourselves + return blaspkgcfg + + # Try environment modules + # MKL + if 'MKLROOT' in os.environ: + if 'MKL_INC' in os.environ: + blaspkgcfg['extra_compile_args'].extend( + shlex.split(os.environ['MKL_INC']) + ) + if 'MKL_LIB' in os.environ: + blaspkgcfg['extra_link_args'].extend( + shlex.split(os.environ['MKL_LIB']) + ) + blaspkgcfg['define_macros'].append(('AMICI_BLAS_MKL', None), ) + return blaspkgcfg + + # Try pkgconfig + if pkgconfig: + for blas_name in ['cblas', 'openblas']: + if pkgconfig.exists(blas_name): + blaspkgcfg = pkgconfig.parse(blas_name) + blaspkgcfg['extra_compile_args'] = [ + pkgconfig.cflags(blas_name) + ] + blaspkgcfg['extra_link_args'] = [ + pkgconfig.libs(blas_name) + ] + + return blaspkgcfg + + # If none of the previous worked, fall back to libcblas in default paths + blaspkgcfg['libraries'] = ['cblas'] + + return blaspkgcfg + + +def get_hdf5_config() -> PackageInfo: + """ + Find HDF5 include dir and libs + + :return: + hdf5 related package information + """ + + h5pkgcfg = {'include_dirs': [], + 'library_dirs': [], + 'libraries': [], + 'define_macros': [] + } + hdf5_include_dir_found = False + hdf5_library_dir_found = False + + # try for hdf5 in standard locations + hdf5_include_dir_hints = [ + '/usr/include/hdf5/serial', + '/usr/local/include', + '/usr/include', # travis ubuntu xenial, centos + '/usr/local/Cellar/hdf5/1.10.2_1/include' # travis macOS + ] + hdf5_library_dir_hints = [ + '/usr/lib/x86_64-linux-gnu/', # travis ubuntu xenial + '/usr/lib/x86_64-linux-gnu/hdf5/serial', + '/usr/local/lib', + '/usr/lib64/', # CentOS + '/usr/local/Cellar/hdf5/1.10.2_1/lib' # travis macOS + ] + + # special treatment for conda environments + # as the conda library dir is provided first, we should also check for + # conda header files first + if 'CONDA_DIR' in os.environ: + hdf5_include_dir_hints.insert( + 0, os.path.join(os.environ['CONDA_DIR'], 'include')) + hdf5_library_dir_hints.insert( + 0, os.path.join(os.environ['CONDA_DIR'], 'lib')) + + # Check for Environment Modules variables + if 'HDF5_BASE' in os.environ: + hdf5_include_dir_hints.insert( + 0, os.path.join(os.environ['HDF5_BASE'], 'include')) + hdf5_library_dir_hints.insert( + 0, os.path.join(os.environ['HDF5_BASE'], 'lib')) + + for hdf5_include_dir_hint in hdf5_include_dir_hints: + hdf5_include_dir_found = os.path.isfile( + os.path.join(hdf5_include_dir_hint, 'hdf5.h')) + if hdf5_include_dir_found: + print(f"hdf5.h found in {hdf5_include_dir_hint}") + h5pkgcfg['include_dirs'] = [hdf5_include_dir_hint] + break + + for hdf5_library_dir_hint in hdf5_library_dir_hints: + # check for static or shared library + for lib_filename in ['libhdf5.a', 'libhdf5.so']: + hdf5_library_dir_found = os.path.isfile( + os.path.join(hdf5_library_dir_hint, lib_filename)) + if hdf5_library_dir_found: + print(f'{lib_filename} found in {hdf5_library_dir_hint}') + h5pkgcfg['library_dirs'] = [hdf5_library_dir_hint] + break + if hdf5_library_dir_found: + # break to not override hdf5_library_dir_found + break + + h5pkgcfg['found'] = hdf5_include_dir_found and hdf5_library_dir_found + if h5pkgcfg['found']: + return h5pkgcfg + + if pkgconfig: + try: + h5pkgcfg = pkgconfig.parse('hdf5') + except pkgconfig.PackageNotFoundError: + pass + # NOTE: Cannot use pkgconfig.exists('hdf5f'), since this is true + # although no libraries or include dirs are available + h5pkgcfg['found'] = 'include_dirs' in 
h5pkgcfg \ + and h5pkgcfg['include_dirs'] and \ + 'library_dirs' in h5pkgcfg \ + and h5pkgcfg['library_dirs'] + + return h5pkgcfg + + +def add_coverage_flags_if_required(cxx_flags: List[str], + linker_flags: List[str]) -> None: + """ + Add compiler and linker flags if gcov coverage requested + + :param cxx_flags: + list of existing cxx flags + + :param linker_flags: + list of existing linker flags + """ + if 'ENABLE_GCOV_COVERAGE' in os.environ and \ + os.environ['ENABLE_GCOV_COVERAGE'].upper() == 'TRUE': + print("ENABLE_GCOV_COVERAGE was set to TRUE." + " Building AMICI with coverage symbols.") + cxx_flags.extend(['-g', '-O0', '--coverage']) + linker_flags.extend(['--coverage', '-g']) + + +def add_debug_flags_if_required(cxx_flags: List[str], + linker_flags: List[str]) -> None: + """ + Add compiler and linker debug flags if requested + + Arguments: + :param cxx_flags: + list of existing cxx flags + + :param linker_flags: + list of existing linker flags + """ + if 'ENABLE_AMICI_DEBUGGING' in os.environ \ + and os.environ['ENABLE_AMICI_DEBUGGING'] == 'TRUE': + print("ENABLE_AMICI_DEBUGGING was set to TRUE." + " Building AMICI with debug symbols.") + cxx_flags.extend(['-g', '-O0', '-UNDEBUG']) + linker_flags.extend(['-g']) + + +def generate_swig_interface_files(swig_outdir: str = None, + with_hdf5: bool = None) -> None: + """ + Compile the swig python interface to amici + """ + + swig_exe = find_swig() + swig_version = get_swig_version(swig_exe) + + swig_args = [ + '-c++', + '-python', + '-py3', + '-threads', + '-Wall', + f'-Iamici{os.sep}swig', + f'-Iamici{os.sep}include', + ] + + print(f"Found SWIG version {swig_version}") + + # Are HDF5 includes available to generate the wrapper? + if with_hdf5 is None: + with_hdf5 = get_hdf5_config()['found'] + + if not with_hdf5: + swig_args.append('-DAMICI_SWIG_WITHOUT_HDF5') + + if swig_outdir is not None: + swig_args.extend(['-outdir', swig_outdir]) + + # Do we have -doxygen? + if swig_version >= (4, 0, 0): + swig_args.append('-doxygen') + + swig_cmd = [swig_exe, + *swig_args, + '-o', os.path.join("amici", "amici_wrap.cxx"), + os.path.join("amici", "swig", "amici.i")] + + print(f"Running SWIG: {' '.join(swig_cmd)}") + sp = subprocess.run(swig_cmd, stdout=subprocess.PIPE, + stderr=sys.stdout.buffer) + if not sp.returncode == 0: + raise AssertionError('Swigging AMICI failed:\n' + + sp.stdout.decode('utf-8')) + + +def add_openmp_flags(cxx_flags: List, ldflags: List) -> None: + """Add OpenMP flags to lists for compiler/linker flags (in-place)""" + + # Enable OpenMP support for Linux / OSX: + if sys.platform == 'linux': + print("Adding OpenMP flags...") + cxx_flags.insert(0, "-fopenmp") + ldflags.insert(0, "-fopenmp") + elif sys.platform == 'darwin': + if os.path.exists('/usr/local/lib/libomp.a'): + print("Adding OpenMP flags...") + cxx_flags[0:0] = ["-Xpreprocessor", "-fopenmp"] + ldflags[0:0] = ["-Xpreprocessor", "-fopenmp", "-lomp"] + else: + print("Not adding OpenMP flags, because /usr/local/lib/libomp.a" + " does not exist. 
To enable, run `brew install libomp` " + "or add flags manually.") diff --git a/python/sdist/amici/swig.py b/python/sdist/amici/swig.py deleted file mode 120000 index 638f8c3b3c..0000000000 --- a/python/sdist/amici/swig.py +++ /dev/null @@ -1 +0,0 @@ -../../amici/swig.py \ No newline at end of file diff --git a/python/sdist/amici/swig.py b/python/sdist/amici/swig.py new file mode 100644 index 0000000000..b2ab7c090c --- /dev/null +++ b/python/sdist/amici/swig.py @@ -0,0 +1,183 @@ +"""Functions for downloading/building/finding SWIG""" +import ast +import contextlib +import os +import re +import subprocess +from typing import Tuple + + +def find_swig() -> str: + """Get name and version of SWIG executable + + We need version >=3.0. Probably we should try some default paths and names, + but this should do the trick for now. + + Debian/Ubuntu systems have swig3.0 ('swig' is older versions), + OSX has swig 3.0 as 'swig'. + """ + + candidates = ['swig4.0', 'swig3.0', 'swig'] + # Environment variable has priority + if 'SWIG' in os.environ: + candidates.insert(0, os.environ['SWIG']) + + for candidate in candidates: + if swig_works(candidate): + return candidate + + raise RuntimeError( + "Unable to find SWIG executable with default names. " + "Ensure you have SWIG installed, e.g. by " + "`sudo apt install swig` or `brew install swig`. " + "As non-root user, you can install SWIG using " + "https://github.com/AMICI-dev/AMICI/blob/master/scripts/" + "downloadAndBuildSwig.sh, or by following the " + "instructions at http://www.swig.org/Doc4.0/" + "SWIGDocumentation.html#Preface_installation. " + "If was not found despite being installed, set the SWIG" + " environment variable to the full path of the correct " + "executable." + ) + + +def swig_works(swig: str, verbose: bool = True) -> bool: + """Test if `swig` looks like a working SWIG executable.""" + + try: + # For python3.6 compatibility no `capture_output=True` + result = subprocess.run([swig, '-version'], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + except (FileNotFoundError, PermissionError): + if verbose: + print(f'Testing SWIG executable {swig}... FAILED.') + return False + + if verbose: + if result.returncode == 0: + print(f'Testing SWIG executable {swig}... SUCCEEDED.') + else: + print(f'Testing SWIG executable {swig}... 
FAILED.') + + return result.returncode == 0 + + +def get_swig_version(swig_exe: str) -> Tuple: + """Determine version of the given SWIG executable + + Returns: + Version tuple + """ + result = subprocess.run([swig_exe, '-version'], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + assert result.returncode == 0 + + version = re.sub(r'(?s).*Version\s+([\S]+).*', r'\1', + result.stdout.decode('utf-8')) + + return tuple(int(x) for x in version.split('.')) + + +class TypeHintFixer(ast.NodeTransformer): + """Replaces SWIG-generated C++ typehints by corresponding Python types""" + + mapping = { + 'void': None, + 'double': ast.Name('float'), + 'int': ast.Name('int'), + 'long': ast.Name('int'), + 'ptrdiff_t': ast.Name('int'), + 'size_t': ast.Name('int'), + 'bool': ast.Name('bool'), + 'std::unique_ptr< amici::Solver >': ast.Constant('Solver'), + 'amici::InternalSensitivityMethod': + ast.Constant('InternalSensitivityMethod'), + 'amici::InterpolationType': ast.Constant('InterpolationType'), + 'amici::LinearMultistepMethod': ast.Constant('LinearMultistepMethod'), + 'amici::LinearSolver': ast.Constant('LinearSolver'), + 'amici::Model *': ast.Constant('Model'), + 'amici::Model const *': ast.Constant('Model'), + 'amici::NewtonDampingFactorMode': + ast.Constant('NewtonDampingFactorMode'), + 'amici::NonlinearSolverIteration': + ast.Constant('NonlinearSolverIteration'), + 'amici::ObservableScaling': ast.Constant('ObservableScaling'), + 'amici::ParameterScaling': ast.Constant('ParameterScaling'), + 'amici::RDataReporting': ast.Constant('RDataReporting'), + 'amici::SensitivityMethod': ast.Constant('SensitivityMethod'), + 'amici::SensitivityOrder': ast.Constant('SensitivityOrder'), + 'amici::Solver *': ast.Constant('Solver'), + 'amici::SteadyStateSensitivityMode': + ast.Constant('SteadyStateSensitivityMode'), + 'amici::realtype': ast.Name('float'), + 'DoubleVector': ast.Constant('Sequence[float]'), + 'IntVector': ast.Name('Sequence[int]'), + 'std::string': ast.Name('str'), + 'std::string const &': ast.Name('str'), + 'std::unique_ptr< amici::ExpData >': ast.Constant('ExpData'), + 'std::unique_ptr< amici::ReturnData >': ast.Constant('ReturnData'), + 'std::vector< amici::ParameterScaling,' + 'std::allocator< amici::ParameterScaling > > const &': + ast.Constant('ParameterScalingVector') + } + + def visit_FunctionDef(self, node): + # Has a return type annotation? + if node.returns: + node.returns = self._new_annot(node.returns.value) + + # Has arguments? 
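+        # Hypothetical example: a SWIG-generated stub like
+        #     def setTimepoints(self, ts: 'DoubleVector') -> 'void': ...
+        # comes out as
+        #     def setTimepoints(self, ts: 'Sequence[float]'): ...
+        # after the mapping above is applied to return and argument
+        # annotations ('void' return annotations are dropped).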
+        if node.args.args:
+            for arg in node.args.args:
+                if not arg.annotation:
+                    continue
+                arg.annotation = self._new_annot(arg.annotation.value)
+        return node
+
+    def _new_annot(self, old_annot: str):
+        with contextlib.suppress(KeyError):
+            return self.mapping[old_annot]
+
+        # std::vector size type
+        if re.match(r"std::vector< .* >::(?:size|difference)_type", old_annot):
+            return ast.Name("int")
+
+        # std::vector value type
+        if (value_type := re.sub(
+                r'std::vector< (.*) >::value_type(?: const &)?',
+                r'\1', old_annot)) in self.mapping:
+            return self.mapping[value_type]
+
+        # std::vector
+        if (value_type := re.sub(
+                r'std::vector< (.*),std::allocator< \1 > >(?: const &)?',
+                r'\1', old_annot)) in self.mapping:
+            value_type_annot = self.mapping[value_type]
+            if isinstance(value_type_annot, ast.Constant):
+                return ast.Name(f"Tuple['{value_type_annot.value}']")
+            if isinstance(value_type_annot, ast.Name):
+                return ast.Name(f"Tuple[{value_type_annot.id}]")
+
+        return ast.Constant(old_annot)
+
+
+def fix_typehints(infilename, outfilename):
+    """Change SWIG-generated C++ typehints to Python typehints"""
+    # ast.unparse is only available from Python 3.9
+    if not getattr(ast, 'unparse', None):
+        return
+
+    # file -> AST
+    with open(infilename, 'r') as f:
+        source = f.read()
+    parsed_source = ast.parse(source)
+
+    # Change AST
+    fixer = TypeHintFixer()
+    parsed_source = fixer.visit(parsed_source)
+
+    # AST -> file
+    with open(outfilename, 'w') as f:
+        f.write(ast.unparse(parsed_source))
diff --git a/python/sdist/amici/swig_wrappers.py b/python/sdist/amici/swig_wrappers.py
deleted file mode 120000
index 83af20e099..0000000000
--- a/python/sdist/amici/swig_wrappers.py
+++ /dev/null
@@ -1 +0,0 @@
-../../amici/swig_wrappers.py
\ No newline at end of file
diff --git a/python/sdist/amici/swig_wrappers.py b/python/sdist/amici/swig_wrappers.py
new file mode 100644
index 0000000000..394f91aea8
--- /dev/null
+++ b/python/sdist/amici/swig_wrappers.py
@@ -0,0 +1,237 @@
+"""Convenience wrappers for the swig interface"""
+import sys
+from contextlib import contextmanager, suppress
+from typing import List, Optional, Union, Sequence, Dict, Any
+import amici.amici as amici_swig
+from . import numpy
+
+__all__ = [
+    'runAmiciSimulation', 'runAmiciSimulations', 'ExpData',
+    'readSolverSettingsFromHDF5', 'writeSolverSettingsToHDF5',
+    'set_model_settings', 'get_model_settings',
+    'AmiciModel', 'AmiciSolver', 'AmiciExpData', 'AmiciReturnData',
+    'AmiciExpDataVector'
+]
+
+AmiciModel = Union['amici.Model', 'amici.ModelPtr']
+AmiciSolver = Union['amici.Solver', 'amici.SolverPtr']
+AmiciExpData = Union['amici.ExpData', 'amici.ExpDataPtr']
+AmiciReturnData = Union['amici.ReturnData', 'amici.ReturnDataPtr']
+AmiciExpDataVector = Union['amici.ExpDataPtrVector', Sequence[AmiciExpData]]
+
+
+try:
+    from wurlitzer import sys_pipes
+except ModuleNotFoundError:
+    sys_pipes = suppress
+
+
+@contextmanager
+def _capture_cstdout():
+    """Redirect C/C++ stdout to python stdout if python stdout is redirected,
+    e.g. in ipython notebook"""
+    if sys.stdout == sys.__stdout__:
+        yield
+    else:
+        with sys_pipes():
+            yield
+
+
+def _get_ptr(
+        obj: Union[AmiciModel, AmiciExpData, AmiciSolver, AmiciReturnData]
+) -> Union['amici_swig.Model', 'amici_swig.ExpData',
+           'amici_swig.Solver', 'amici_swig.ReturnData']:
+    """
+    Convenience wrapper that returns the smart pointer pointee, if applicable
+
+    :param obj:
+        Potential smart pointer
+
+    :returns:
+        Non-smart pointer
+    """
+    if isinstance(obj, (amici_swig.ModelPtr, amici_swig.ExpDataPtr,
+                        amici_swig.SolverPtr, amici_swig.ReturnDataPtr)):
+        return obj.get()
+    return obj
+
+
+def runAmiciSimulation(
+        model: AmiciModel,
+        solver: AmiciSolver,
+        edata: Optional[AmiciExpData] = None
+) -> 'numpy.ReturnDataView':
+    """
+    Convenience wrapper around :py:func:`amici.amici.runAmiciSimulation`
+    (generated by swig)
+
+    :param model:
+        Model instance
+
+    :param solver:
+        Solver instance, must be generated from
+        :py:meth:`amici.amici.Model.getSolver`
+
+    :param edata:
+        ExpData instance (optional)
+
+    :returns:
+        ReturnData object with simulation results
+    """
+    with _capture_cstdout():
+        rdata = amici_swig.runAmiciSimulation(
+            _get_ptr(solver), _get_ptr(edata), _get_ptr(model))
+    return numpy.ReturnDataView(rdata)
+
+
+def ExpData(*args) -> 'amici_swig.ExpData':
+    """
+    Convenience wrapper for :py:class:`amici.amici.ExpData` constructors
+
+    :param args: arguments
+
+    :returns: ExpData instance
+    """
+    if isinstance(args[0], numpy.ReturnDataView):
+        return amici_swig.ExpData(_get_ptr(args[0]['ptr']), *args[1:])
+    elif isinstance(args[0], (amici_swig.ExpData, amici_swig.ExpDataPtr)):
+        # the *args[1:] should be empty, but by the time you read this,
+        # the constructor signature may have changed, and you are glad this
+        # wrapper did not break.
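+        # (in effect, this branch is a copy-constructor call)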
+ return amici_swig.ExpData(_get_ptr(args[0]), *args[1:]) + elif isinstance(args[0], (amici_swig.Model, amici_swig.ModelPtr)): + return amici_swig.ExpData(_get_ptr(args[0])) + else: + return amici_swig.ExpData(*args) + + +def runAmiciSimulations( + model: AmiciModel, + solver: AmiciSolver, + edata_list: AmiciExpDataVector, + failfast: bool = True, + num_threads: int = 1, +) -> List['numpy.ReturnDataView']: + """ + Convenience wrapper for loops of amici.runAmiciSimulation + + :param model: Model instance + :param solver: Solver instance, must be generated from Model.getSolver() + :param edata_list: list of ExpData instances + :param failfast: returns as soon as an integration failure is encountered + :param num_threads: number of threads to use (only used if compiled + with openmp) + + :returns: list of simulation results + """ + with _capture_cstdout(): + edata_ptr_vector = amici_swig.ExpDataPtrVector(edata_list) + rdata_ptr_list = amici_swig.runAmiciSimulations( + _get_ptr(solver), + edata_ptr_vector, + _get_ptr(model), + failfast, + num_threads + ) + return [numpy.ReturnDataView(r) for r in rdata_ptr_list] + + +def readSolverSettingsFromHDF5( + file: str, + solver: AmiciSolver, + location: Optional[str] = 'solverSettings' +) -> None: + """ + Convenience wrapper for :py:func:`amici.readSolverSettingsFromHDF5` + + :param file: hdf5 filename + :param solver: Solver instance to which settings will be transferred + :param location: location of solver settings in hdf5 file + """ + amici_swig.readSolverSettingsFromHDF5(file, _get_ptr(solver), location) + + +def writeSolverSettingsToHDF5( + solver: AmiciSolver, + file: Union[str, object], + location: Optional[str] = 'solverSettings' +) -> None: + """ + Convenience wrapper for :py:func:`amici.amici.writeSolverSettingsToHDF5` + + :param file: hdf5 filename, can also be an object created by + :py:func:`amici.amici.createOrOpenForWriting` + :param solver: Solver instance from which settings will be stored + :param location: location of solver settings in hdf5 file + """ + amici_swig.writeSolverSettingsToHDF5(_get_ptr(solver), file, location) + + +# Values are suffixes of `get[...]` and `set[...]` `amici.Model` methods. +# If either the getter or setter is not named with this pattern, then the value +# is a tuple where the first and second elements are the getter and setter +# methods, respectively. +model_instance_settings = [ + # `setParameter{List,Scale}` will clear initial state sensitivities, so + # `setParameter{List,Scale}` has to be called first. + 'ParameterList', + 'ParameterScale', # getter returns a SWIG object + 'AddSigmaResiduals', + 'AlwaysCheckFinite', + 'FixedParameters', + 'InitialStates', + ('getInitialStateSensitivities', 'setUnscaledInitialStateSensitivities'), + 'MinimumSigmaResiduals', + ('nMaxEvent', 'setNMaxEvent'), + 'Parameters', + 'ReinitializationStateIdxs', + 'ReinitializeFixedParameterInitialStates', + 'StateIsNonNegative', + 'SteadyStateSensitivityMode', + ('t0', 'setT0'), + 'Timepoints', +] + + +def get_model_settings( + model: AmiciModel, +) -> Dict[str, Any]: + """Get model settings that are set independently of the compiled model. + + :param model: The AMICI model instance. + + :returns: Keys are AMICI model attributes, values are attribute values. 
+ """ + settings = {} + for setting in model_instance_settings: + getter = setting[0] if isinstance(setting, tuple) else f'get{setting}' + + if getter == 'getInitialStates' and not model.hasCustomInitialStates(): + settings[setting] = [] + continue + if getter == 'getInitialStateSensitivities' \ + and not model.hasCustomInitialStateSensitivities(): + settings[setting] = [] + continue + + settings[setting] = getattr(model, getter)() + # TODO `amici.Model.getParameterScale` returns a SWIG object instead + # of a Python list/tuple. + if setting == 'ParameterScale': + settings[setting] = tuple(settings[setting]) + return settings + + +def set_model_settings( + model: AmiciModel, + settings: Dict[str, Any], +) -> None: + """Set model settings. + + :param model: The AMICI model instance. + :param settings: Keys are callable attributes (setters) of an AMICI model, + values are provided to the setters. + """ + for setting, value in settings.items(): + setter = setting[1] if isinstance(setting, tuple) else f'set{setting}' + getattr(model, setter)(value) diff --git a/python/sdist/amici/testing.py b/python/sdist/amici/testing.py deleted file mode 120000 index 3c889afbfa..0000000000 --- a/python/sdist/amici/testing.py +++ /dev/null @@ -1 +0,0 @@ -../../amici/testing.py \ No newline at end of file diff --git a/python/sdist/amici/testing.py b/python/sdist/amici/testing.py new file mode 100644 index 0000000000..de1f69a1cc --- /dev/null +++ b/python/sdist/amici/testing.py @@ -0,0 +1,41 @@ +"""Test support functions""" +import os +import sys +from tempfile import TemporaryDirectory + +import pytest + +# Indicates whether we are currently running under valgrind +# see also https://stackoverflow.com/a/62364698 +ON_VALGRIND = any( + needle in haystack + for needle in ('valgrind', 'vgpreload') + for haystack in (os.getenv("LD_PRELOAD", ""), + os.getenv("DYLD_INSERT_LIBRARIES", "")) +) + +# Decorator to skip certain tests when we are under valgrind +# (those that are independent of the AMICI C++ parts, or that take too long, +# or that test performance) +skip_on_valgrind = pytest.mark.skipif( + ON_VALGRIND, reason="Takes too long or is meaningless under valgrind") + + +class TemporaryDirectoryWinSafe(TemporaryDirectory): + """TemporaryDirectory that will not raise if cleanup fails. + + If any extension was loaded from the temporary directory, cleanup would + otherwise fail on Windows with a ``PermissionError``. This class ignores + such failures. 
+ """ + def cleanup(self): + try: + super().cleanup() + except PermissionError as e: + if sys.platform not in {'win32', 'cygwin'}: + raise e + except NotADirectoryError: + # Ignore exception on Windows for pyd files: + # NotADirectoryError: [WinError 267] The directory name is + # invalid: '....pyd' + pass From 5c16d596c5ffbf53642cf42f9308e6f4b3d5a3cb Mon Sep 17 00:00:00 2001 From: Daniel Weindl Date: Tue, 4 Oct 2022 14:30:20 +0200 Subject: [PATCH 7/7] Update changelog, bump version number --- CHANGELOG.md | 15 ++++++++++++++- version.txt | 2 +- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5bc272b2a5..3a5d1834cf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,18 @@ ## v0.X Series +### v0.13.0 (2022-10-04) + +* Fixed extraction of common subexpressions + by @dweindl in https://github.com/AMICI-dev/AMICI/pull/1865 +* Added function to convert `ReturnData::status` flags to string + by @dweindl in https://github.com/AMICI-dev/AMICI/pull/1864 + +And further contributions by @dweindl, @FFroehlich + +**Full Changelog**: +https://github.com/AMICI-dev/AMICI/compare/v0.12.0...v0.13.0 + ### v0.12.0 (2022-08-26) Features: @@ -14,6 +26,7 @@ Features: * Option to extract common subexpressions by @dweindl in https://github.com/AMICI-dev/AMICI/pull/1852, https://github.com/AMICI-dev/AMICI/pull/1856 + **not available in this release, use v0.13.0** * Parallelize matrix simplification by @dweindl in https://github.com/AMICI-dev/AMICI/pull/1778 * Validate PEtab problems before attempting import @@ -32,7 +45,7 @@ Fixes: And further contributions by @dilpath, @dweindl, @FFroehlich **Full Changelog**: -https://github.com/AMICI-dev/AMICI/compare/v0.11.32...v0.11.33 +https://github.com/AMICI-dev/AMICI/compare/v0.11.32...v0.12.0 ### v0.11.32 (2022-07-15) diff --git a/version.txt b/version.txt index ac454c6a1f..54d1a4f2a4 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -0.12.0 +0.13.0