From 0f91857f135ba5dd3b140eee2db2a1f4122ca8c8 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Wed, 22 Nov 2023 09:54:14 +0100 Subject: [PATCH 001/186] draft of BaseSolver and UnbalancedMixin --- docs/conf.py | 3 +- src/ott/neural/solvers/base_solver.py | 248 ++++++++++++++++++ tests/geometry/costs_test.py | 1 + tests/geometry/graph_test.py | 1 + tests/geometry/low_rank_test.py | 1 + tests/geometry/pointcloud_test.py | 1 + tests/geometry/scaling_cost_test.py | 1 + tests/geometry/subsetting_test.py | 1 + .../initializers/linear/sinkhorn_init_test.py | 1 + .../linear/sinkhorn_lr_init_test.py | 1 + tests/initializers/quadratic/gw_init_test.py | 1 + tests/math/lse_test.py | 1 + tests/math/math_utils_test.py | 1 + tests/math/matrix_square_root_test.py | 1 + tests/neural/icnn_test.py | 1 + tests/neural/losses_test.py | 1 + tests/neural/map_estimator_test.py | 1 + tests/neural/meta_initializer_test.py | 1 + tests/neural/neuraldual_test.py | 1 + tests/problems/linear/potentials_test.py | 1 + .../linear/continuous_barycenter_test.py | 1 + .../linear/discrete_barycenter_test.py | 1 + tests/solvers/linear/sinkhorn_diff_test.py | 1 + tests/solvers/linear/sinkhorn_grid_test.py | 1 + tests/solvers/linear/sinkhorn_lr_test.py | 1 + tests/solvers/linear/sinkhorn_misc_test.py | 1 + tests/solvers/linear/sinkhorn_test.py | 1 + tests/solvers/linear/univariate_test.py | 1 + tests/solvers/quadratic/fgw_test.py | 1 + tests/solvers/quadratic/gw_barycenter_test.py | 1 + tests/solvers/quadratic/gw_test.py | 1 + tests/solvers/quadratic/lower_bound_test.py | 1 + .../gaussian_mixture/fit_gmm_pair_test.py | 1 + tests/tools/gaussian_mixture/fit_gmm_test.py | 1 + .../gaussian_mixture_pair_test.py | 1 + .../gaussian_mixture/gaussian_mixture_test.py | 1 + tests/tools/gaussian_mixture/gaussian_test.py | 1 + tests/tools/gaussian_mixture/linalg_test.py | 1 + .../gaussian_mixture/probabilities_test.py | 1 + .../tools/gaussian_mixture/scale_tril_test.py | 1 + tests/tools/k_means_test.py | 5 +- 
tests/tools/plot_test.py | 1 + tests/tools/segment_sinkhorn_test.py | 1 + tests/tools/sinkhorn_divergence_test.py | 1 + tests/tools/soft_sort_test.py | 1 + tests/utils_test.py | 1 + 46 files changed, 296 insertions(+), 3 deletions(-) create mode 100644 src/ott/neural/solvers/base_solver.py diff --git a/docs/conf.py b/docs/conf.py index 6158c668d..c2a2c7102 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -26,9 +26,10 @@ import logging from datetime import datetime -import ott from sphinx.util import logging as sphinx_logging +import ott + # -- Project information ----------------------------------------------------- needs_sphinx = "4.0" diff --git a/src/ott/neural/solvers/base_solver.py b/src/ott/neural/solvers/base_solver.py new file mode 100644 index 000000000..cd12684e0 --- /dev/null +++ b/src/ott/neural/solvers/base_solver.py @@ -0,0 +1,248 @@ +from abc import ABC, abstractmethod +from pathlib import Path +from types import Mapping, MappingProxyType +from typing import ( + Any, + Callable, + Dict, + Literal, + Optional, + Tuple, + Union, +) + +import jax +import jax.numpy as jnp +import optax +from flax import train_state + +from ott.geometry.pointcloud import PointCloud +from ott.neural.solvers import models +from ott.problems.linear import linear_problem +from ott.solvers.linear import sinkhorn + + +class BaseNeuralSolver(ABC): + """Base class for neural solvers. + + Args: + iterations: Number of iterations to train for. + valid_freq: Frequency at which to run validation. 
+ """ + + def __init__(self, iterations: int, valid_freq: int, **_: Any) -> Any: + self.iterations = iterations + self.valid_freq = valid_freq + + @abstractmethod + def setup(self, *args: Any, **kwargs: Any) -> None: + pass + + @abstractmethod + def __call__(self, *args: Any, **kwargs: Any) -> None: + """Train the model.""" + pass + + @abstractmethod + def save(self, path: Path): + """Save the model.""" + pass + + @abstractmethod + @property + def is_balanced(self) -> Dict[str, Any]: + """Return the training logs.""" + pass + + @abstractmethod + @property + def training_logs(self) -> Dict[str, Any]: + """Return the training logs.""" + pass + + +class UnbalancednessMixin: + + def __init__( + self, + source_dim: int, + target_dim: int, + cond_dim: Optional[int], + tau_a: float = 1.0, + tau_b: float = 1.0, + mlp_eta: Optional[models.ModelBase] = None, + mlp_xi: Optional[models.ModelBase] = None, + seed: Optional[int] = None, + opt_eta: Optional[optax.GradientTransformation] = None, + opt_xi: Optional[optax.GradientTransformation] = None, + resample_epsilon: float = 1e-2, + scale_cost: Union[bool, int, float, Literal["mean", "max_cost", + "median"]] = "mean", + sinkhorn_kwargs: Mapping[str, Any] = MappingProxyType({}), + **_: Any, + ) -> None: + self.source_dim = source_dim + self.target_dim = target_dim + self.cond_dim = cond_dim + self.tau_a = tau_a + self.tau_b = tau_b + self.mlp_eta = mlp_eta + self.mlp_xi = mlp_xi + self.seed = seed + self.opt_eta = opt_eta + self.opt_xi = opt_xi + self.resample_epsilon = resample_epsilon + self.scale_cost = scale_cost + + self._compute_unbalanced_marginals = self._get_compute_unbalanced_marginals( + tau_a=tau_a, + tau_b=tau_b, + resample_epsilon=resample_epsilon, + scale_cost=scale_cost, + sinkhorn_kwargs=sinkhorn_kwargs + ) + self._setup(source_dim=source_dim, target_dim=target_dim, cond_dim=cond_dim) + + def _get_compute_unbalanced_marginals( + self, + tau_a: float, + tau_b: float, + resample_epsilon: float, + scale_cost: 
Union[bool, int, float, Literal["mean", "max_cost", + "median"]] = "mean", + sinkhorn_kwargs: Dict[str, Any] = MappingProxyType({}), + ) -> Tuple[jnp.ndarray, jnp.ndarray]: + """Compute the unbalanced source and target marginals for a batch.""" + + @jax.jit + def compute_unbalanced_marginals( + batch_source: jnp.ndarray, batch_target: jnp.ndarray + ) -> Tuple[jnp.ndarray, jnp.ndarray]: + geom = PointCloud( + batch_source, + batch_target, + epsilon=resample_epsilon, + scale_cost=scale_cost + ) + out = sinkhorn.Sinkhorn(**sinkhorn_kwargs)( + linear_problem.LinearProblem(geom, tau_a=tau_a, tau_b=tau_b) + ) + return out.matrix.sum(axis=1), out.matrix.sum(axis=0) + + return compute_unbalanced_marginals + + @jax.jit + def _resample( + self, + key: jax.random.KeyArray, + batch: Tuple[jnp.ndarray, ...], + marginals: jnp.ndarray, + ) -> Tuple[jnp.ndarray, ...]: + """Resample a batch based upon marginals.""" + indices = jax.random.choice( + key, a=len(marginals), p=jnp.squeeze(marginals), shape=[len(marginals)] + ) + return tuple(b[indices] if b is not None else None for b in batch) + + def _setup(self, source_dim: int, target_dim: int, cond_dim: int): + self.unbalancedness_step_fn = self._get_step_fn() + if self.mlp_eta is not None: + self.opt_eta = ( + self.opt_eta if self.opt_eta is not None else + optax.adamw(learning_rate=1e-4, weight_decay=1e-10) + ) + self.state_eta = self.mlp_eta.create_train_state( + self._key, self.opt_eta, source_dim + cond_dim + ) + if self.mlp_xi is not None: + self.opt_xi = ( + self.opt_xi if self.opt_xi is not None else + optax.adamw(learning_rate=1e-4, weight_decay=1e-10) + ) + self.state_xi = self.mlp_xi.create_train_state( + self._key, self.opt_xi, target_dim + cond_dim + ) + + def _get_step_fn(self) -> Callable: # type:ignore[type-arg] + + def loss_a_fn( + params_eta: Optional[jnp.ndarray], + apply_fn_eta: Callable[[Dict[str, jnp.ndarray], jnp.ndarray], + jnp.ndarray], + x: jnp.ndarray, + a: jnp.ndarray, + expectation_reweighting: float, + 
) -> Tuple[float, jnp.ndarray]: + eta_predictions = apply_fn_eta({"params": params_eta}, x) + return ( + optax.l2_loss(eta_predictions[:, 0], a).mean() + + optax.l2_loss(jnp.mean(eta_predictions) - expectation_reweighting), + eta_predictions, + ) + + def loss_b_fn( + params_xi: Optional[jnp.ndarray], + apply_fn_xi: Callable[[Dict[str, jnp.ndarray], jnp.ndarray], + jnp.ndarray], + x: jnp.ndarray, + b: jnp.ndarray, + expectation_reweighting: float, + ) -> Tuple[float, jnp.ndarray]: + xi_predictions = apply_fn_xi({"params": params_xi}, x) + return ( + optax.l2_loss(xi_predictions[:, 0], b).mean() + + optax.l2_loss(jnp.mean(xi_predictions) - expectation_reweighting), + xi_predictions, + ) + + @jax.jit + def step_fn( + source: jnp.ndarray, + target: jnp.ndarray, + condition: Optional[jnp.ndarray], + a: jnp.ndarray, + b: jnp.ndarray, + state_eta: Optional[train_state.TrainState] = None, + state_xi: Optional[train_state.TrainState] = None, + *, + is_training: bool = True, + ): + if condition is None: + input_source = source + input_target = target + else: + input_source = jnp.concatenate([source, condition], axis=-1) + input_target = jnp.concatenate([target, condition], axis=-1) + if state_eta is not None: + grad_a_fn = jax.value_and_grad(loss_a_fn, argnums=0, has_aux=True) + (loss_a, eta_predictions), grads_eta = grad_a_fn( + state_eta.params, + state_eta.apply_fn, + input_source, + a * len(a), + jnp.sum(b), + ) + new_state_eta = state_eta.apply_gradients( + grads=grads_eta + ) if is_training else None + + else: + new_state_eta = eta_predictions = loss_a = None + if state_xi is not None: + grad_b_fn = jax.value_and_grad(loss_b_fn, argnums=0, has_aux=True) + (loss_b, xi_predictions), grads_xi = grad_b_fn( + state_xi.params, + state_xi.apply_fn, + input_target, + b * len(b), + jnp.sum(a), + ) + new_state_xi = state_xi.apply_gradients( + grads=grads_xi + ) if is_training else None + else: + new_state_xi = xi_predictions = loss_b = None + + return new_state_eta, 
new_state_xi, eta_predictions, xi_predictions, loss_a, loss_b + + return step_fn diff --git a/tests/geometry/costs_test.py b/tests/geometry/costs_test.py index 20158042a..57a4d8874 100644 --- a/tests/geometry/costs_test.py +++ b/tests/geometry/costs_test.py @@ -17,6 +17,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.geometry import costs, pointcloud from ott.solvers import linear diff --git a/tests/geometry/graph_test.py b/tests/geometry/graph_test.py index eb80735f2..c242b192f 100644 --- a/tests/geometry/graph_test.py +++ b/tests/geometry/graph_test.py @@ -21,6 +21,7 @@ from jax.experimental import sparse from networkx.algorithms import shortest_paths from networkx.generators import balanced_tree, random_graphs + from ott.geometry import geometry, graph from ott.problems.linear import linear_problem from ott.solvers.linear import implicit_differentiation as implicit_lib diff --git a/tests/geometry/low_rank_test.py b/tests/geometry/low_rank_test.py index e0d937b35..87dd98db2 100644 --- a/tests/geometry/low_rank_test.py +++ b/tests/geometry/low_rank_test.py @@ -17,6 +17,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.geometry import costs, geometry, grid, low_rank, pointcloud diff --git a/tests/geometry/pointcloud_test.py b/tests/geometry/pointcloud_test.py index 17c0ac7aa..5f75ddb8e 100644 --- a/tests/geometry/pointcloud_test.py +++ b/tests/geometry/pointcloud_test.py @@ -17,6 +17,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.geometry import costs, geometry, pointcloud diff --git a/tests/geometry/scaling_cost_test.py b/tests/geometry/scaling_cost_test.py index 9cb905cfa..94ce97cf4 100644 --- a/tests/geometry/scaling_cost_test.py +++ b/tests/geometry/scaling_cost_test.py @@ -17,6 +17,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.geometry import geometry, low_rank, pointcloud from ott.problems.linear import linear_problem from ott.solvers.linear import sinkhorn, 
sinkhorn_lr diff --git a/tests/geometry/subsetting_test.py b/tests/geometry/subsetting_test.py index 579180c8c..c07929436 100644 --- a/tests/geometry/subsetting_test.py +++ b/tests/geometry/subsetting_test.py @@ -17,6 +17,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.geometry import geometry, low_rank, pointcloud Geom_t = Union[pointcloud.PointCloud, geometry.Geometry, low_rank.LRCGeometry] diff --git a/tests/initializers/linear/sinkhorn_init_test.py b/tests/initializers/linear/sinkhorn_init_test.py index 0b5979c05..6acf77f11 100644 --- a/tests/initializers/linear/sinkhorn_init_test.py +++ b/tests/initializers/linear/sinkhorn_init_test.py @@ -17,6 +17,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.geometry import geometry, pointcloud from ott.initializers.linear import initializers as linear_init from ott.problems.linear import linear_problem diff --git a/tests/initializers/linear/sinkhorn_lr_init_test.py b/tests/initializers/linear/sinkhorn_lr_init_test.py index b71ff0aac..f3fe7acd1 100644 --- a/tests/initializers/linear/sinkhorn_lr_init_test.py +++ b/tests/initializers/linear/sinkhorn_lr_init_test.py @@ -15,6 +15,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.geometry import geometry, pointcloud from ott.initializers.linear import initializers_lr from ott.problems.linear import linear_problem diff --git a/tests/initializers/quadratic/gw_init_test.py b/tests/initializers/quadratic/gw_init_test.py index 7298bfafe..4c39bafb4 100644 --- a/tests/initializers/quadratic/gw_init_test.py +++ b/tests/initializers/quadratic/gw_init_test.py @@ -14,6 +14,7 @@ import jax import numpy as np import pytest + from ott.geometry import pointcloud from ott.initializers.linear import initializers as lin_init from ott.initializers.linear import initializers_lr diff --git a/tests/math/lse_test.py b/tests/math/lse_test.py index 342726ebe..b842afe21 100644 --- a/tests/math/lse_test.py +++ 
b/tests/math/lse_test.py @@ -15,6 +15,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.math import utils as mu diff --git a/tests/math/math_utils_test.py b/tests/math/math_utils_test.py index 5a5e3a69a..b8451355b 100644 --- a/tests/math/math_utils_test.py +++ b/tests/math/math_utils_test.py @@ -17,6 +17,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.math import utils as mu diff --git a/tests/math/matrix_square_root_test.py b/tests/math/matrix_square_root_test.py index 2bee2ea70..fcd557957 100644 --- a/tests/math/matrix_square_root_test.py +++ b/tests/math/matrix_square_root_test.py @@ -17,6 +17,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.math import matrix_square_root diff --git a/tests/neural/icnn_test.py b/tests/neural/icnn_test.py index fabc4f422..fd6c07f2b 100644 --- a/tests/neural/icnn_test.py +++ b/tests/neural/icnn_test.py @@ -15,6 +15,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.neural.models import models diff --git a/tests/neural/losses_test.py b/tests/neural/losses_test.py index f675dbc76..8cff7bd64 100644 --- a/tests/neural/losses_test.py +++ b/tests/neural/losses_test.py @@ -15,6 +15,7 @@ import jax import numpy as np import pytest + from ott.geometry import costs from ott.neural.models import models from ott.neural.solvers import losses diff --git a/tests/neural/map_estimator_test.py b/tests/neural/map_estimator_test.py index 680cfcd01..7c506aa38 100644 --- a/tests/neural/map_estimator_test.py +++ b/tests/neural/map_estimator_test.py @@ -15,6 +15,7 @@ import jax.numpy as jnp import pytest + from ott import datasets from ott.geometry import pointcloud from ott.neural.models import models diff --git a/tests/neural/meta_initializer_test.py b/tests/neural/meta_initializer_test.py index f9b1e4cd0..f711366ec 100644 --- a/tests/neural/meta_initializer_test.py +++ b/tests/neural/meta_initializer_test.py @@ -17,6 +17,7 @@ import jax.numpy as jnp import 
pytest from flax import linen as nn + from ott.geometry import pointcloud from ott.initializers.linear import initializers as linear_init from ott.neural.models import models as nn_init diff --git a/tests/neural/neuraldual_test.py b/tests/neural/neuraldual_test.py index c1aed055d..252bf817a 100644 --- a/tests/neural/neuraldual_test.py +++ b/tests/neural/neuraldual_test.py @@ -16,6 +16,7 @@ import jax import numpy as np import pytest + from ott import datasets from ott.neural.models import conjugate_solvers, models from ott.neural.solvers import neuraldual diff --git a/tests/problems/linear/potentials_test.py b/tests/problems/linear/potentials_test.py index 619537297..c9fa9cf17 100644 --- a/tests/problems/linear/potentials_test.py +++ b/tests/problems/linear/potentials_test.py @@ -17,6 +17,7 @@ import matplotlib.pyplot as plt import numpy as np import pytest + from ott.geometry import costs, pointcloud from ott.problems.linear import linear_problem, potentials from ott.solvers.linear import sinkhorn diff --git a/tests/solvers/linear/continuous_barycenter_test.py b/tests/solvers/linear/continuous_barycenter_test.py index de4724200..5512263c7 100644 --- a/tests/solvers/linear/continuous_barycenter_test.py +++ b/tests/solvers/linear/continuous_barycenter_test.py @@ -18,6 +18,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.geometry import costs, segment from ott.problems.linear import barycenter_problem from ott.solvers.linear import continuous_barycenter as cb diff --git a/tests/solvers/linear/discrete_barycenter_test.py b/tests/solvers/linear/discrete_barycenter_test.py index 9e31d85d0..dc90e15c0 100644 --- a/tests/solvers/linear/discrete_barycenter_test.py +++ b/tests/solvers/linear/discrete_barycenter_test.py @@ -13,6 +13,7 @@ # limitations under the License. 
import jax.numpy as jnp import pytest + from ott.geometry import grid, pointcloud from ott.problems.linear import barycenter_problem as bp from ott.solvers.linear import discrete_barycenter as db diff --git a/tests/solvers/linear/sinkhorn_diff_test.py b/tests/solvers/linear/sinkhorn_diff_test.py index 892233127..d80f94251 100644 --- a/tests/solvers/linear/sinkhorn_diff_test.py +++ b/tests/solvers/linear/sinkhorn_diff_test.py @@ -18,6 +18,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.geometry import costs, geometry, grid, pointcloud from ott.problems.linear import linear_problem from ott.solvers.linear import implicit_differentiation as implicit_lib diff --git a/tests/solvers/linear/sinkhorn_grid_test.py b/tests/solvers/linear/sinkhorn_grid_test.py index 925af7278..b2aa4da3e 100644 --- a/tests/solvers/linear/sinkhorn_grid_test.py +++ b/tests/solvers/linear/sinkhorn_grid_test.py @@ -15,6 +15,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.geometry import grid, pointcloud from ott.problems.linear import linear_problem from ott.solvers import linear diff --git a/tests/solvers/linear/sinkhorn_lr_test.py b/tests/solvers/linear/sinkhorn_lr_test.py index 975570ac9..9b360bdf0 100644 --- a/tests/solvers/linear/sinkhorn_lr_test.py +++ b/tests/solvers/linear/sinkhorn_lr_test.py @@ -17,6 +17,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.geometry import low_rank, pointcloud from ott.problems.linear import linear_problem from ott.solvers.linear import sinkhorn_lr diff --git a/tests/solvers/linear/sinkhorn_misc_test.py b/tests/solvers/linear/sinkhorn_misc_test.py index 8fd2623e5..aeb37918b 100644 --- a/tests/solvers/linear/sinkhorn_misc_test.py +++ b/tests/solvers/linear/sinkhorn_misc_test.py @@ -18,6 +18,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.geometry import costs, geometry, pointcloud from ott.problems.linear import linear_problem from ott.solvers import linear diff 
--git a/tests/solvers/linear/sinkhorn_test.py b/tests/solvers/linear/sinkhorn_test.py index 2ff49b57e..ce7f9919a 100644 --- a/tests/solvers/linear/sinkhorn_test.py +++ b/tests/solvers/linear/sinkhorn_test.py @@ -19,6 +19,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott import utils from ott.geometry import costs, epsilon_scheduler, geometry, grid, pointcloud from ott.problems.linear import linear_problem diff --git a/tests/solvers/linear/univariate_test.py b/tests/solvers/linear/univariate_test.py index 221f295cd..1a5529167 100644 --- a/tests/solvers/linear/univariate_test.py +++ b/tests/solvers/linear/univariate_test.py @@ -16,6 +16,7 @@ import numpy as np import pytest import scipy as sp + from ott.geometry import costs, pointcloud from ott.problems.linear import linear_problem from ott.solvers.linear import sinkhorn, univariate diff --git a/tests/solvers/quadratic/fgw_test.py b/tests/solvers/quadratic/fgw_test.py index acc40ba36..0a2a2fff4 100644 --- a/tests/solvers/quadratic/fgw_test.py +++ b/tests/solvers/quadratic/fgw_test.py @@ -17,6 +17,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.geometry import geometry, low_rank, pointcloud from ott.problems.quadratic import quadratic_problem from ott.solvers.linear import implicit_differentiation as implicit_lib diff --git a/tests/solvers/quadratic/gw_barycenter_test.py b/tests/solvers/quadratic/gw_barycenter_test.py index d07247fef..6bc843477 100644 --- a/tests/solvers/quadratic/gw_barycenter_test.py +++ b/tests/solvers/quadratic/gw_barycenter_test.py @@ -17,6 +17,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.geometry import pointcloud from ott.problems.quadratic import gw_barycenter as gwb from ott.solvers.quadratic import gw_barycenter as gwb_solver diff --git a/tests/solvers/quadratic/gw_test.py b/tests/solvers/quadratic/gw_test.py index 192de8ed6..e7b77cd58 100644 --- a/tests/solvers/quadratic/gw_test.py +++ 
b/tests/solvers/quadratic/gw_test.py @@ -17,6 +17,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.geometry import geometry, low_rank, pointcloud from ott.problems.quadratic import quadratic_problem from ott.solvers.linear import implicit_differentiation as implicit_lib diff --git a/tests/solvers/quadratic/lower_bound_test.py b/tests/solvers/quadratic/lower_bound_test.py index 6a15bd20a..ba90d6362 100644 --- a/tests/solvers/quadratic/lower_bound_test.py +++ b/tests/solvers/quadratic/lower_bound_test.py @@ -19,6 +19,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.geometry import costs, pointcloud from ott.initializers.linear import initializers from ott.problems.quadratic import quadratic_problem diff --git a/tests/tools/gaussian_mixture/fit_gmm_pair_test.py b/tests/tools/gaussian_mixture/fit_gmm_pair_test.py index 98b7619ea..8f43eaa4e 100644 --- a/tests/tools/gaussian_mixture/fit_gmm_pair_test.py +++ b/tests/tools/gaussian_mixture/fit_gmm_pair_test.py @@ -14,6 +14,7 @@ import jax import jax.numpy as jnp import pytest + from ott.tools.gaussian_mixture import ( fit_gmm, fit_gmm_pair, diff --git a/tests/tools/gaussian_mixture/fit_gmm_test.py b/tests/tools/gaussian_mixture/fit_gmm_test.py index 18c930740..e39633b19 100644 --- a/tests/tools/gaussian_mixture/fit_gmm_test.py +++ b/tests/tools/gaussian_mixture/fit_gmm_test.py @@ -15,6 +15,7 @@ import jax.numpy as jnp import jax.test_util import pytest + from ott.tools.gaussian_mixture import fit_gmm, gaussian_mixture diff --git a/tests/tools/gaussian_mixture/gaussian_mixture_pair_test.py b/tests/tools/gaussian_mixture/gaussian_mixture_pair_test.py index ce57fa533..ccf1e50cd 100644 --- a/tests/tools/gaussian_mixture/gaussian_mixture_pair_test.py +++ b/tests/tools/gaussian_mixture/gaussian_mixture_pair_test.py @@ -15,6 +15,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.tools.gaussian_mixture import gaussian_mixture, gaussian_mixture_pair diff --git 
a/tests/tools/gaussian_mixture/gaussian_mixture_test.py b/tests/tools/gaussian_mixture/gaussian_mixture_test.py index 3e6fcde83..af52860be 100644 --- a/tests/tools/gaussian_mixture/gaussian_mixture_test.py +++ b/tests/tools/gaussian_mixture/gaussian_mixture_test.py @@ -15,6 +15,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.tools.gaussian_mixture import gaussian_mixture, linalg diff --git a/tests/tools/gaussian_mixture/gaussian_test.py b/tests/tools/gaussian_mixture/gaussian_test.py index 1d05d5056..8b720861c 100644 --- a/tests/tools/gaussian_mixture/gaussian_test.py +++ b/tests/tools/gaussian_mixture/gaussian_test.py @@ -15,6 +15,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.tools.gaussian_mixture import gaussian, scale_tril diff --git a/tests/tools/gaussian_mixture/linalg_test.py b/tests/tools/gaussian_mixture/linalg_test.py index b92552a23..4db928264 100644 --- a/tests/tools/gaussian_mixture/linalg_test.py +++ b/tests/tools/gaussian_mixture/linalg_test.py @@ -15,6 +15,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.tools.gaussian_mixture import linalg diff --git a/tests/tools/gaussian_mixture/probabilities_test.py b/tests/tools/gaussian_mixture/probabilities_test.py index 9d51be1a4..4924924df 100644 --- a/tests/tools/gaussian_mixture/probabilities_test.py +++ b/tests/tools/gaussian_mixture/probabilities_test.py @@ -15,6 +15,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.tools.gaussian_mixture import probabilities diff --git a/tests/tools/gaussian_mixture/scale_tril_test.py b/tests/tools/gaussian_mixture/scale_tril_test.py index 049f9a043..3e53fd543 100644 --- a/tests/tools/gaussian_mixture/scale_tril_test.py +++ b/tests/tools/gaussian_mixture/scale_tril_test.py @@ -15,6 +15,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.math import matrix_square_root from ott.tools.gaussian_mixture import scale_tril diff --git 
a/tests/tools/k_means_test.py b/tests/tools/k_means_test.py index b77f4ce5f..a36c4b5c1 100644 --- a/tests/tools/k_means_test.py +++ b/tests/tools/k_means_test.py @@ -19,12 +19,13 @@ import jax.numpy as jnp import numpy as np import pytest -from ott.geometry import costs, pointcloud -from ott.tools import k_means from sklearn import datasets from sklearn.cluster import KMeans, kmeans_plusplus from sklearn.cluster._k_means_common import _is_same_clustering +from ott.geometry import costs, pointcloud +from ott.tools import k_means + def make_blobs( *args: Any, diff --git a/tests/tools/plot_test.py b/tests/tools/plot_test.py index 80e374bb6..8c8b81a1c 100644 --- a/tests/tools/plot_test.py +++ b/tests/tools/plot_test.py @@ -14,6 +14,7 @@ import jax import matplotlib.pyplot as plt + import ott from ott.geometry import pointcloud from ott.problems.linear import linear_problem diff --git a/tests/tools/segment_sinkhorn_test.py b/tests/tools/segment_sinkhorn_test.py index 2e56af4c3..119dbf93a 100644 --- a/tests/tools/segment_sinkhorn_test.py +++ b/tests/tools/segment_sinkhorn_test.py @@ -15,6 +15,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.geometry import costs, pointcloud from ott.problems.linear import linear_problem from ott.solvers.linear import sinkhorn diff --git a/tests/tools/sinkhorn_divergence_test.py b/tests/tools/sinkhorn_divergence_test.py index 1c180fe31..d46c220d0 100644 --- a/tests/tools/sinkhorn_divergence_test.py +++ b/tests/tools/sinkhorn_divergence_test.py @@ -17,6 +17,7 @@ import jax.numpy as jnp import numpy as np import pytest + from ott.geometry import costs, geometry, pointcloud from ott.solvers import linear from ott.solvers.linear import acceleration diff --git a/tests/tools/soft_sort_test.py b/tests/tools/soft_sort_test.py index 735764f8a..372420a9e 100644 --- a/tests/tools/soft_sort_test.py +++ b/tests/tools/soft_sort_test.py @@ -18,6 +18,7 @@ import jax.numpy as jnp import numpy as np import pytest + from 
ott.solvers.linear import acceleration from ott.solvers.linear import implicit_differentiation as implicit_lib from ott.tools import soft_sort diff --git a/tests/utils_test.py b/tests/utils_test.py index 768a498b5..192ed59f4 100644 --- a/tests/utils_test.py +++ b/tests/utils_test.py @@ -14,6 +14,7 @@ from typing import Optional import pytest + from ott import utils From 3706970e21a6732eb1bd7bb659be752218f460ee Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Wed, 22 Nov 2023 10:46:49 +0100 Subject: [PATCH 002/186] draft of BaseSolver and UnbalancedMixin --- src/ott/neural/solvers/base_solver.py | 14 ++-- src/ott/neural/solvers/flow_matching.py | 100 ++++++++++++++++++++++++ src/ott/neural/solvers/neuraldual.py | 4 + 3 files changed, 111 insertions(+), 7 deletions(-) create mode 100644 src/ott/neural/solvers/flow_matching.py diff --git a/src/ott/neural/solvers/base_solver.py b/src/ott/neural/solvers/base_solver.py index cd12684e0..caca5e732 100644 --- a/src/ott/neural/solvers/base_solver.py +++ b/src/ott/neural/solvers/base_solver.py @@ -2,13 +2,13 @@ from pathlib import Path from types import Mapping, MappingProxyType from typing import ( - Any, - Callable, - Dict, - Literal, - Optional, - Tuple, - Union, + Any, + Callable, + Dict, + Literal, + Optional, + Tuple, + Union, ) import jax diff --git a/src/ott/neural/solvers/flow_matching.py b/src/ott/neural/solvers/flow_matching.py new file mode 100644 index 000000000..98c05cb8f --- /dev/null +++ b/src/ott/neural/solvers/flow_matching.py @@ -0,0 +1,100 @@ +from typing import Any, Callable, Dict, Optional, Type + +import jax.numpy as jnp +import orbax as obx + +from ott.geometry import costs +from ott.neural.models.models import BaseNeuralVectorField +from ott.neural.solver.base_solver import BaseNeuralSolver, UnbalancednessMixin +from ott.solvers import was_solver + + +class FlowMatching(BaseNeuralSolver, UnbalancednessMixin): + + def __init__( + self, + neural_vector_field: Type[BaseNeuralVectorField], + input_dim: 
int, + iterations: int, + valid_freq: int, + ot_solver: Type[was_solver.WassersteinSolver], + optimizer: Optional[Any] = None, + checkpoint_manager: Type[obx.CheckpointManager] = None, + epsilon: float = 1e-2, + cost_fn: Type[costs.CostFn] = costs.SqEuclidean(), + tau_a: float = 1.0, + tau_b: float = 1.0, + mlp_eta: Callable[[jnp.ndarray], float] = None, + mlp_xi: Callable[[jnp.ndarray], float] = None, + unbalanced_kwargs: Dict[str, Any] = {}, + callback_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray, jnp.ndarray], + Any]] = None, + seed: int = 0, + **kwargs: Any, + ) -> None: + + super().__init__(iterations=iterations, valid_freq=valid_freq) + super(UnbalancednessMixin, self).__init__( + mlp_eta=mlp_eta, + mlp_xi=mlp_xi, + tau_a=tau_a, + tau_b=tau_b, + **unbalanced_kwargs + ) + self.neural_vector_field = neural_vector_field + self.input_dim = input_dim + self.ot_solver = ot_solver + self.optimizer = optimizer + self.epsilon = epsilon + self.cost_fn = cost_fn + self.callback_fn = callback_fn + self.checkpoint_manager = checkpoint_manager + self.seed = seed + + def setup(self, **kwargs: Any) -> None: + self.state_neural_vector_field = self.neural_vector_field.create_train_state( + self.rng, self.optimizer, self.output_dim + ) + + self.step_fn = self._get_step_fn() + + self.match_fn = self._get_match_fn( + self.ot_solver, + epsilon=self.epsilon, + cost_fn=self.cost_fn, + tau_a=self.tau_a, + tau_b=self.tau_b, + scale_cost=self.scale_cost, + ) + + def _get_match_fn(self): + pass + + def __call__(self, train_loader, valid_loader) -> None: + for iter in range(self.iterations): + batch = next(train_loader) + batch, a, b = self.match_fn(batch) + self.state_neural_vector_field, logs = self.step_fn( + self.state_neural_vector_field, batch + ) + if not self.is_balanced: + self.unbalancedness_step_fn(batch, a, b) + if iter % self.valid_freq == 0: + self._valid_step(valid_loader, iter) + if self.checkpoint_manager is not None: + states_to_save = { + 
"state_neural_vector_field": self.state_neural_vector_field + } + if self.state_mlp is not None: + states_to_save["state_eta"] = self.state_mlp + if self.state_xi is not None: + states_to_save["state_xi"] = self.state_xi + self.checkpoint_manager.save(iter, states_to_save) + + def _valid_step(self, valid_loader, iter) -> None: + batch = next(valid_loader) + batch, a, b = self.match_fn(batch) + if not self.is_balanced: + self.unbalancedness_step_fn(batch, a, b) + if self.callback_fn is not None: + self.callback_fn(batch, a, b) diff --git a/src/ott/neural/solvers/neuraldual.py b/src/ott/neural/solvers/neuraldual.py index fef8f873c..6ac3d1c79 100644 --- a/src/ott/neural/solvers/neuraldual.py +++ b/src/ott/neural/solvers/neuraldual.py @@ -68,6 +68,10 @@ class W2NeuralTrainState(train_state.TrainState): ) +class BaseNeuralVectorField(nn.Module): + pass + + class BaseW2NeuralDual(abc.ABC, nn.Module): """Base class for the neural solver models.""" From 42dd2b8263e05cc2ba01c169d2c82f73b06d590e Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Wed, 22 Nov 2023 14:12:42 +0100 Subject: [PATCH 003/186] [ci skip] continue flow matching implementation --- src/ott/neural/data/dataloaders.py | 22 +++++ src/ott/neural/solvers/base_solver.py | 33 ++++++- src/ott/neural/solvers/flow_matching.py | 117 ++++++++++++++++++++---- src/ott/neural/solvers/flows.py | 56 ++++++++++++ 4 files changed, 209 insertions(+), 19 deletions(-) create mode 100644 src/ott/neural/data/dataloaders.py create mode 100644 src/ott/neural/solvers/flows.py diff --git a/src/ott/neural/data/dataloaders.py b/src/ott/neural/data/dataloaders.py new file mode 100644 index 000000000..8ea1f5571 --- /dev/null +++ b/src/ott/neural/data/dataloaders.py @@ -0,0 +1,22 @@ +from typing import Dict + +import jax +import jax.numpy as jnp +import tensorflow as tf + + +class ConditionalDataLoader: + + def __init__( + self, rng: jax.random.KeyArray, dataloaders: Dict[str, tf.Dataloader], + p: jax.Array + ) -> None: + 
class ResampleMixin:
  """Mixin providing resampling of paired batches from a discrete OT coupling."""

  def _resample_data(
      self,
      key: jax.Array,
      tmat: jnp.ndarray,
      source_arrays: Tuple[jnp.ndarray, ...],
      target_arrays: Tuple[jnp.ndarray, ...],
  ) -> Tuple[Tuple[jnp.ndarray, ...], Tuple[jnp.ndarray, ...]]:
    """Resample a batch according to coupling ``tmat``.

    Fixes relative to the draft:
      - indices must be *sampled with probabilities* given by the coupling
        (``p=tmat.flatten()``); the draft passed the coupling values as the
        population to ``random.choice``, i.e. it sampled matrix entries, not
        index pairs;
      - the number of samples was ``len(tmat.flatten())**2`` (n**4 for an
        n x n coupling) — one resampled pair per source point is drawn instead;
      - the index decoding used an unset ``self.batch_size``; the shape is
        taken from ``tmat`` itself.

    Args:
      key: PRNG key used for sampling.
      tmat: ``[n, m]`` coupling matrix (non-negative, will be renormalized).
      source_arrays: Arrays (or ``None``) indexed along the source axis.
      target_arrays: Arrays (or ``None``) indexed along the target axis.

    Returns:
      Tuple of (resampled source arrays, resampled target arrays).
    """
    n_source, n_target = tmat.shape
    probs = tmat.flatten()
    probs = probs / probs.sum()
    # Sample flat indices of the coupling, weighted by coupling mass.
    flat_indices = jax.random.choice(
        key, n_source * n_target, shape=(n_source,), p=probs
    )
    indices_source = flat_indices // n_target
    indices_target = flat_indices % n_target
    return tuple(
        b[indices_source] if b is not None else None for b in source_arrays
    ), tuple(
        b[indices_target] if b is not None else None for b in target_arrays
    )

  def _resample_data_conditionally(self, *args: Any, **kwargs: Any):
    """Conditional variant of :meth:`_resample_data`; not implemented yet."""
    raise NotImplementedError
b/src/ott/neural/solvers/flow_matching.py @@ -1,15 +1,27 @@ -from typing import Any, Callable, Dict, Optional, Type +import functools +from typing import Any, Callable, Dict, Mapping, Optional, Tuple, Type +import jax import jax.numpy as jnp import orbax as obx +from jax import random -from ott.geometry import costs +from ott.geometry import costs, pointcloud from ott.neural.models.models import BaseNeuralVectorField -from ott.neural.solver.base_solver import BaseNeuralSolver, UnbalancednessMixin +from ott.neural.solver.base_solver import ( + BaseNeuralSolver, + MatchMixin, + UnbalancednessMixin, +) +from ott.neural.solvers.flows import ( + BaseFlow, + ConstantNoiseFlow, +) +from ott.problems.linear import linear_problem from ott.solvers import was_solver -class FlowMatching(BaseNeuralSolver, UnbalancednessMixin): +class FlowMatching(BaseNeuralSolver, MatchMixin, UnbalancednessMixin): def __init__( self, @@ -18,6 +30,7 @@ def __init__( iterations: int, valid_freq: int, ot_solver: Type[was_solver.WassersteinSolver], + flow: Type[BaseFlow] = ConstantNoiseFlow(0), optimizer: Optional[Any] = None, checkpoint_manager: Type[obx.CheckpointManager] = None, epsilon: float = 1e-2, @@ -32,18 +45,21 @@ def __init__( seed: int = 0, **kwargs: Any, ) -> None: - - super().__init__(iterations=iterations, valid_freq=valid_freq) - super(UnbalancednessMixin, self).__init__( - mlp_eta=mlp_eta, - mlp_xi=mlp_xi, + super().__init__( + iterations=iterations, + valid_freq=valid_freq, tau_a=tau_a, tau_b=tau_b, - **unbalanced_kwargs + mlp_eta=mlp_eta, + mlp_xi=mlp_xi, + unbalanced_kwargs=unbalanced_kwargs, + **kwargs ) + self.neural_vector_field = neural_vector_field self.input_dim = input_dim self.ot_solver = ot_solver + self.flow = flow self.optimizer = optimizer self.epsilon = epsilon self.cost_fn = cost_fn @@ -57,7 +73,6 @@ def setup(self, **kwargs: Any) -> None: ) self.step_fn = self._get_step_fn() - self.match_fn = self._get_match_fn( self.ot_solver, epsilon=self.epsilon, @@ -67,18 
+82,80 @@ def setup(self, **kwargs: Any) -> None: scale_cost=self.scale_cost, ) - def _get_match_fn(self): - pass + def _get_step_fn(self) -> Callable: + + def step_fn( + key: random.PRNGKeyArray, + state_neural_vector_field: Any, + batch: Dict[str, jnp.ndarray], + ) -> Tuple[Any, Any]: + + def loss_fn( + params: jax.Array, t: jax.Array, noise: jax.Array, + batch: Dict[str, jnp.ndarray], keys_model: random.PRNGKeyArray + ) -> jnp.ndarray: + + x_t = self.flow.compute_xt(noise, t, batch["source"], batch["target"]) + apply_fn = functools.partial( + state_neural_vector_field.apply, {"params": params} + ) + v_t = jax.vmap(apply_fn)( + t=t, x_t=x_t, condition=batch["condition"], keys_model=keys_model + ) + u_t = self.flow.compute_ut(t, batch["source"], batch["target"]) + return jnp.mean((v_t - u_t) ** 2) + + batch_size = len(batch["source"]) + key_noise, key_t, key_model = random.split(key, 3) + keys_model = random.split(key_model, batch_size) + t = self.sample_t(key_t, batch_size) + noise = self.sample_noise(key_noise, batch_size) + loss_grad = jax.value_and_grad(loss_fn) + loss, grads = loss_grad( + state_neural_vector_field.params, t, noise, batch, keys_model + ) + return state_neural_vector_field.apply_gradients(grads), loss + + return step_fn + + def _get_match_fn( + self, + ot_solver: Any, + epsilon: float, + cost_fn: str, + tau_a: float, + tau_b: float, + scale_cost: Any, + ) -> Callable: + + def match_pairs( + x: jax.Array, y: jax.Array + ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray, jnp.ndarray]: + geom = pointcloud.PointCloud( + x, y, epsilon=epsilon, scale_cost=scale_cost, cost_fn=cost_fn + ) + return ot_solver( + linear_problem.LinearProblem(geom, tau_a=tau_a, tau_b=tau_b) + ).matrix + + return match_pairs def __call__(self, train_loader, valid_loader) -> None: + batch: Mapping[str, jnp.ndarray] = {} for iter in range(self.iterations): - batch = next(train_loader) - batch, a, b = self.match_fn(batch) - self.state_neural_vector_field, logs = self.step_fn( 
import abc

import jax
import jax.numpy as jnp


class BaseFlow(abc.ABC):
  """Base class for flows interpolating between a source and a target sample.

  Args:
    sigma: Noise scale of the flow.
  """

  def __init__(self, sigma: float) -> None:
    self.sigma = sigma

  @abc.abstractmethod
  def compute_mu_t(
      self, t: jax.Array, x_0: jax.Array, x_1: jax.Array
  ) -> jax.Array:
    """Mean of the flow at time ``t`` between ``x_0`` and ``x_1``."""

  @abc.abstractmethod
  def compute_sigma_t(self, t: jax.Array) -> jax.Array:
    """Noise scale of the flow at time ``t``."""

  @abc.abstractmethod
  def compute_ut(
      self, t: jax.Array, x_0: jax.Array, x_1: jax.Array
  ) -> jax.Array:
    """Target vector field at time ``t`` between ``x_0`` and ``x_1``."""

  def compute_xt(
      self, noise: jax.Array, t: jax.Array, x_0: jax.Array, x_1: jax.Array
  ) -> jax.Array:
    """Sample a point on the flow: ``mu_t + sigma_t * noise``.

    Fix: the draft called ``compute_sigma_t(t, x_0, x_1)`` although every
    definition of ``compute_sigma_t`` takes ``t`` only, which raised
    ``TypeError`` at the first call.
    """
    mu_t = self.compute_mu_t(t, x_0, x_1)
    sigma_t = self.compute_sigma_t(t)
    return mu_t + sigma_t * noise


class StraightFlow(BaseFlow):
  """Straight-line interpolation from ``x_0`` (t=0) to ``x_1`` (t=1)."""

  def compute_mu_t(
      self, t: jax.Array, x_0: jax.Array, x_1: jax.Array
  ) -> jax.Array:
    # Fix: the draft returned ``t * x_0 + (1 - t) * x_1``, whose time
    # derivative is ``x_0 - x_1`` — the opposite sign of ``compute_ut``.
    # The standard flow-matching path is ``(1 - t) * x_0 + t * x_1``.
    return (1 - t) * x_0 + t * x_1

  def compute_ut(
      self, t: jax.Array, x_0: jax.Array, x_1: jax.Array
  ) -> jax.Array:
    # d/dt [(1 - t) x_0 + t x_1] = x_1 - x_0, constant in time.
    return x_1 - x_0


class ConstantNoiseFlow(StraightFlow):
  """Straight flow with a time-constant noise scale ``sigma``."""

  def compute_sigma_t(self, t: jax.Array) -> jax.Array:
    return self.sigma


class BrownianNoiseFlow(StraightFlow):
  """Straight flow with Brownian-bridge noise ``sqrt(sigma * t * (1 - t))``."""

  def compute_sigma_t(self, t: jax.Array) -> jax.Array:
    return jnp.sqrt(self.sigma * t * (1 - t))
+import abc import functools from typing import Any, Callable, Dict, Optional, Sequence, Tuple @@ -403,3 +404,14 @@ def tree_flatten(self) -> Tuple[Sequence[Any], Dict[str, Any]]: # noqa: D102 "rng": self.rng, "state": self.state } + + +class BaseNeuralVectorField(abc.ABC): + + def __call__( + self, + t: jax.Array, + condition: Optional[jax.Array] = None, + keys_model: Optional[jax.Array] = None + ) -> jnp.ndarray: # noqa: D102): + pass diff --git a/src/ott/neural/solvers/base_solver.py b/src/ott/neural/solvers/base_solver.py index 045e8bc86..bb0a30f22 100644 --- a/src/ott/neural/solvers/base_solver.py +++ b/src/ott/neural/solvers/base_solver.py @@ -44,6 +44,11 @@ def __call__(self, *args: Any, **kwargs: Any) -> None: """Train the model.""" pass + @abstractmethod + def transport(self, *args: Any, forward: bool, **kwargs: Any) -> Any: + """Transport.""" + pass + @abstractmethod def save(self, path: Path): """Save the model.""" diff --git a/src/ott/neural/solvers/flow_matching.py b/src/ott/neural/solvers/flow_matching.py index 966a1f3b0..e573d8444 100644 --- a/src/ott/neural/solvers/flow_matching.py +++ b/src/ott/neural/solvers/flow_matching.py @@ -1,6 +1,8 @@ import functools +import types from typing import Any, Callable, Dict, Mapping, Optional, Tuple, Type +import diffrax import jax import jax.numpy as jnp import orbax as obx @@ -168,13 +170,42 @@ def __call__(self, train_loader, valid_loader) -> None: states_to_save["state_xi"] = self.state_xi self.checkpoint_manager.save(iter, states_to_save) + def transport( + self, + data: jnp.array, + condition: Optional[jax.Array], + rng: random.PRNGKey, + forward: bool = True, + diffeqsolve_kwargs: Dict[str, Any] = types.MappingProxyType({}) + ) -> diffrax.Solution: + diffeqsolve_kwargs = dict(diffeqsolve_kwargs) + t0, t1 = (0, 1) if forward else (1, 0) + return diffrax.diffeqsolve( + diffrax.ODETerm( + lambda t, y: self.state_neural_vector_field. 
+ apply({"params": self.state_neural_vector_field.params}, + t=t, + x=y, + condition=condition) + ), + diffeqsolve_kwargs.pop("solver", diffrax.Tsit5()), + t0=t0, + t1=t1, + dt0=diffeqsolve_kwargs.pop("dt0", None), + y0=data, + stepsize_controller=diffeqsolve_kwargs.pop( + "stepsize_controller", diffrax.PIDController(rtol=1e-5, atol=1e-5) + ), + **diffeqsolve_kwargs, + ) + def _valid_step(self, valid_loader, iter) -> None: batch = next(valid_loader) - batch, a, b = self.match_fn(batch) - if not self.is_balanced: - self.unbalancedness_step_fn(batch, a, b) - if self.callback_fn is not None: - self.callback_fn(batch, a, b) + tmat = self.match_fn(batch) + batch = self.resample( + batch, tmat, (batch["source"], batch["condition"]), + (batch["target"], batch["condition"]) + ) @property def learn_rescaling(self) -> bool: diff --git a/tests/neural/test_flow_matching.py b/tests/neural/test_flow_matching.py new file mode 100644 index 000000000..ca55f901d --- /dev/null +++ b/tests/neural/test_flow_matching.py @@ -0,0 +1,4 @@ +class TestFlowMatching: + + def test_flow_matching(self): + pass From f5a043cd7b88b16702b164e8faf66793e28993f7 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Wed, 22 Nov 2023 16:15:32 +0100 Subject: [PATCH 005/186] [ci skip] add neural networks --- src/ott/neural/models/models.py | 211 +++++++++++++++++++++++- src/ott/neural/solvers/flow_matching.py | 10 +- 2 files changed, 215 insertions(+), 6 deletions(-) diff --git a/src/ott/neural/models/models.py b/src/ott/neural/models/models.py index 1983075b0..2277f6145 100644 --- a/src/ott/neural/models/models.py +++ b/src/ott/neural/models/models.py @@ -15,7 +15,9 @@ import functools from typing import Any, Callable, Dict, Optional, Sequence, Tuple +import flax.linen as nn import jax +import jax.numpy as jnp import optax from flax import linen as nn from flax.core import frozen_dict @@ -30,6 +32,7 @@ from ott.neural.models import layers from ott.neural.solvers import neuraldual from ott.problems.linear 
class Block(nn.Module):
  """MLP block: ``num_layers`` dense+activation layers, then a projection.

  Fix: the draft defined this class twice (once with ``activation_fn``, once
  with ``act_fn``); only the ``act_fn`` variant is kept.
  """
  dim: int = 128  # width of the hidden dense layers
  out_dim: int = 32  # width of the final projection
  num_layers: int = 3  # number of dense+activation layers
  act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.silu

  @nn.compact
  def __call__(self, x: jnp.ndarray) -> jnp.ndarray:
    for i in range(self.num_layers):
      x = nn.Dense(self.dim, name=f"fc{i}")(x)
      x = self.act_fn(x)
    return nn.Dense(self.out_dim, name="fc_final")(x)


class BaseNeuralVectorField(nn.Module, abc.ABC):
  """Interface for time-dependent, optionally conditional vector fields."""

  @abc.abstractmethod
  def __call__(
      self,
      t: jax.Array,
      x: jax.Array,
      condition: Optional[jax.Array] = None,
      keys_model: Optional[jax.Array] = None,
  ) -> jnp.ndarray:
    """Evaluate the vector field at time ``t`` and position ``x``."""


class NeuralVectorField(BaseNeuralVectorField):
  """MLP vector field with sinusoidal time embedding and optional condition.

  Fixes relative to the draft:
    - ``output_dim`` is now a declared field (the draft read an undeclared
      ``self.output_dim`` in ``__call__``);
    - the body used the undefined name ``data`` where the input ``x`` was
      meant;
    - ``__post_init__`` no longer overwrites a user-provided
      ``joint_hidden_dim`` with ``latent_embed_dim``.
  """
  condition_dim: int
  # NOTE(review): new required field inserted here — construct this module
  # with keyword arguments to be safe against the field reordering.
  output_dim: int
  latent_embed_dim: int
  condition_embed_dim: Optional[int] = None
  t_embed_dim: Optional[int] = None
  joint_hidden_dim: Optional[int] = None
  num_layers_per_block: int = 3
  act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.silu
  n_frequencies: int = 128

  def time_encoder(self, t: jax.Array) -> jnp.ndarray:
    """Sinusoidal embedding of ``t`` into ``2 * n_frequencies`` features."""
    freq = 2 * jnp.arange(self.n_frequencies) * jnp.pi
    t = freq * t
    return jnp.concatenate((jnp.cos(t), jnp.sin(t)), axis=-1)

  def __post_init__(self):
    # Default the per-branch embedding widths to the latent width.
    if self.condition_embed_dim is None:
      self.condition_embed_dim = self.latent_embed_dim
    if self.t_embed_dim is None:
      self.t_embed_dim = self.latent_embed_dim

    concat_embed_dim = (
        self.latent_embed_dim + self.condition_embed_dim + self.t_embed_dim
    )
    if self.joint_hidden_dim is not None:
      # Validate a user-provided width but keep it (the draft clobbered it).
      assert self.joint_hidden_dim >= concat_embed_dim, (
          "joint_hidden_dim must be greater than or equal to the sum of "
          "all embedded dimensions."
      )
    else:
      self.joint_hidden_dim = concat_embed_dim
    super().__post_init__()

  @nn.compact
  def __call__(
      self,
      t: jax.Array,
      x: jax.Array,
      condition: Optional[jax.Array] = None,
      keys_model: Optional[jax.Array] = None,
  ) -> jax.Array:
    # ``keys_model`` is accepted for interface compatibility; presumably
    # intended for stochastic layers — TODO confirm.
    t = self.time_encoder(t)
    t = Block(
        dim=self.t_embed_dim,
        out_dim=self.t_embed_dim,
        num_layers=self.num_layers_per_block,
        act_fn=self.act_fn,
    )(t)

    x = Block(
        dim=self.latent_embed_dim,
        out_dim=self.latent_embed_dim,
        num_layers=self.num_layers_per_block,
        act_fn=self.act_fn,
    )(x)

    if self.condition_dim > 0:
      condition = Block(
          dim=self.condition_embed_dim,
          out_dim=self.condition_embed_dim,
          num_layers=self.num_layers_per_block,
          act_fn=self.act_fn,
      )(condition)
      concatenated = jnp.concatenate((t, x, condition), axis=-1)
    else:
      concatenated = jnp.concatenate((t, x), axis=-1)

    out = Block(
        dim=self.joint_hidden_dim,
        out_dim=self.joint_hidden_dim,
        num_layers=self.num_layers_per_block,
        act_fn=self.act_fn,
    )(concatenated)

    return nn.Dense(self.output_dim, use_bias=True)(out)

  def create_train_state(
      self,
      rng: jax.Array,
      optimizer: optax.OptState,
      input_dim: int,
  ) -> train_state.TrainState:
    """Initialize parameters and wrap them in a flax ``TrainState``."""
    params = self.init(
        rng, jnp.ones((1, 1)), jnp.ones((1, input_dim)),
        jnp.ones((1, self.condition_dim))
    )["params"]
    return train_state.TrainState.create(
        apply_fn=self.apply, params=params, tx=optimizer
    )


class BaseRescalingNet(nn.Module, abc.ABC):
  """Interface for networks predicting marginal rescaling factors."""

  @abc.abstractmethod
  def __call__(
      # Fix: the draft named this ``__call___`` (three trailing underscores),
      # so it never defined the call protocol.
      self,
      x: jax.Array,
      condition: Optional[jax.Array] = None,
  ) -> jax.Array:
    """Predict rescaling factors for ``x``."""
class Rescaling_MLP(nn.Module):
  """MLP predicting strictly positive per-sample rescaling factors.

  Fixes relative to the draft:
    - the body referenced five undeclared attributes (``latent_embed_dim``,
      ``num_layers_per_block``, ``condition_embed_dim``, ``joint_hidden_dim``,
      ``condition_dim``); the declared fields ``hidden_dim``/``cond_dim`` are
      used instead (``num_layers_per_block`` is added with the draft's
      implicit default of 3, which is backward-compatible);
    - ``jax.random.PRNGKeyArray`` (removed from recent JAX) replaced by
      ``jax.Array`` in the annotation.
  """
  hidden_dim: int  # width used for all internal blocks
  cond_dim: int  # dimension of the conditioning vector; 0 disables it
  is_potential: bool = False
  act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.selu
  num_layers_per_block: int = 3

  @nn.compact
  def __call__(
      self, x: jnp.ndarray, condition: Optional[jax.Array] = None
  ) -> jnp.ndarray:  # noqa: D102
    x = Block(
        dim=self.hidden_dim,
        out_dim=self.hidden_dim,
        num_layers=self.num_layers_per_block,
        act_fn=self.act_fn,
    )(x)

    if self.cond_dim > 0:
      condition = Block(
          dim=self.hidden_dim,
          out_dim=self.hidden_dim,
          num_layers=self.num_layers_per_block,
          act_fn=self.act_fn,
      )(condition)
      concatenated = jnp.concatenate((x, condition), axis=-1)
    else:
      concatenated = x

    out = Block(
        dim=self.hidden_dim,
        # One rescaling factor per sample; the draft's ``joint_hidden_dim``
        # was undeclared — TODO confirm the intended output width.
        out_dim=1,
        num_layers=self.num_layers_per_block,
        act_fn=self.act_fn,
    )(concatenated)

    # Exponentiate so the predicted rescaling factors are strictly positive.
    return jnp.exp(out)

  def create_train_state(
      self,
      rng: jax.Array,
      optimizer: optax.OptState,
      input_dim: int,
  ) -> train_state.TrainState:
    """Initialize parameters and wrap them in a flax ``TrainState``."""
    params = self.init(
        rng, jnp.ones((1, input_dim)), jnp.ones((1, self.cond_dim))
    )["params"]
    return train_state.TrainState.create(
        apply_fn=self.apply, params=params, tx=optimizer
    )
Sep 17 00:00:00 2001 From: Dominik Klein Date: Wed, 22 Nov 2023 17:01:05 +0100 Subject: [PATCH 006/186] [ci skip] add test --- src/ott/neural/models/models.py | 30 ++++--------------- src/ott/neural/solvers/flow_matching.py | 6 ++-- tests/neural/conftest.py | 38 +++++++++++++++++++++++++ tests/neural/flow_matching_test.py | 27 ++++++++++++++++++ tests/neural/test_flow_matching.py | 4 --- 5 files changed, 74 insertions(+), 31 deletions(-) create mode 100644 tests/neural/conftest.py create mode 100644 tests/neural/flow_matching_test.py delete mode 100644 tests/neural/test_flow_matching.py diff --git a/src/ott/neural/models/models.py b/src/ott/neural/models/models.py index 2277f6145..0d28ab9fe 100644 --- a/src/ott/neural/models/models.py +++ b/src/ott/neural/models/models.py @@ -19,10 +19,8 @@ import jax import jax.numpy as jnp import optax -from flax import linen as nn from flax.core import frozen_dict from flax.training import train_state -from jax import numpy as jnp from jax.nn import initializers from ott import utils @@ -420,8 +418,7 @@ def __call__(self, x): for i in range(self.num_layers): x = nn.Dense(self.dim, name="fc{0}".format(i))(x) x = self.activation_fn(x) - x = nn.Dense(self.out_dim, name="fc_final")(x) - return x + return nn.Dense(self.out_dim)(x) class BaseNeuralVectorField(nn.Module, abc.ABC): @@ -437,21 +434,6 @@ def __call__( pass -class Block(nn.Module): - dim: int = 128 - out_dim: int = 32 - num_layers: int = 3 - act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.silu - - @nn.compact - def __call__(self, x): - for i in range(self.num_layers): - x = nn.Dense(self.dim, name="fc{0}".format(i))(x) - x = self.act_fn(x) - x = nn.Dense(self.out_dim, name="fc_final")(x) - return x - - class NeuralVectorField(BaseNeuralVectorField): condition_dim: int latent_embed_dim: int @@ -493,8 +475,8 @@ def __post_init__(self): def __call__( self, t: jax.Array, + x: jax.Array, condition: Optional[jax.Array], - latent: jax.Array, keys_model: Optional[jax.Array] = 
from typing import Iterator

import pytest

from ott import datasets


class UnconditionalDataLoader:
  """Wrap an iterator so each batch is returned with a ``None`` condition."""

  def __init__(self, iterator: Iterator):
    # ``iterator`` instead of ``iter``: do not shadow the builtin.
    self.iterator = iterator

  def __next__(self):
    return next(self.iterator), None


@pytest.fixture(scope="module")
def data_loader_gaussian_1():
  """Data loader over the 'simple' -> 'circle' Gaussian mixture pair."""
  loader = datasets.create_gaussian_mixture_samplers(
      name_source="simple",
      name_target="circle",
      train_batch_size=30,
      valid_batch_size=30,
  )
  return UnconditionalDataLoader(loader[0])


@pytest.fixture(scope="module")
def data_loader_gaussian_2():
  """Second, independent data loader over the same Gaussian mixture pair.

  Fix: the draft returned ``UnconditionalDataLoader(loader[0] + 1)``, adding
  1 to a sampler object — a ``TypeError`` at fixture setup. Presumably
  ``loader[1]`` (the second sampler) was intended — TODO confirm.
  """
  loader = datasets.create_gaussian_mixture_samplers(
      name_source="simple",
      name_target="circle",
      train_batch_size=30,
      valid_batch_size=30,
  )
  return UnconditionalDataLoader(loader[1])
src/ott/neural/data/__init__.py | 14 ++++++++++++ src/ott/neural/data/dataloaders.py | 30 ++++++++++++------------- src/ott/neural/models/models.py | 3 +-- src/ott/neural/solvers/base_solver.py | 26 ++++++++------------- src/ott/neural/solvers/flow_matching.py | 16 ++++++------- tests/neural/flow_matching_test.py | 6 ++--- 7 files changed, 50 insertions(+), 47 deletions(-) create mode 100644 src/ott/neural/data/__init__.py diff --git a/src/ott/neural/__init__.py b/src/ott/neural/__init__.py index f448c5dbe..326fae432 100644 --- a/src/ott/neural/__init__.py +++ b/src/ott/neural/__init__.py @@ -11,4 +11,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from . import models, solvers +from . import data, models, solvers diff --git a/src/ott/neural/data/__init__.py b/src/ott/neural/data/__init__.py new file mode 100644 index 000000000..51f8dd2af --- /dev/null +++ b/src/ott/neural/data/__init__.py @@ -0,0 +1,14 @@ +# Copyright OTT-JAX +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from . 
import dataloaders diff --git a/src/ott/neural/data/dataloaders.py b/src/ott/neural/data/dataloaders.py index 8ea1f5571..c8976d348 100644 --- a/src/ott/neural/data/dataloaders.py +++ b/src/ott/neural/data/dataloaders.py @@ -1,22 +1,20 @@ -from typing import Dict -import jax -import jax.numpy as jnp -import tensorflow as tf +#import tensorflow as tf class ConditionalDataLoader: + pass - def __init__( - self, rng: jax.random.KeyArray, dataloaders: Dict[str, tf.Dataloader], - p: jax.Array - ) -> None: - super().__init__() - self.rng = rng - self.conditions = dataloaders.keys() - self.p = p + #def __init__( + # self, rng: jax.random.KeyArray, dataloaders: Dict[str, tf.Dataloader], + # p: jax.Array + #) -> None: + # super().__init__() + # self.rng = rng + # self.conditions = dataloaders.keys() + # self.p = p - def __next__(self) -> jnp.ndarray: - self.rng, rng = jax.random.split(self.rng, 2) - condition = jax.random.choice(rng, self.conditions, p=self.p) - return next(self.dataloaders[condition]) + #def __next__(self) -> jnp.ndarray: + # self.rng, rng = jax.random.split(self.rng, 2) + # condition = jax.random.choice(rng, self.conditions, p=self.p) + # return next(self.dataloaders[condition]) diff --git a/src/ott/neural/models/models.py b/src/ott/neural/models/models.py index 0d28ab9fe..18d86144c 100644 --- a/src/ott/neural/models/models.py +++ b/src/ott/neural/models/models.py @@ -30,7 +30,6 @@ from ott.neural.models import layers from ott.neural.solvers import neuraldual from ott.problems.linear import linear_problem -from ott.solvers.nn.models import NeuralTrainState __all__ = ["ICNN", "MLP", "MetaInitializer"] @@ -533,7 +532,7 @@ def create_train_state( rng: jax.random.PRNGKeyArray, optimizer: optax.OptState, input_dim: int, - ) -> NeuralTrainState: + ) -> train_state.TrainState: params = self.init( rng, jnp.ones((1, 1)), jnp.ones((1, input_dim)), jnp.ones((1, self.condition_dim)) diff --git a/src/ott/neural/solvers/base_solver.py 
b/src/ott/neural/solvers/base_solver.py index bb0a30f22..ed716b50c 100644 --- a/src/ott/neural/solvers/base_solver.py +++ b/src/ott/neural/solvers/base_solver.py @@ -1,24 +1,16 @@ from abc import ABC, abstractmethod from pathlib import Path -from types import Mapping, MappingProxyType -from typing import ( - Any, - Callable, - Dict, - Literal, - Optional, - Tuple, - Union, -) +from types import MappingProxyType +from typing import Any, Callable, Dict, Literal, Mapping, Optional, Tuple, Union import jax import jax.numpy as jnp import optax -from flax import train_state +from flax.training import train_state from jax import random from ott.geometry.pointcloud import PointCloud -from ott.neural.solvers import models +from ott.neural.models import models from ott.problems.linear import linear_problem from ott.solvers.linear import sinkhorn @@ -54,14 +46,14 @@ def save(self, path: Path): """Save the model.""" pass - @abstractmethod @property - def is_balanced(self) -> Dict[str, Any]: + @abstractmethod + def is_balanced(self) -> bool: """Return the training logs.""" pass - @abstractmethod @property + @abstractmethod def training_logs(self) -> Dict[str, Any]: """Return the training logs.""" pass @@ -106,8 +98,8 @@ def __init__( cond_dim: Optional[int], tau_a: float = 1.0, tau_b: float = 1.0, - mlp_eta: Optional[models.ModelBase] = None, - mlp_xi: Optional[models.ModelBase] = None, + mlp_eta: Optional[models.BaseRescalingNet] = None, + mlp_xi: Optional[models.BaseRescalingNet] = None, seed: Optional[int] = None, opt_eta: Optional[optax.GradientTransformation] = None, opt_xi: Optional[optax.GradientTransformation] = None, diff --git a/src/ott/neural/solvers/flow_matching.py b/src/ott/neural/solvers/flow_matching.py index 0ff68604e..2a99c9163 100644 --- a/src/ott/neural/solvers/flow_matching.py +++ b/src/ott/neural/solvers/flow_matching.py @@ -6,24 +6,24 @@ import jax import jax.numpy as jnp import optax -import orbax as obx from jax import random +from orbax import 
checkpoint from ott.geometry import costs, pointcloud from ott.neural.models.models import BaseNeuralVectorField -from ott.neural.solver.base_solver import ( - BaseNeuralSolver, - MatchMixin, - UnbalancednessMixin, +from ott.neural.solvers.base_solver import ( + BaseNeuralSolver, + ResampleMixin, + UnbalancednessMixin, ) from ott.neural.solvers.flows import ( - BaseFlow, + BaseFlow, ) from ott.problems.linear import linear_problem from ott.solvers import was_solver -class FlowMatching(BaseNeuralSolver, MatchMixin, UnbalancednessMixin): +class FlowMatching(BaseNeuralSolver, ResampleMixin, UnbalancednessMixin): def __init__( self, @@ -34,7 +34,7 @@ def __init__( ot_solver: Type[was_solver.WassersteinSolver], flow: Type[BaseFlow], optimizer: Type[optax.GradientTransformation], - checkpoint_manager: Type[obx.CheckpointManager] = None, + checkpoint_manager: Type[checkpoint.CheckpointManager] = None, epsilon: float = 1e-2, cost_fn: Type[costs.CostFn] = costs.SqEuclidean(), tau_a: float = 1.0, diff --git a/tests/neural/flow_matching_test.py b/tests/neural/flow_matching_test.py index 2cab8608f..8c31bcc26 100644 --- a/tests/neural/flow_matching_test.py +++ b/tests/neural/flow_matching_test.py @@ -1,8 +1,8 @@ import optax -from ott.neural.flow_matching import FlowMatching -from ott.neural.flows import ConstantNoiseFlow -from ott.neural.models import NeuralVectorField +from ott.neural.models.models import NeuralVectorField +from ott.neural.solvers.flow_matching import FlowMatching +from ott.neural.solvers.flows import ConstantNoiseFlow from ott.solvers.linear import sinkhorn From a9e9a8c521008df5f65cd1812bdabb35d9e1ff25 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Wed, 22 Nov 2023 18:50:35 +0100 Subject: [PATCH 008/186] [ci skip] MRO not working --- src/ott/neural/data/dataloaders.py | 1 - src/ott/neural/models/models.py | 5 +- src/ott/neural/solvers/base_solver.py | 9 ++-- src/ott/neural/solvers/flow_matching.py | 40 ++++++++++++---- tests/neural/conftest.py | 63 
+++++++++++++------------ tests/neural/flow_matching_test.py | 11 +++-- 6 files changed, 77 insertions(+), 52 deletions(-) diff --git a/src/ott/neural/data/dataloaders.py b/src/ott/neural/data/dataloaders.py index c8976d348..2bddebaa1 100644 --- a/src/ott/neural/data/dataloaders.py +++ b/src/ott/neural/data/dataloaders.py @@ -1,4 +1,3 @@ - #import tensorflow as tf diff --git a/src/ott/neural/models/models.py b/src/ott/neural/models/models.py index 18d86144c..0e4d65203 100644 --- a/src/ott/neural/models/models.py +++ b/src/ott/neural/models/models.py @@ -409,14 +409,14 @@ def tree_flatten(self) -> Tuple[Sequence[Any], Dict[str, Any]]: # noqa: D102 class Block(nn.Module): dim: int = 128 num_layers: int = 3 - activation_fn: Any = nn.silu + act_fn: Any = nn.silu out_dim: int = 32 @nn.compact def __call__(self, x): for i in range(self.num_layers): x = nn.Dense(self.dim, name="fc{0}".format(i))(x) - x = self.activation_fn(x) + x = self.act_fn(x) return nn.Dense(self.out_dim)(x) @@ -434,6 +434,7 @@ def __call__( class NeuralVectorField(BaseNeuralVectorField): + output_dim: int condition_dim: int latent_embed_dim: int condition_embed_dim: Optional[int] = None diff --git a/src/ott/neural/solvers/base_solver.py b/src/ott/neural/solvers/base_solver.py index ed716b50c..b1a5c108d 100644 --- a/src/ott/neural/solvers/base_solver.py +++ b/src/ott/neural/solvers/base_solver.py @@ -46,12 +46,6 @@ def save(self, path: Path): """Save the model.""" pass - @property - @abstractmethod - def is_balanced(self) -> bool: - """Return the training logs.""" - pass - @property @abstractmethod def training_logs(self) -> Dict[str, Any]: @@ -61,6 +55,9 @@ def training_logs(self) -> Dict[str, Any]: class ResampleMixin: + def __init__(*args, **kwargs): + pass + def _resample_data( self, key: jax.random.KeyArray, diff --git a/src/ott/neural/solvers/flow_matching.py b/src/ott/neural/solvers/flow_matching.py index 2a99c9163..e53b9e45d 100644 --- a/src/ott/neural/solvers/flow_matching.py +++ 
b/src/ott/neural/solvers/flow_matching.py @@ -12,23 +12,24 @@ from ott.geometry import costs, pointcloud from ott.neural.models.models import BaseNeuralVectorField from ott.neural.solvers.base_solver import ( - BaseNeuralSolver, - ResampleMixin, - UnbalancednessMixin, + BaseNeuralSolver, + ResampleMixin, + UnbalancednessMixin, ) from ott.neural.solvers.flows import ( - BaseFlow, + BaseFlow, ) from ott.problems.linear import linear_problem from ott.solvers import was_solver -class FlowMatching(BaseNeuralSolver, ResampleMixin, UnbalancednessMixin): +class FlowMatching(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): def __init__( self, neural_vector_field: Type[BaseNeuralVectorField], input_dim: int, + cond_dim: int, iterations: int, valid_freq: int, ot_solver: Type[was_solver.WassersteinSolver], @@ -44,12 +45,15 @@ def __init__( unbalanced_kwargs: Dict[str, Any] = {}, callback_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray, jnp.ndarray], Any]] = None, - seed: int = 0, + rng: random.PRNGKeyArray = random.PRNGKey(0), **kwargs: Any, ) -> None: super().__init__( iterations=iterations, valid_freq=valid_freq, + source_dim=input_dim, + target_dim=input_dim, + cond_dim=cond_dim, tau_a=tau_a, tau_b=tau_b, mlp_eta=mlp_eta, @@ -67,11 +71,13 @@ def __init__( self.cost_fn = cost_fn self.callback_fn = callback_fn self.checkpoint_manager = checkpoint_manager - self.seed = seed + self.rng = rng - def setup(self, **kwargs: Any) -> None: + self.setup() + + def setup(self) -> None: self.state_neural_vector_field = self.neural_vector_field.create_train_state( - self.rng, self.optimizer, self.output_dim + self.rng, self.optimizer, self.input_dim ) self.step_fn = self._get_step_fn() @@ -210,3 +216,19 @@ def _valid_step(self, valid_loader, iter) -> None: @property def learn_rescaling(self) -> bool: return self.mlp_eta is not None or self.mlp_xi is not None + + def save(self, path: str) -> None: + raise NotImplementedError + + def training_logs(self) -> Dict[str, Any]: + raise 
NotImplementedError + + def sample_t( + self, key: random.PRNGKey, batch_size: int + ) -> jnp.ndarray: #TODO: make more general + return random.uniform(key, batch_size) + + def sample_noise( + self, key: random.PRNGKey, batch_size: int + ) -> jnp.ndarray: #TODO: make more general + return random.normal(key, shape=(batch_size, self.input_dim)) diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index 682aa5df8..57dba63bf 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -1,38 +1,41 @@ -from typing import Iterator +from typing import Optional +import jax import pytest -from ott import datasets - -class UnconditionalDataLoader: - - def __init__(self, iter: Iterator): - self.iter = iter - - def __next__(self): - return next(self.iter), None - - -@pytest.fixture(scope="module") -def data_loader_gaussian_1(): - """Returns a data loader for a simple Gaussian mixture.""" - loader = datasets.create_gaussian_mixture_samplers( - name_source="simple", - name_target="circle", - train_batch_size=30, - valid_batch_size=30, - ) - return UnconditionalDataLoader(loader[0]) +class DataLoader: + + def __init__( + self, + source_data: jax.Array, + target_data: jax.Array, + conditions: Optional[jax.Array], + batch_size: int = 64 + ) -> None: + super().__init__() + self.source_data = source_data + self.target_data = target_data + self.conditions = conditions + self.batch_size = batch_size + self.key = jax.random.PRNGKey(0) + + def __next__(self) -> jax.Array: + key, self.key = jax.random.split(self.key) + inds_source = jax.random.choice( + key, len(self.source_data), shape=[self.batch_size] + ) + inds_target = jax.random.choice( + key, len(self.target_data), shape=[self.batch_size] + ) + return self.source_data[inds_source, :], self.target_data[ + inds_target, :], self.conditions[ + inds_source, :] if self.conditions is not None else None @pytest.fixture(scope="module") -def data_loader_gaussian_2(): +def data_loader_gaussian(): """Returns a data loader 
for a simple Gaussian mixture.""" - loader = datasets.create_gaussian_mixture_samplers( - name_source="simple", - name_target="circle", - train_batch_size=30, - valid_batch_size=30, - ) - return UnconditionalDataLoader(loader[0] + 1) + source = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + target = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + return DataLoader(source, target, None, 16) diff --git a/tests/neural/flow_matching_test.py b/tests/neural/flow_matching_test.py index 8c31bcc26..23ce1e178 100644 --- a/tests/neural/flow_matching_test.py +++ b/tests/neural/flow_matching_test.py @@ -8,20 +8,23 @@ class TestFlowMatching: - def test_flow_matching(self, data_loader_gaussian_1, data_loader_gaussian_2): + def test_flow_matching(self, data_loader_gaussian): neural_vf = NeuralVectorField( - input_dim=2, hidden_dims=[32, 32], output_dim=2, activation="relu" + output_dim=2, + condition_dim=0, + latent_embed_dim=5, ) - ot_solver = sinkhorn.SinkhornSolver() + ot_solver = sinkhorn.Sinkhorn() flow = ConstantNoiseFlow(sigma=0) optimizer = optax.adam(learning_rate=1e-3) fm = FlowMatching( neural_vf, input_dim=2, + cond_dim=0, iterations=3, valid_freq=2, ot_solver=ot_solver, flow=flow, optimizer=optimizer ) - fm(data_loader_gaussian_1, data_loader_gaussian_2) + fm(data_loader_gaussian, data_loader_gaussian) From e4f89918a0568904d51c209940c23262b53ccb37 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Thu, 23 Nov 2023 10:48:37 +0100 Subject: [PATCH 009/186] [ci skip] basic test for flow matching passes --- pyproject.toml | 1 + src/ott/neural/models/models.py | 2 -- src/ott/neural/solvers/base_solver.py | 14 ++++---- src/ott/neural/solvers/flow_matching.py | 44 ++++++++++++------------- src/ott/neural/solvers/flows.py | 2 +- 5 files changed, 31 insertions(+), 32 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index e168ec984..56aad6a91 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -59,6 +59,7 @@ Changelog = 
"https://github.com/ott-jax/ott/releases" neural = [ "flax>=0.6.6", "optax>=0.1.1", + "diffrax>=0.4.1", ] dev = [ "pre-commit>=2.16.0", diff --git a/src/ott/neural/models/models.py b/src/ott/neural/models/models.py index 0e4d65203..853a1d69e 100644 --- a/src/ott/neural/models/models.py +++ b/src/ott/neural/models/models.py @@ -150,8 +150,6 @@ def _compute_gaussian_map_params( ) -> Tuple[jnp.ndarray, jnp.ndarray]: from ott.tools.gaussian_mixture import gaussian source, target = samples - # print(source) - # print(type(source)) g_s = gaussian.Gaussian.from_samples(source) g_t = gaussian.Gaussian.from_samples(target) lin_op = g_s.scale.gaussian_map(g_t.scale) diff --git a/src/ott/neural/solvers/base_solver.py b/src/ott/neural/solvers/base_solver.py index b1a5c108d..da7577565 100644 --- a/src/ott/neural/solvers/base_solver.py +++ b/src/ott/neural/solvers/base_solver.py @@ -23,7 +23,7 @@ class BaseNeuralSolver(ABC): valid_freq: Frequency at which to run validation. """ - def __init__(self, iterations: int, valid_freq: int, **_: Any) -> Any: + def __init__(self, iterations: int, valid_freq: int, **_: Any) -> None: self.iterations = iterations self.valid_freq = valid_freq @@ -66,16 +66,16 @@ def _resample_data( target_arrays: Tuple[jnp.ndarray, ...], ) -> Tuple[jnp.ndarray, ...]: """Resample a batch according to coupling `tmat`.""" - transition_matrix = tmat.flatten() + tmat_flattened = tmat.flatten() indices = random.choice( - key, transition_matrix.flatten(), shape=[len(transition_matrix) ** 2] + key, len(tmat_flattened), shape=[len(tmat_flattened)] ) - indices_source = indices // self.batch_size - indices_target = indices % self.batch_size + indices_source = indices // tmat.shape[1] + indices_target = indices % tmat.shape[1] return tuple( - b[indices_source] if b is not None else None for b in source_arrays + b[indices_source, :] if b is not None else None for b in source_arrays ), tuple( - b[indices_target] if b is not None else None for b in target_arrays + 
b[indices_target, :] if b is not None else None for b in target_arrays ) def _resample_data_conditionally( diff --git a/src/ott/neural/solvers/flow_matching.py b/src/ott/neural/solvers/flow_matching.py index e53b9e45d..a0e48d414 100644 --- a/src/ott/neural/solvers/flow_matching.py +++ b/src/ott/neural/solvers/flow_matching.py @@ -48,9 +48,12 @@ def __init__( rng: random.PRNGKeyArray = random.PRNGKey(0), **kwargs: Any, ) -> None: - super().__init__( - iterations=iterations, - valid_freq=valid_freq, + BaseNeuralSolver.__init__( + self, iterations=iterations, valid_freq=valid_freq + ) + ResampleMixin.__init__(self) + UnbalancednessMixin.__init__( + self, source_dim=input_dim, target_dim=input_dim, cond_dim=cond_dim, @@ -59,7 +62,6 @@ def __init__( mlp_eta=mlp_eta, mlp_xi=mlp_xi, unbalanced_kwargs=unbalanced_kwargs, - **kwargs ) self.neural_vector_field = neural_vector_field @@ -105,10 +107,10 @@ def loss_fn( x_t = self.flow.compute_xt(noise, t, batch["source"], batch["target"]) apply_fn = functools.partial( - state_neural_vector_field.apply, {"params": params} + state_neural_vector_field.apply_fn, {"params": params} ) v_t = jax.vmap(apply_fn)( - t=t, x_t=x_t, condition=batch["condition"], keys_model=keys_model + t=t, x=x_t, condition=batch["condition"], keys_model=keys_model ) u_t = self.flow.compute_ut(t, batch["source"], batch["target"]) return jnp.mean((v_t - u_t) ** 2) @@ -122,7 +124,7 @@ def loss_fn( loss, grads = loss_grad( state_neural_vector_field.params, t, noise, batch, keys_model ) - return state_neural_vector_field.apply_gradients(grads), loss + return state_neural_vector_field.apply_gradients(grads=grads), loss return step_fn @@ -151,14 +153,16 @@ def match_pairs( def __call__(self, train_loader, valid_loader) -> None: batch: Mapping[str, jnp.ndarray] = {} for iter in range(self.iterations): + rng_resample, rng_step_fn, self.rng = random.split(self.rng, 3) batch["source"], batch["target"], batch["condition"] = next(train_loader) - tmat = 
self.match_fn(batch) - batch = self.resample( - batch, tmat, (batch["source"], batch["condition"]), - (batch["target"], batch["condition"]) - ) + tmat = self.match_fn(batch["source"], batch["target"]) + (batch["source"], + batch["condition"]), (batch["target"],) = self._resample_data( + rng_resample, tmat, (batch["source"], batch["condition"]), + (batch["target"],) + ) self.state_neural_vector_field, loss = self.step_fn( - self.state_neural_vector_field, batch + rng_step_fn, self.state_neural_vector_field, batch ) if self.learn_rescaling: self.state_eta, self.state_xi, eta_predictions, xi_predictions, loss_a, loss_b = self.unbalancedness_step_fn( @@ -206,12 +210,8 @@ def transport( ) def _valid_step(self, valid_loader, iter) -> None: - batch = next(valid_loader) - tmat = self.match_fn(batch) - batch = self.resample( - batch, tmat, (batch["source"], batch["condition"]), - (batch["target"], batch["condition"]) - ) + next(valid_loader) + # TODO: add callback and logging @property def learn_rescaling(self) -> bool: @@ -223,12 +223,12 @@ def save(self, path: str) -> None: def training_logs(self) -> Dict[str, Any]: raise NotImplementedError - def sample_t( + def sample_t( #TODO: make more general self, key: random.PRNGKey, batch_size: int ) -> jnp.ndarray: #TODO: make more general - return random.uniform(key, batch_size) + return random.uniform(key, [batch_size, 1]) - def sample_noise( + def sample_noise( #TODO: make more general self, key: random.PRNGKey, batch_size: int ) -> jnp.ndarray: #TODO: make more general return random.normal(key, shape=(batch_size, self.input_dim)) diff --git a/src/ott/neural/solvers/flows.py b/src/ott/neural/solvers/flows.py index b8c635f61..68cc84f5f 100644 --- a/src/ott/neural/solvers/flows.py +++ b/src/ott/neural/solvers/flows.py @@ -27,7 +27,7 @@ def compute_xt( self, noise: jax.Array, t: jax.Array, x_0: jax.Array, x_1: jax.Array ) -> jax.Array: mu_t = self.compute_mu_t(t, x_0, x_1) - sigma_t = self.compute_sigma_t(t, x_0, x_1) + sigma_t = 
self.compute_sigma_t(t) return mu_t + sigma_t * noise From 7869e3774277c213d26d89925618de913e051cf1 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Thu, 23 Nov 2023 11:59:39 +0100 Subject: [PATCH 010/186] [ci skip] add tests for FM with conditions and conditional OT with FM --- src/ott/neural/data/dataloaders.py | 2 +- src/ott/neural/solvers/base_solver.py | 5 ++ src/ott/neural/solvers/flow_matching.py | 54 +++++++----- tests/neural/conftest.py | 51 +++++++++++- tests/neural/flow_matching_test.py | 106 +++++++++++++++++++++++- 5 files changed, 190 insertions(+), 28 deletions(-) diff --git a/src/ott/neural/data/dataloaders.py b/src/ott/neural/data/dataloaders.py index 2bddebaa1..fe0c367b7 100644 --- a/src/ott/neural/data/dataloaders.py +++ b/src/ott/neural/data/dataloaders.py @@ -1,7 +1,7 @@ #import tensorflow as tf -class ConditionalDataLoader: +class ConditionalDataLoader: #TODO(@MUCDK) uncomment, resolve installation issues with TF pass #def __init__( diff --git a/src/ott/neural/solvers/base_solver.py b/src/ott/neural/solvers/base_solver.py index da7577565..02db3cae3 100644 --- a/src/ott/neural/solvers/base_solver.py +++ b/src/ott/neural/solvers/base_solver.py @@ -46,6 +46,11 @@ def save(self, path: Path): """Save the model.""" pass + @abstractmethod + def load(self, path: Path): + """Load the model.""" + pass + @property @abstractmethod def training_logs(self) -> Dict[str, Any]: diff --git a/src/ott/neural/solvers/flow_matching.py b/src/ott/neural/solvers/flow_matching.py index a0e48d414..4d8ea15ce 100644 --- a/src/ott/neural/solvers/flow_matching.py +++ b/src/ott/neural/solvers/flow_matching.py @@ -32,7 +32,7 @@ def __init__( cond_dim: int, iterations: int, valid_freq: int, - ot_solver: Type[was_solver.WassersteinSolver], + ot_solver: Optional[Type[was_solver.WassersteinSolver]], flow: Type[BaseFlow], optimizer: Type[optax.GradientTransformation], checkpoint_manager: Type[checkpoint.CheckpointManager] = None, @@ -83,14 +83,17 @@ def setup(self) -> None: ) 
self.step_fn = self._get_step_fn() - self.match_fn = self._get_match_fn( - self.ot_solver, - epsilon=self.epsilon, - cost_fn=self.cost_fn, - tau_a=self.tau_a, - tau_b=self.tau_b, - scale_cost=self.scale_cost, - ) + if self.ot_solver is not None: + self.match_fn = self._get_match_fn( + self.ot_solver, + epsilon=self.epsilon, + cost_fn=self.cost_fn, + tau_a=self.tau_a, + tau_b=self.tau_b, + scale_cost=self.scale_cost, + ) + else: + self.match_fn = None def _get_step_fn(self) -> Callable: @@ -155,12 +158,13 @@ def __call__(self, train_loader, valid_loader) -> None: for iter in range(self.iterations): rng_resample, rng_step_fn, self.rng = random.split(self.rng, 3) batch["source"], batch["target"], batch["condition"] = next(train_loader) - tmat = self.match_fn(batch["source"], batch["target"]) - (batch["source"], - batch["condition"]), (batch["target"],) = self._resample_data( - rng_resample, tmat, (batch["source"], batch["condition"]), - (batch["target"],) - ) + if self.ot_solver is not None: + tmat = self.match_fn(batch["source"], batch["target"]) + (batch["source"], + batch["condition"]), (batch["target"],) = self._resample_data( + rng_resample, tmat, (batch["source"], batch["condition"]), + (batch["target"],) + ) self.state_neural_vector_field, loss = self.step_fn( rng_step_fn, self.state_neural_vector_field, batch ) @@ -184,19 +188,22 @@ def transport( self, data: jnp.array, condition: Optional[jax.Array], - rng: random.PRNGKey, forward: bool = True, diffeqsolve_kwargs: Dict[str, Any] = types.MappingProxyType({}) ) -> diffrax.Solution: diffeqsolve_kwargs = dict(diffeqsolve_kwargs) - t0, t1 = (0, 1) if forward else (1, 0) + arr = jnp.ones((len(data), 1)) + t0, t1 = (arr * 0.0, arr * 1.0) if forward else (arr * 1.0, arr * 0.0) + apply_fn_partial = functools.partial( + self.state_neural_vector_field.apply_fn, condition=condition + ) return diffrax.diffeqsolve( diffrax.ODETerm( - lambda t, y: self.state_neural_vector_field. 
- apply({"params": self.state_neural_vector_field.params}, - t=t, - x=y, - condition=condition) + lambda t, y, *args: apply_fn_partial( + {"params": self.state_neural_vector_field.params}, + t=t, + x=y, + ) ), diffeqsolve_kwargs.pop("solver", diffrax.Tsit5()), t0=t0, @@ -220,6 +227,9 @@ def learn_rescaling(self) -> bool: def save(self, path: str) -> None: raise NotImplementedError + def load(self, path: str) -> "FlowMatching": + raise NotImplementedError + def training_logs(self) -> Dict[str, Any]: raise NotImplementedError diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index 57dba63bf..f1b6e16f8 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -1,6 +1,7 @@ -from typing import Optional +from typing import Dict, Iterator, Optional import jax +import jax.numpy as jnp import pytest @@ -33,9 +34,55 @@ def __next__(self) -> jax.Array: inds_source, :] if self.conditions is not None else None +class ConditionalDataLoader: + + def __init__( + self, rng: jax.random.KeyArray, dataloaders: Dict[str, Iterator], + p: jax.Array + ) -> None: + super().__init__() + self.rng = rng + self.dataloaders = dataloaders + self.conditions = list(dataloaders.keys()) + self.p = p + + def __next__(self) -> jnp.ndarray: + self.rng, rng = jax.random.split(self.rng, 2) + idx = jax.random.choice(rng, len(self.conditions), p=self.p) + return next(self.dataloaders[self.conditions[idx]]) + + @pytest.fixture(scope="module") def data_loader_gaussian(): """Returns a data loader for a simple Gaussian mixture.""" source = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) - target = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + target = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + 1.0 return DataLoader(source, target, None, 16) + + +@pytest.fixture(scope="module") +def data_loader_gaussian_conditional(): + """Returns a data loader for Gaussian mixtures with conditions.""" + source_0 = jax.random.normal(jax.random.PRNGKey(0), 
shape=(100, 2)) + target_0 = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + 2.0 + + source_1 = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + target_1 = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) - 2.0 + dl0 = DataLoader(source_0, target_0, jnp.zeros_like(source_0) * 0.0, 16) + dl1 = DataLoader(source_1, target_1, jnp.ones_like(source_1) * 1.0, 16) + + return ConditionalDataLoader( + jax.random.PRNGKey(0), { + "0": dl0, + "1": dl1 + }, jnp.array([0.5, 0.5]) + ) + + +@pytest.fixture(scope="module") +def data_loader_gaussian_with_conditions(): + """Returns a data loader for a simple Gaussian mixture with conditions.""" + source = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + conditions = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 1)) + target = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + 1.0 + return DataLoader(source, target, conditions, 16) diff --git a/tests/neural/flow_matching_test.py b/tests/neural/flow_matching_test.py index 23ce1e178..a1c6e8f3d 100644 --- a/tests/neural/flow_matching_test.py +++ b/tests/neural/flow_matching_test.py @@ -1,21 +1,35 @@ +from typing import Type + +import diffrax +import jax.numpy as jnp import optax +import pytest from ott.neural.models.models import NeuralVectorField from ott.neural.solvers.flow_matching import FlowMatching -from ott.neural.solvers.flows import ConstantNoiseFlow +from ott.neural.solvers.flows import ( + BaseFlow, + BrownianNoiseFlow, + ConstantNoiseFlow, +) from ott.solvers.linear import sinkhorn class TestFlowMatching: - def test_flow_matching(self, data_loader_gaussian): + @pytest.mark.parametrize( + "flow", + [ConstantNoiseFlow(0.0), + ConstantNoiseFlow(1.0), + BrownianNoiseFlow(0.2)] + ) + def test_flow_matching(self, data_loader_gaussian, flow: Type[BaseFlow]): neural_vf = NeuralVectorField( output_dim=2, condition_dim=0, latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() - flow = ConstantNoiseFlow(sigma=0) optimizer = 
optax.adam(learning_rate=1e-3) fm = FlowMatching( neural_vf, @@ -28,3 +42,89 @@ def test_flow_matching(self, data_loader_gaussian): optimizer=optimizer ) fm(data_loader_gaussian, data_loader_gaussian) + + source, target, condition = next(data_loader_gaussian) + result_forward = fm.transport(source, condition=condition, forward=True) + assert isinstance(result_forward, diffrax.Solution) + assert jnp.sum(jnp.isnan(result_forward.y)) == 0 + + result_backward = fm.transport(target, condition=condition, forward=False) + assert isinstance(result_backward, diffrax.Solution) + assert jnp.sum(jnp.isnan(result_backward.y)) == 0 + + @pytest.mark.parametrize( + "flow", + [ConstantNoiseFlow(0.0), + ConstantNoiseFlow(1.0), + BrownianNoiseFlow(0.2)] + ) + def test_flow_matching_with_conditions( + self, data_loader_gaussian_with_conditions, flow: Type[BaseFlow] + ): + neural_vf = NeuralVectorField( + output_dim=2, + condition_dim=1, + latent_embed_dim=5, + ) + ot_solver = sinkhorn.Sinkhorn() + optimizer = optax.adam(learning_rate=1e-3) + fm = FlowMatching( + neural_vf, + input_dim=2, + cond_dim=1, + iterations=3, + valid_freq=2, + ot_solver=ot_solver, + flow=flow, + optimizer=optimizer + ) + fm( + data_loader_gaussian_with_conditions, + data_loader_gaussian_with_conditions + ) + + source, target, condition = next(data_loader_gaussian_with_conditions) + result_forward = fm.transport(source, condition=condition, forward=True) + assert isinstance(result_forward, diffrax.Solution) + assert jnp.sum(jnp.isnan(result_forward.y)) == 0 + + result_backward = fm.transport(target, condition=condition, forward=False) + assert isinstance(result_backward, diffrax.Solution) + assert jnp.sum(jnp.isnan(result_backward.y)) == 0 + + @pytest.mark.parametrize( + "flow", + [ConstantNoiseFlow(0.0), + ConstantNoiseFlow(1.0), + BrownianNoiseFlow(0.2)] + ) + def test_flow_matching_conditional( + self, data_loader_gaussian_conditional, flow: Type[BaseFlow] + ): + neural_vf = NeuralVectorField( + 
output_dim=2, + condition_dim=0, + latent_embed_dim=5, + ) + ot_solver = sinkhorn.Sinkhorn() + optimizer = optax.adam(learning_rate=1e-3) + fm = FlowMatching( + neural_vf, + input_dim=2, + cond_dim=0, + iterations=3, + valid_freq=2, + ot_solver=ot_solver, + flow=flow, + optimizer=optimizer + ) + fm(data_loader_gaussian_conditional, data_loader_gaussian_conditional) + + source, target, condition = next(data_loader_gaussian_conditional) + result_forward = fm.transport(source, condition=condition, forward=True) + assert isinstance(result_forward, diffrax.Solution) + assert jnp.sum(jnp.isnan(result_forward.y)) == 0 + + result_backward = fm.transport(target, condition=condition, forward=False) + assert isinstance(result_backward, diffrax.Solution) + assert jnp.sum(jnp.isnan(result_backward.y)) == 0 From 5a90dc1b2c4b20f10309af00256e6d0bbf136130 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Thu, 23 Nov 2023 18:03:45 +0100 Subject: [PATCH 011/186] [ci skip] add genot outline --- src/ott/neural/solvers/base_solver.py | 131 +++++++- src/ott/neural/solvers/flow_matching.py | 90 +++--- src/ott/neural/solvers/genot.py | 391 ++++++++++++++++++++++++ tests/neural/flow_matching_test.py | 9 +- 4 files changed, 574 insertions(+), 47 deletions(-) create mode 100644 src/ott/neural/solvers/genot.py diff --git a/src/ott/neural/solvers/base_solver.py b/src/ott/neural/solvers/base_solver.py index 02db3cae3..662fa73a3 100644 --- a/src/ott/neural/solvers/base_solver.py +++ b/src/ott/neural/solvers/base_solver.py @@ -9,9 +9,11 @@ from flax.training import train_state from jax import random +from ott.geometry import pointcloud from ott.geometry.pointcloud import PointCloud from ott.neural.models import models from ott.problems.linear import linear_problem +from ott.problems.quadratic import quadratic_problem from ott.solvers.linear import sinkhorn @@ -83,12 +85,131 @@ def _resample_data( b[indices_target, :] if b is not None else None for b in target_arrays ) - def 
_resample_data_conditionally( + def sample_conditional_indices_from_tmap( + key: jax.random.PRNGKeyArray, + tmat: jnp.ndarray, + k_samples_per_x: Union[int, jnp.ndarray], + source_arrays: Tuple[jnp.ndarray, ...], + target_arrays: Tuple[jnp.ndarray, ...], + *, + is_balanced: bool, + ) -> Tuple[jnp.array, jnp.array]: + left_marginals = tmat.sum(axis=1) + if not is_balanced: + key, key2 = jax.random.split(key, 2) + indices = jax.random.choice( + key=key2, + a=jnp.arange(len(left_marginals)), + p=left_marginals, + shape=(len(left_marginals),) + ) + else: + indices = jnp.arange(tmat.shape[0]) + tmat_adapted = tmat[indices] + indices_per_row = jax.vmap( + lambda tmat_adapted: jax.random.choice( + key=key, + a=jnp.arange(tmat.shape[1]), + p=tmat_adapted, + shape=(k_samples_per_x,) + ), + in_axes=0, + out_axes=0, + )( + tmat_adapted + ) + + indices_source = jnp.repeat(indices, k_samples_per_x) + indices_target = indices_per_row % tmat.shape[1] + return tuple( + b[indices_source, :] if b is not None else None for b in source_arrays + ), tuple( + b[indices_target, :] if b is not None else None for b in target_arrays + ) + + def _get_sinkhorn_match_fn( + self, + ot_solver: Any, + epsilon: float, + cost_fn: str, + scale_cost: Any, + tau_a: float, + tau_b: float, + ) -> Callable: + + def match_pairs( + x: jax.Array, y: jax.Array + ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray, jnp.ndarray]: + geom = pointcloud.PointCloud( + x, y, epsilon=epsilon, scale_cost=scale_cost, cost_fn=cost_fn + ) + return ot_solver( + linear_problem.LinearProblem(geom, tau_a=tau_a, tau_b=tau_b) + ).matrix + + return match_pairs + + def _get_gromov_match_fn( self, - *args: Any, - **kwargs: Any, - ): - raise NotImplementedError + ot_solver: Any, + cost_fn: Union[Any, Mapping[str, Any]], + scale_cost: Union[Any, Mapping[str, Any]], + tau_a: float, + tau_b: float, + fused_penalty: float, + ) -> Callable: + if isinstance(cost_fn, Mapping): + assert "x_cost_fn" in cost_fn + assert "y_cost_fn" in cost_fn + 
x_cost_fn = cost_fn["x_cost_fn"] + y_cost_fn = cost_fn["y_cost_fn"] + if fused_penalty > 0: + assert "xy_cost_fn" in x_cost_fn + xy_cost_fn = cost_fn["xy_cost_fn"] + else: + x_cost_fn = y_cost_fn = xy_cost_fn = cost_fn + + if isinstance(scale_cost, Mapping): + assert "x_scale_cost" in scale_cost + assert "y_scale_cost" in scale_cost + x_scale_cost = scale_cost["x_scale_cost"] + y_scale_cost = scale_cost["y_scale_cost"] + if fused_penalty > 0: + assert "xy_scale_cost" in scale_cost + xy_scale_cost = cost_fn["xy_scale_cost"] + else: + x_scale_cost = y_scale_cost = xy_scale_cost = scale_cost + + def match_pairs( + x_quad: Tuple[jnp.ndarray, jnp.ndarray], + y_quad: Tuple[jnp.ndarray, jnp.ndarray], + x_lin: Optional[jax.Array], + y_lin: Optional[jax.Array], + ) -> Tuple[jnp.array, jnp.array]: + geom_xx = pointcloud.PointCloud( + x=x_quad, y=x_quad, cost_fn=x_cost_fn, scale_cost=x_scale_cost + ) + geom_yy = pointcloud.PointCloud( + x=y_quad, y=y_quad, cost_fn=y_cost_fn, scale_cost=y_scale_cost + ) + if fused_penalty > 0: + geom_xy = pointcloud.PointCloud( + x=x_lin, y=y_lin, cost_fn=xy_cost_fn, scale_cost=xy_scale_cost + ) + else: + geom_xy = None + prob = quadratic_problem.QuadraticProblem( + geom_xx, + geom_yy, + geom_xy, + fused_penalty=fused_penalty, + tau_a=tau_a, + tau_b=tau_b + ) + out = ot_solver(prob) + return out.matrix + + return match_pairs class UnbalancednessMixin: diff --git a/src/ott/neural/solvers/flow_matching.py b/src/ott/neural/solvers/flow_matching.py index 4d8ea15ce..3532d09c1 100644 --- a/src/ott/neural/solvers/flow_matching.py +++ b/src/ott/neural/solvers/flow_matching.py @@ -9,7 +9,7 @@ from jax import random from orbax import checkpoint -from ott.geometry import costs, pointcloud +from ott.geometry import costs from ott.neural.models.models import BaseNeuralVectorField from ott.neural.solvers.base_solver import ( BaseNeuralSolver, @@ -19,7 +19,6 @@ from ott.neural.solvers.flows import ( BaseFlow, ) -from ott.problems.linear import linear_problem 
from ott.solvers import was_solver @@ -84,13 +83,13 @@ def setup(self) -> None: self.step_fn = self._get_step_fn() if self.ot_solver is not None: - self.match_fn = self._get_match_fn( + self.match_fn = self._get_sinkhorn_match_fn( self.ot_solver, epsilon=self.epsilon, cost_fn=self.cost_fn, + scale_cost=self.scale_cost, tau_a=self.tau_a, tau_b=self.tau_b, - scale_cost=self.scale_cost, ) else: self.match_fn = None @@ -131,28 +130,6 @@ def loss_fn( return step_fn - def _get_match_fn( - self, - ot_solver: Any, - epsilon: float, - cost_fn: str, - tau_a: float, - tau_b: float, - scale_cost: Any, - ) -> Callable: - - def match_pairs( - x: jax.Array, y: jax.Array - ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray, jnp.ndarray]: - geom = pointcloud.PointCloud( - x, y, epsilon=epsilon, scale_cost=scale_cost, cost_fn=cost_fn - ) - return ot_solver( - linear_problem.LinearProblem(geom, tau_a=tau_a, tau_b=tau_b) - ).matrix - - return match_pairs - def __call__(self, train_loader, valid_loader) -> None: batch: Mapping[str, jnp.ndarray] = {} for iter in range(self.iterations): @@ -192,27 +169,64 @@ def transport( diffeqsolve_kwargs: Dict[str, Any] = types.MappingProxyType({}) ) -> diffrax.Solution: diffeqsolve_kwargs = dict(diffeqsolve_kwargs) + + def solve_ode( + t0: jax.Array, t1: jax.Array, input: jax.Array, cond: jax.Array + ): + return diffrax.diffeqsolve( + diffrax.ODETerm( + lambda t, x, args: self.state_neural_vector_field. 
+ apply_fn({"params": self.state_neural_vector_field.params}, + t=t, + x=x, + condition=cond) + ), + diffeqsolve_kwargs.pop("solver", diffrax.Tsit5()), + t0=t0, + t1=t1, + dt0=diffeqsolve_kwargs.pop("dt0", None), + y0=input, + stepsize_controller=diffeqsolve_kwargs.pop( + "stepsize_controller", + diffrax.PIDController(rtol=1e-5, atol=1e-5) + ), + **diffeqsolve_kwargs, + ).solution.y + + arr = jnp.ones((len(data), 1)) + t0, t1 = (arr * 0.0, arr * 1.0) if forward else (arr * 1.0, arr * 0.0) + + out = jax.vmap(solve_ode)(t0, t1, data, condition) + return out + + def _transport( + self, + data: jnp.array, + condition: Optional[jax.Array], + forward: bool = True, + diffeqsolve_kwargs: Dict[str, Any] = types.MappingProxyType({}) + ) -> diffrax.Solution: + diffeqsolve_kwargs = dict(diffeqsolve_kwargs) arr = jnp.ones((len(data), 1)) t0, t1 = (arr * 0.0, arr * 1.0) if forward else (arr * 1.0, arr * 0.0) apply_fn_partial = functools.partial( - self.state_neural_vector_field.apply_fn, condition=condition + self.state_neural_vector_field.apply_fn, + params={"params": self.state_neural_vector_field.params}, + condition=condition + ) + term = diffrax.ODETerm(lambda t, y, *args: apply_fn_partial(t, y, *args)) + solver = diffeqsolve_kwargs.pop("solver", diffrax.Tsit5()) + stepsize_controller = diffeqsolve_kwargs.pop( + "stepsize_controller", diffrax.PIDController(rtol=1e-5, atol=1e-5) ) return diffrax.diffeqsolve( - diffrax.ODETerm( - lambda t, y, *args: apply_fn_partial( - {"params": self.state_neural_vector_field.params}, - t=t, - x=y, - ) - ), - diffeqsolve_kwargs.pop("solver", diffrax.Tsit5()), + term, + solver, t0=t0, t1=t1, dt0=diffeqsolve_kwargs.pop("dt0", None), y0=data, - stepsize_controller=diffeqsolve_kwargs.pop( - "stepsize_controller", diffrax.PIDController(rtol=1e-5, atol=1e-5) - ), + stepsize_controller=stepsize_controller, **diffeqsolve_kwargs, ) diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/solvers/genot.py new file mode 100644 index 
000000000..26ae93ede --- /dev/null +++ b/src/ott/neural/solvers/genot.py @@ -0,0 +1,391 @@ +import types +from functools import partial +from typing import ( + Any, + Callable, + Dict, + Literal, + Mapping, + Optional, + Tuple, + Type, + Union, +) + +import diffrax +import jax +import jax.numpy as jnp +import optax +from flax.training.train_state import TrainState +from jax import random +from tqdm import tqdm + +from ott.geometry import costs +from ott.neural.models.models import BaseNeuralVectorField +from ott.neural.solvers.base_solver import ( + BaseNeuralSolver, + ResampleMixin, + UnbalancednessMixin, +) +from ott.neural.solvers.flows import BaseFlow, ConstantNoiseFlow +from ott.solvers import was_solver +from ott.solvers.linear import sinkhorn +from ott.solvers.quadratic import gromov_wasserstein + +Match_fn_T = Callable[[jax.random.PRNGKeyArray, jnp.array, jnp.array], + Tuple[jnp.array, jnp.array, jnp.array, jnp.array]] +Match_latent_fn_T = Callable[[jax.random.PRNGKeyArray, jnp.array, jnp.array], + Tuple[jnp.array, jnp.array]] + + +class GENOT(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): + + def __init__( + self, + neural_vector_field: Type[BaseNeuralVectorField], + input_dim: int, + output_dim: int, + cond_dim: int, + iterations: int, + valid_freq: int, + ot_solver: Type[was_solver.WassersteinSolver], + optimizer: Type[optax.GradientTransformation], + flow: Type[BaseFlow] = ConstantNoiseFlow(0.0), + k_noise_per_x: int = 1, + t_offset: float = 1e-5, + epsilon: float = 1e-2, + cost_fn: Union[costs.CostFn, Literal["graph"]] = costs.SqEuclidean(), + solver_latent_to_data: Optional[Type[was_solver.WassersteinSolver] + ] = None, + latent_to_data_epsilon: float = 1e-2, + latent_to_data_scale_cost: Any = 1.0, + scale_cost: Union[Any, Mapping[str, Any]] = 1.0, + graph_kwargs: Dict[str, Any] = types.MappingProxyType({}), + fused_penalty: float = 0.0, + tau_a: float = 1.0, + tau_b: float = 1.0, + mlp_eta: Callable[[jnp.ndarray], float] = None, + mlp_xi: 
Callable[[jnp.ndarray], float] = None, + unbalanced_kwargs: Dict[str, Any] = {}, + callback: Optional[Callable[[jnp.ndarray, jnp.ndarray, jnp.ndarray], + Any]] = None, + callback_kwargs: Dict[str, Any] = {}, + callback_iters: int = 10, + rng: random.PRNGKeyArray = random.PRNGKey(0), + **kwargs: Any, + ) -> None: + """The GENOT training class. + + Parameters + ---------- + neural_vector_field + Neural vector field + input_dim + Dimension of the source distribution + output_dim + Dimension of the target distribution + cond_dim + Dimension of the condition + iterations + Number of iterations to train + valid_freq + Number of iterations after which to perform a validation step + ot_solver + Solver to match samples from the source to the target distribution + optimizer + Optimizer for the neural vector field + flow + Flow to use in the target space from noise to data. Should be of type + `ConstantNoiseFlow` to recover the setup in the paper TODO. + k_noise_per_x + Number of samples to draw from the conditional distribution + t_offset + Offset for sampling from the time t + epsilon + Entropy regularization parameter for the discrete solver + cost_fn + Cost function to use for the discrete OT solver + solver_latent_to_data + Linear OT solver to match samples from the noise to the conditional distribution + latent_to_data_epsilon + Entropy regularization term for `solver_latent_to_data` + latent_to_data_scale_cost + How to scale the cost matrix for the `solver_latent_to_data` solver + scale_cost + How to scale the cost matrix in each discrete OT problem + graph_kwargs + Keyword arguments for the graph cost computation in case `cost="graph"` + fused_penalty + Penalisation term for the linear term in a Fused GW setting + split_dim + Dimension to split the data into fused term and purely quadratic term in the FGW setting + mlp_eta + Neural network to learn the left rescaling function + mlp_xi + Neural network to learn the right rescaling function + tau_a + Left unbalancedness 
parameter + tau_b + Right unbalancedness parameter + callback + Callback function + callback_kwargs + Keyword arguments to the callback function + callback_iters + Number of iterations after which to evaluate callback function + seed + Random seed + kwargs + Keyword arguments passed to `setup`, e.g. custom choice of optimizers for learning rescaling functions + """ + BaseNeuralSolver.__init__( + self, iterations=iterations, valid_freq=valid_freq + ) + ResampleMixin.__init__(self) + UnbalancednessMixin.__init__( + self, + source_dim=input_dim, + target_dim=input_dim, + cond_dim=cond_dim, + tau_a=tau_a, + tau_b=tau_b, + mlp_eta=mlp_eta, + mlp_xi=mlp_xi, + unbalanced_kwargs=unbalanced_kwargs, + ) + + if isinstance( + ot_solver, gromov_wasserstein.GromovWasserstein + ) and epsilon is not None: + raise ValueError( + "If `ot_solver` is `GromovWasserstein`, `epsilon` must be `None`. This check is performed " + "to ensure that in the (fused) Gromov case the `epsilon` parameter is passed via the `ot_solver`." 
+ ) + + # setup parameters + self.rng = rng + self.metrics = {"loss": [], "loss_eta": [], "loss_xi": []} + + # neural parameters + self.neural_vector_field = neural_vector_field + self.state_neural_vector_field: Optional[TrainState] = None + self.optimizer = optimizer + self.noise_fn = jax.tree_util.Partial( + jax.random.multivariate_normal, + mean=jnp.zeros((output_dim,)), + cov=jnp.diag(jnp.ones((output_dim,))) + ) + self.input_dim = input_dim + self.output_dim = output_dim + self.cond_dim = cond_dim + self.k_noise_per_x = k_noise_per_x + + # OT data-data matching parameters + self.ot_solver = ot_solver + self.epsilon = epsilon + self.cost_fn = cost_fn + self.scale_cost = scale_cost + self.graph_kwargs = graph_kwargs # "k_neighbors", kwargs for graph.Graph.from_graph() + self.fused_penalty = fused_penalty + + # OT latent-data matching parameters + self.solver_latent_to_data = solver_latent_to_data + self.latent_to_data_epsilon = latent_to_data_epsilon + self.latent_to_data_scale_cost = latent_to_data_scale_cost + + # callback parameteres + self.callback = callback + self.callback_kwargs = callback_kwargs + self.callback_iters = callback_iters + + #TODO: check how to handle this + self.t_offset = t_offset + + self.setup(**kwargs) + + def setup(self) -> None: + """Set up the model. 
+ + Parameters + ---------- + kwargs + Keyword arguments for the setup function + """ + self.state_neural_vector_field = self.neural_vector_field.create_train_state( + self.rng, self.optimizer, self.input_dim + ) + self.step_fn = self._get_step_fn() + if self.solver_latent_to_data is not None: + self.match_latent_to_data_fn = self._get_match_latent_fn( + self.solver_latent_to_data, self.latent_to_data_epsilon, + self.latent_to_data_scale_cost + ) + else: + self.match_latent_to_data_fn = lambda key, x, y, **_: (x, y) + + if isinstance(self.ot_solver, sinkhorn.Sinkhorn): + self.match_fn = self._get_sinkhorn_match_fn( + self.ot_solver, self.epsilon, self.cost_fn, self.tau_a, self.tau_b, + self.scale_cost + ) + else: + self._get_gromov_match_fn( + self.ot_solver, self.cost_fn, self.tau_a, self.tau_b, self.scale_cost, + self.fused_penalty + ) + + def __call__(self, train_loader, valid_loader) -> None: + """Train GENOT.""" + batch: Dict[str, jnp.array] = {} + for step in tqdm(range(self.iterations)): + batch["source"], batch["source_q"], batch["target"], batch[ + "target_q"], batch["condition"] = next(train_loader) + + self.rng, rng_time, rng_match, rng_resample, rng_noise, rng_step_fn = jax.random.split( + self.rng, 6 + ) + n_samples = len(batch["source"]) * self.k_noise_per_k + t = ( + jax.random.uniform(rng_time, (1,)) + jnp.arange(n_samples) / n_samples + ) % (1 - self.t_offset) + batch["time"] = t[:, None] + batch["noise"] = self.noise_fn( + rng_noise, shape=(batch["source"], self.k_noise_per_x) + ) + + tmat = self.match_fn(rng_match, batch["source"], batch["target"]) + (batch["source"], batch["source_q"], batch["condition"] + ), (batch["target"], batch["target_q"]) = self._resample_data( + rng_resample, tmat, + (batch["source"], batch["source_q"], batch["condition"]), + (batch["target"], batch["target_q"]) + ) + rng_noise = jax.random.split(rng_noise, (len(batch["target"]))) + + noise_matched, conditional_target = jax.vmap( + self.match_latent_to_data_fn, 0, 0 + 
)(key=rng_noise, x=batch["noise"], y=batch["target"]) + + batch["source"] = jnp.reshape(batch["source"], (len(batch["source"]), -1)) + batch["target"] = jnp.reshape( + conditional_target, (len(batch["source"]), -1) + ) + batch["noise"] = jnp.reshape(noise_matched, (len(batch["soruce"]), -1)) + + self.state_neural_vector_field, loss = self.step_fn( + rng_step_fn, self.state_neural_vector_field, batch + ) + if self.learn_rescaling: + self.state_eta, self.state_xi, eta_predictions, xi_predictions, loss_a, loss_b = self.unbalancedness_step_fn( + batch, tmat.sum(axis=1), tmat.sum(axis=0) + ) + if iter % self.valid_freq == 0: + self._valid_step(valid_loader, iter) + if self.checkpoint_manager is not None: + states_to_save = { + "state_neural_vector_field": self.state_neural_vector_field + } + if self.state_mlp is not None: + states_to_save["state_eta"] = self.state_mlp + if self.state_xi is not None: + states_to_save["state_xi"] = self.state_xi + self.checkpoint_manager.save(iter, states_to_save) + + def _get_step_fn(self) -> Callable: + + def loss_fn( + params_mlp: jnp.array, + apply_fn_mlp: Callable, + batch: Dict[str, jnp.array], + ): + + def phi_t( + x_0: jnp.ndarray, x_1: jnp.ndarray, t: jnp.ndarray + ) -> jnp.ndarray: + return (1 - t) * x_0 + t * x_1 + + def u_t(x_0: jnp.ndarray, x_1: jnp.ndarray) -> jnp.ndarray: + return x_1 - x_0 + + phi_t_eval = phi_t(batch["noise"], batch["target"], batch["time"]) + mlp_pred = apply_fn_mlp({"params": params_mlp}, + t=batch["time"], + latent=phi_t_eval, + condition=batch["source"]) + d_psi = u_t(batch["noise"], batch["target"]) + + return jnp.mean(optax.l2_loss(mlp_pred, d_psi)) + + @jax.jit + def step_fn( + key: jax.random.PRNGKeyArray, + state_neural_net: TrainState, + batch: Dict[str, jnp.array], + ): + + grad_fn = jax.value_and_grad(loss_fn, has_aux=False) + loss, grads_mlp = grad_fn( + state_neural_net.params, + state_neural_net.apply_fn, + batch, + ) + metrics = {} + metrics["loss"] = loss + + return 
(state_neural_net.apply_gradients(grads=grads_mlp), loss) + + return step_fn + + def transport( + self, + source: jnp.array, + seed: int = 0, + diffeqsolve_kwargs: Dict[str, Any] = types.MappingProxyType({}) + ) -> Union[jnp.array, diffrax.Solution, Optional[jnp.ndarray]]: + """Transport the distribution. + + Parameters + ---------- + source + Source distribution to transport + seed + Random seed for sampling from the latent distribution + diffeqsolve_kwargs + Keyword arguments for the ODE solver. + + Returns: + ------- + The transported samples, the solution of the neural ODE, and the rescaling factor. + """ + diffeqsolve_kwargs = dict(diffeqsolve_kwargs) + rng = jax.random.PRNGKey(seed) + latent_shape = (len(source),) + latent_batch = self.noise_fn(rng, shape=latent_shape) + apply_fn_partial = partial( + self.state_neural_vector_field.apply_fn, condition=source + ) + solution = diffrax.diffeqsolve( + diffrax.ODETerm( + lambda t, y, *args: + apply_fn_partial({"params": self.state_neural_vector_field.params}, + t=t, + latent=y) + ), + diffeqsolve_kwargs.pop("solver", diffrax.Tsit5()), + t0=0, + t1=1, + dt0=diffeqsolve_kwargs.pop("dt0", None), + y0=latent_batch, + stepsize_controller=diffeqsolve_kwargs.pop( + "stepsize_controller", diffrax.PIDController(rtol=1e-3, atol=1e-6) + ), + **diffeqsolve_kwargs, + ) + if self.state_eta is not None: + weight_factors = self.state_eta.apply_fn({ + "params": self.state_eta.params + }, + x=source) + else: + weight_factors = jnp.ones(source.shape) + return solution.ys, solution, weight_factors diff --git a/tests/neural/flow_matching_test.py b/tests/neural/flow_matching_test.py index a1c6e8f3d..39199106b 100644 --- a/tests/neural/flow_matching_test.py +++ b/tests/neural/flow_matching_test.py @@ -1,6 +1,7 @@ from typing import Type import diffrax +import jax import jax.numpy as jnp import optax import pytest @@ -85,12 +86,12 @@ def test_flow_matching_with_conditions( source, target, condition = 
next(data_loader_gaussian_with_conditions) result_forward = fm.transport(source, condition=condition, forward=True) - assert isinstance(result_forward, diffrax.Solution) - assert jnp.sum(jnp.isnan(result_forward.y)) == 0 + assert isinstance(result_forward, jax.Array) + assert jnp.sum(jnp.isnan(result_forward)) == 0 result_backward = fm.transport(target, condition=condition, forward=False) - assert isinstance(result_backward, diffrax.Solution) - assert jnp.sum(jnp.isnan(result_backward.y)) == 0 + assert isinstance(result_backward, jax.Array) + assert jnp.sum(jnp.isnan(result_backward)) == 0 @pytest.mark.parametrize( "flow", From c843758c8510db019b2892f08959043537508d7b Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Thu, 23 Nov 2023 19:15:27 +0100 Subject: [PATCH 012/186] [ci skip] restructure genot --- src/ott/neural/solvers/base_solver.py | 2 +- src/ott/neural/solvers/flow_matching.py | 7 +- src/ott/neural/solvers/genot.py | 150 +++++++++++++----------- 3 files changed, 89 insertions(+), 70 deletions(-) diff --git a/src/ott/neural/solvers/base_solver.py b/src/ott/neural/solvers/base_solver.py index 662fa73a3..eb14fadee 100644 --- a/src/ott/neural/solvers/base_solver.py +++ b/src/ott/neural/solvers/base_solver.py @@ -85,7 +85,7 @@ def _resample_data( b[indices_target, :] if b is not None else None for b in target_arrays ) - def sample_conditional_indices_from_tmap( + def _sample_conditional_indices_from_tmap( key: jax.random.PRNGKeyArray, tmat: jnp.ndarray, k_samples_per_x: Union[int, jnp.ndarray], diff --git a/src/ott/neural/solvers/flow_matching.py b/src/ott/neural/solvers/flow_matching.py index 3532d09c1..bfa5b5110 100644 --- a/src/ott/neural/solvers/flow_matching.py +++ b/src/ott/neural/solvers/flow_matching.py @@ -6,6 +6,7 @@ import jax import jax.numpy as jnp import optax +from flax.training import train_state from jax import random from orbax import checkpoint @@ -98,7 +99,7 @@ def _get_step_fn(self) -> Callable: def step_fn( key: random.PRNGKeyArray, - 
state_neural_vector_field: Any, + state_neural_vector_field: train_state.TrainState, batch: Dict[str, jnp.ndarray], ) -> Tuple[Any, Any]: @@ -122,8 +123,8 @@ def loss_fn( keys_model = random.split(key_model, batch_size) t = self.sample_t(key_t, batch_size) noise = self.sample_noise(key_noise, batch_size) - loss_grad = jax.value_and_grad(loss_fn) - loss, grads = loss_grad( + grad_fn = jax.value_and_grad(loss_fn) + loss, grads = grad_fn( state_neural_vector_field.params, t, noise, batch, keys_model ) return state_neural_vector_field.apply_gradients(grads=grads), loss diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/solvers/genot.py index 26ae93ede..110d65738 100644 --- a/src/ott/neural/solvers/genot.py +++ b/src/ott/neural/solvers/genot.py @@ -1,21 +1,23 @@ +import functools import types from functools import partial from typing import ( - Any, - Callable, - Dict, - Literal, - Mapping, - Optional, - Tuple, - Type, - Union, + Any, + Callable, + Dict, + Literal, + Mapping, + Optional, + Tuple, + Type, + Union, ) import diffrax import jax import jax.numpy as jnp import optax +from flax.training import train_state from flax.training.train_state import TrainState from jax import random from tqdm import tqdm @@ -23,9 +25,9 @@ from ott.geometry import costs from ott.neural.models.models import BaseNeuralVectorField from ott.neural.solvers.base_solver import ( - BaseNeuralSolver, - ResampleMixin, - UnbalancednessMixin, + BaseNeuralSolver, + ResampleMixin, + UnbalancednessMixin, ) from ott.neural.solvers.flows import BaseFlow, ConstantNoiseFlow from ott.solvers import was_solver @@ -57,8 +59,7 @@ def __init__( cost_fn: Union[costs.CostFn, Literal["graph"]] = costs.SqEuclidean(), solver_latent_to_data: Optional[Type[was_solver.WassersteinSolver] ] = None, - latent_to_data_epsilon: float = 1e-2, - latent_to_data_scale_cost: Any = 1.0, + kwargs_solver_latent_to_data: Dict[str, Any] = types.MappingProxyType({}), scale_cost: Union[Any, Mapping[str, Any]] = 1.0, 
graph_kwargs: Dict[str, Any] = types.MappingProxyType({}), fused_penalty: float = 0.0, @@ -190,8 +191,7 @@ def __init__( # OT latent-data matching parameters self.solver_latent_to_data = solver_latent_to_data - self.latent_to_data_epsilon = latent_to_data_epsilon - self.latent_to_data_scale_cost = latent_to_data_scale_cost + self.kwargs_solver_latent_to_data = kwargs_solver_latent_to_data # callback parameteres self.callback = callback @@ -216,13 +216,13 @@ def setup(self) -> None: ) self.step_fn = self._get_step_fn() if self.solver_latent_to_data is not None: - self.match_latent_to_data_fn = self._get_match_latent_fn( - self.solver_latent_to_data, self.latent_to_data_epsilon, - self.latent_to_data_scale_cost + self.match_latent_to_data_fn = self._get_sinkhorn_match_fn( + self.solver_latent_to_data, **self.kwargs_solver_latent_to_data ) else: self.match_latent_to_data_fn = lambda key, x, y, **_: (x, y) + # TODO: add graph construction function if isinstance(self.ot_solver, sinkhorn.Sinkhorn): self.match_fn = self._get_sinkhorn_match_fn( self.ot_solver, self.epsilon, self.cost_fn, self.tau_a, self.tau_b, @@ -241,36 +241,39 @@ def __call__(self, train_loader, valid_loader) -> None: batch["source"], batch["source_q"], batch["target"], batch[ "target_q"], batch["condition"] = next(train_loader) - self.rng, rng_time, rng_match, rng_resample, rng_noise, rng_step_fn = jax.random.split( - self.rng, 6 + self.rng, rng_time, rng_match, rng_resample, rng_noise, rng_latent_data_match, rng_step_fn = jax.random.split( + self.rng, 7 ) n_samples = len(batch["source"]) * self.k_noise_per_k - t = ( - jax.random.uniform(rng_time, (1,)) + jnp.arange(n_samples) / n_samples - ) % (1 - self.t_offset) - batch["time"] = t[:, None] + batch["time"] = self.sample_t(key, n_samples) batch["noise"] = self.noise_fn( rng_noise, shape=(batch["source"], self.k_noise_per_x) ) tmat = self.match_fn(rng_match, batch["source"], batch["target"]) (batch["source"], batch["source_q"], batch["condition"] + ), 
(batch["target"], + batch["target_q"]) = self._sample_conditional_indices_from_tmap( + rng_resample, tmat, self.k_noise_per_x, + (batch["source"], batch["source_q"], batch["condition"]), + (batch["target"], batch["target_q"]) + ) + rng_noise = jax.random.split(rng_noise, (len(batch["target"]))) + + tmat_latent_data = jax.vmap(self.match_latent_to_data_fn, 0, 0)( + key=rng_noise, x=batch["noise"], y=batch["target"] + ) + (batch["source"], batch["source_q"], batch["condition"] ), (batch["target"], batch["target_q"]) = self._resample_data( - rng_resample, tmat, + rng_latent_data_match, tmat_latent_data, (batch["source"], batch["source_q"], batch["condition"]), (batch["target"], batch["target_q"]) ) - rng_noise = jax.random.split(rng_noise, (len(batch["target"]))) - - noise_matched, conditional_target = jax.vmap( - self.match_latent_to_data_fn, 0, 0 - )(key=rng_noise, x=batch["noise"], y=batch["target"]) - batch["source"] = jnp.reshape(batch["source"], (len(batch["source"]), -1)) - batch["target"] = jnp.reshape( - conditional_target, (len(batch["source"]), -1) - ) - batch["noise"] = jnp.reshape(noise_matched, (len(batch["soruce"]), -1)) + batch = { + key: jnp.reshape(arr, (len(batch["source"]), -1)) + for key, arr in batch.items() + } self.state_neural_vector_field, loss = self.step_fn( rng_step_fn, self.state_neural_vector_field, batch @@ -293,46 +296,38 @@ def __call__(self, train_loader, valid_loader) -> None: def _get_step_fn(self) -> Callable: - def loss_fn( - params_mlp: jnp.array, - apply_fn_mlp: Callable, - batch: Dict[str, jnp.array], - ): - - def phi_t( - x_0: jnp.ndarray, x_1: jnp.ndarray, t: jnp.ndarray - ) -> jnp.ndarray: - return (1 - t) * x_0 + t * x_1 - - def u_t(x_0: jnp.ndarray, x_1: jnp.ndarray) -> jnp.ndarray: - return x_1 - x_0 - - phi_t_eval = phi_t(batch["noise"], batch["target"], batch["time"]) - mlp_pred = apply_fn_mlp({"params": params_mlp}, - t=batch["time"], - latent=phi_t_eval, - condition=batch["source"]) - d_psi = u_t(batch["noise"], 
batch["target"]) - - return jnp.mean(optax.l2_loss(mlp_pred, d_psi)) - @jax.jit def step_fn( key: jax.random.PRNGKeyArray, - state_neural_net: TrainState, + state_neural_vector_field: train_state.TrainState, batch: Dict[str, jnp.array], ): + def loss_fn( + params: jax.Array, t: jax.Array, noise: jax.Array, + batch: Dict[str, jnp.array], keys_model: random.PRNGKeyArray + ): + + x_t = self.flow.compute_xt(noise, t, batch["latent"], batch["target"]) + apply_fn = functools.partial( + state_neural_vector_field.apply_fn, {"params": params} + ) + cond_input = jnp.concatenate([batch["source"], batch["condition"]], + axis=-1) + v_t = jax.vmap(apply_fn)( + t=t, x=x_t, condition=cond_input, keys_model=keys_model + ) + u_t = self.flow.compute_ut(t, batch["latent"], batch["target"]) + return jnp.mean((v_t - u_t) ** 2) + grad_fn = jax.value_and_grad(loss_fn, has_aux=False) - loss, grads_mlp = grad_fn( - state_neural_net.params, - state_neural_net.apply_fn, + loss, grads = grad_fn( + state_neural_vector_field.params, + state_neural_vector_field.apply_fn, batch, ) - metrics = {} - metrics["loss"] = loss - return (state_neural_net.apply_gradients(grads=grads_mlp), loss) + return state_neural_vector_field.apply_gradients(grads=grads), loss return step_fn @@ -389,3 +384,26 @@ def transport( else: weight_factors = jnp.ones(source.shape) return solution.ys, solution, weight_factors + + def _valid_step(self, valid_loader, iter) -> None: + next(valid_loader) + + # TODO: add callback and logging + + @property + def learn_rescaling(self) -> bool: + return self.mlp_eta is not None or self.mlp_xi is not None + + def save(self, path: str) -> None: + raise NotImplementedError + + def load(self, path: str) -> "GENOT": + raise NotImplementedError + + def training_logs(self) -> Dict[str, Any]: + raise NotImplementedError + + def sample_t( #TODO: make more general + self, key: random.PRNGKey, batch_size: int + ) -> jnp.ndarray: #TODO: make more general + return random.uniform(key, [batch_size, 1]) 
From ef86c540651addcc4d5d3d774621f51f57b6c255 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Fri, 24 Nov 2023 11:02:48 +0100 Subject: [PATCH 013/186] [ci skip] restructure genot --- src/ott/neural/solvers/base_solver.py | 23 +++- src/ott/neural/solvers/genot.py | 164 ++++++++++++++++---------- tests/neural/conftest.py | 66 +++++++++++ tests/neural/genot_test.py | 39 ++++++ 4 files changed, 226 insertions(+), 66 deletions(-) create mode 100644 tests/neural/genot_test.py diff --git a/src/ott/neural/solvers/base_solver.py b/src/ott/neural/solvers/base_solver.py index eb14fadee..9d323b13c 100644 --- a/src/ott/neural/solvers/base_solver.py +++ b/src/ott/neural/solvers/base_solver.py @@ -86,16 +86,17 @@ def _resample_data( ) def _sample_conditional_indices_from_tmap( + self, key: jax.random.PRNGKeyArray, tmat: jnp.ndarray, k_samples_per_x: Union[int, jnp.ndarray], source_arrays: Tuple[jnp.ndarray, ...], target_arrays: Tuple[jnp.ndarray, ...], *, - is_balanced: bool, + source_is_balanced: bool, ) -> Tuple[jnp.array, jnp.array]: left_marginals = tmat.sum(axis=1) - if not is_balanced: + if not source_is_balanced: key, key2 = jax.random.split(key, 2) indices = jax.random.choice( key=key2, @@ -135,6 +136,8 @@ def _get_sinkhorn_match_fn( scale_cost: Any, tau_a: float, tau_b: float, + *, + filter_input: bool = False, ) -> Callable: def match_pairs( @@ -147,7 +150,17 @@ def match_pairs( linear_problem.LinearProblem(geom, tau_a=tau_a, tau_b=tau_b) ).matrix - return match_pairs + def match_pairs_filtered( + x_lin: jax.Array, x_quad: jax.Array, y_lin: jax.Array, y_quad: jax.Array + ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray, jnp.ndarray]: + geom = pointcloud.PointCloud( + x_lin, y_lin, epsilon=epsilon, scale_cost=scale_cost, cost_fn=cost_fn + ) + return ot_solver( + linear_problem.LinearProblem(geom, tau_a=tau_a, tau_b=tau_b) + ).matrix + + return match_pairs_filtered if filter_input else match_pairs def _get_gromov_match_fn( self, @@ -181,10 +194,10 @@ def 
_get_gromov_match_fn( x_scale_cost = y_scale_cost = xy_scale_cost = scale_cost def match_pairs( - x_quad: Tuple[jnp.ndarray, jnp.ndarray], - y_quad: Tuple[jnp.ndarray, jnp.ndarray], x_lin: Optional[jax.Array], + x_quad: Tuple[jnp.ndarray, jnp.ndarray], y_lin: Optional[jax.Array], + y_quad: Tuple[jnp.ndarray, jnp.ndarray], ) -> Tuple[jnp.array, jnp.array]: geom_xx = pointcloud.PointCloud( x=x_quad, y=x_quad, cost_fn=x_cost_fn, scale_cost=x_scale_cost diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/solvers/genot.py index 110d65738..19cf7536c 100644 --- a/src/ott/neural/solvers/genot.py +++ b/src/ott/neural/solvers/genot.py @@ -20,7 +20,7 @@ from flax.training import train_state from flax.training.train_state import TrainState from jax import random -from tqdm import tqdm +from orbax import checkpoint from ott.geometry import costs from ott.neural.models.models import BaseNeuralVectorField @@ -52,6 +52,7 @@ def __init__( valid_freq: int, ot_solver: Type[was_solver.WassersteinSolver], optimizer: Type[optax.GradientTransformation], + checkpoint_manager: Type[checkpoint.CheckpointManager] = None, flow: Type[BaseFlow] = ConstantNoiseFlow(0.0), k_noise_per_x: int = 1, t_offset: float = 1e-5, @@ -61,7 +62,6 @@ def __init__( ] = None, kwargs_solver_latent_to_data: Dict[str, Any] = types.MappingProxyType({}), scale_cost: Union[Any, Mapping[str, Any]] = 1.0, - graph_kwargs: Dict[str, Any] = types.MappingProxyType({}), fused_penalty: float = 0.0, tau_a: float = 1.0, tau_b: float = 1.0, @@ -163,15 +163,13 @@ def __init__( "to ensure that in the (fused) Gromov case the `epsilon` parameter is passed via the `ot_solver`." 
) - # setup parameters self.rng = rng - self.metrics = {"loss": [], "loss_eta": [], "loss_xi": []} - - # neural parameters self.neural_vector_field = neural_vector_field self.state_neural_vector_field: Optional[TrainState] = None + self.flow = flow self.optimizer = optimizer - self.noise_fn = jax.tree_util.Partial( + self.checkpoint_manager = checkpoint_manager + self.latent_noise_fn = jax.tree_util.Partial( jax.random.multivariate_normal, mean=jnp.zeros((output_dim,)), cov=jnp.diag(jnp.ones((output_dim,))) @@ -186,7 +184,6 @@ def __init__( self.epsilon = epsilon self.cost_fn = cost_fn self.scale_cost = scale_cost - self.graph_kwargs = graph_kwargs # "k_neighbors", kwargs for graph.Graph.from_graph() self.fused_penalty = fused_penalty # OT latent-data matching parameters @@ -225,8 +222,13 @@ def setup(self) -> None: # TODO: add graph construction function if isinstance(self.ot_solver, sinkhorn.Sinkhorn): self.match_fn = self._get_sinkhorn_match_fn( - self.ot_solver, self.epsilon, self.cost_fn, self.tau_a, self.tau_b, - self.scale_cost + self.ot_solver, + self.epsilon, + self.cost_fn, + self.tau_a, + self.tau_b, + self.scale_cost, + filter_input=True ) else: self._get_gromov_match_fn( @@ -237,41 +239,65 @@ def setup(self) -> None: def __call__(self, train_loader, valid_loader) -> None: """Train GENOT.""" batch: Dict[str, jnp.array] = {} - for step in tqdm(range(self.iterations)): + for iteration in range(self.iterations): batch["source"], batch["source_q"], batch["target"], batch[ "target_q"], batch["condition"] = next(train_loader) self.rng, rng_time, rng_match, rng_resample, rng_noise, rng_latent_data_match, rng_step_fn = jax.random.split( self.rng, 7 ) - n_samples = len(batch["source"]) * self.k_noise_per_k - batch["time"] = self.sample_t(key, n_samples) - batch["noise"] = self.noise_fn( - rng_noise, shape=(batch["source"], self.k_noise_per_x) + batch_size = len(batch["source"] + ) if "source" in batch else len(batch["source_q"]) + n_samples = batch_size * 
self.k_noise_per_x + batch["time"] = self.sample_t(rng_time, n_samples) + batch["noise"] = self.sample_noise(rng_noise, n_samples) + batch["latent"] = self.latent_noise_fn( + rng_noise, shape=(batch_size, self.k_noise_per_x) ) - tmat = self.match_fn(rng_match, batch["source"], batch["target"]) + tmat = self.match_fn( + batch["source"], batch["source_q"], batch["target"], batch["target_q"] + ) (batch["source"], batch["source_q"], batch["condition"] ), (batch["target"], batch["target_q"]) = self._sample_conditional_indices_from_tmap( - rng_resample, tmat, self.k_noise_per_x, + rng_resample, + tmat, + self.k_noise_per_x, (batch["source"], batch["source_q"], batch["condition"]), - (batch["target"], batch["target_q"]) + (batch["target"], batch["target_q"]), + source_is_balanced=(self.tau_a == 1.0) ) rng_noise = jax.random.split(rng_noise, (len(batch["target"]))) - tmat_latent_data = jax.vmap(self.match_latent_to_data_fn, 0, 0)( - key=rng_noise, x=batch["noise"], y=batch["target"] - ) - (batch["source"], batch["source_q"], batch["condition"] - ), (batch["target"], batch["target_q"]) = self._resample_data( - rng_latent_data_match, tmat_latent_data, - (batch["source"], batch["source_q"], batch["condition"]), - (batch["target"], batch["target_q"]) - ) + if self.solver_latent_to_data is not None: + tmats_latent_data = jnp.array( + jax.vmap(self.match_latent_to_data_fn, 0, + 0)(key=rng_noise, x=batch["noise"], y=batch["target"]) + ) + + if self.k_noise_per_x > 1: + rng_latent_data_match = jax.random.split( + rng_latent_data_match, batch_size + ) + (batch["source"], batch["source_q"], batch["condition"] + ), (batch["target"], + batch["target_q"]) = jax.vmap(self._resample_data, 0, 0)( + rng_latent_data_match, tmats_latent_data, + (batch["source"], batch["source_q"], batch["condition"]), + (batch["target"], batch["target_q"]) + ) + #(batch["source"], batch["source_q"], batch["condition"] + #), (batch["target"], batch["target_q"]) = self._resample_data( + # 
rng_latent_data_match, tmat_latent_data, + # (batch["source"], batch["source_q"], batch["condition"]), + # (batch["target"], batch["target_q"]) + #) batch = { - key: jnp.reshape(arr, (len(batch["source"]), -1)) + key: + jnp.reshape(arr, (len(batch["source"]), + -1)) if arr is not None else None for key, arr in batch.items() } @@ -282,8 +308,8 @@ def __call__(self, train_loader, valid_loader) -> None: self.state_eta, self.state_xi, eta_predictions, xi_predictions, loss_a, loss_b = self.unbalancedness_step_fn( batch, tmat.sum(axis=1), tmat.sum(axis=0) ) - if iter % self.valid_freq == 0: - self._valid_step(valid_loader, iter) + if iteration % self.valid_freq == 0: + self._valid_step(valid_loader, iteration) if self.checkpoint_manager is not None: states_to_save = { "state_neural_vector_field": self.state_neural_vector_field @@ -292,7 +318,7 @@ def __call__(self, train_loader, valid_loader) -> None: states_to_save["state_eta"] = self.state_mlp if self.state_xi is not None: states_to_save["state_xi"] = self.state_xi - self.checkpoint_manager.save(iter, states_to_save) + self.checkpoint_manager.save(iteration, states_to_save) def _get_step_fn(self) -> Callable: @@ -304,28 +330,34 @@ def step_fn( ): def loss_fn( - params: jax.Array, t: jax.Array, noise: jax.Array, - batch: Dict[str, jnp.array], keys_model: random.PRNGKeyArray + params: jax.Array, batch: Dict[str, jnp.array], + keys_model: random.PRNGKeyArray ): - x_t = self.flow.compute_xt(noise, t, batch["latent"], batch["target"]) + x_t = self.flow.compute_xt( + batch["noise"], batch["time"], batch["latent"], batch["target"] + ) apply_fn = functools.partial( state_neural_vector_field.apply_fn, {"params": params} ) - cond_input = jnp.concatenate([batch["source"], batch["condition"]], - axis=-1) + + if batch["condition"] is None: + cond_input = batch["source"] + else: + cond_input = jnp.concatenate([batch["source"], batch["condition"]], + axis=-1) v_t = jax.vmap(apply_fn)( - t=t, x=x_t, condition=cond_input, 
keys_model=keys_model + t=batch["time"], x=x_t, condition=cond_input, keys_model=keys_model + ) + u_t = self.flow.compute_ut( + batch["time"], batch["latent"], batch["target"] ) - u_t = self.flow.compute_ut(t, batch["latent"], batch["target"]) return jnp.mean((v_t - u_t) ** 2) + keys_model = random.split(key, len(batch["noise"])) + grad_fn = jax.value_and_grad(loss_fn, has_aux=False) - loss, grads = grad_fn( - state_neural_vector_field.params, - state_neural_vector_field.apply_fn, - batch, - ) + loss, grads = grad_fn(state_neural_vector_field.params, batch, keys_model) return state_neural_vector_field.apply_gradients(grads=grads), loss @@ -333,9 +365,11 @@ def loss_fn( def transport( self, - source: jnp.array, - seed: int = 0, - diffeqsolve_kwargs: Dict[str, Any] = types.MappingProxyType({}) + source: jax.Array, + condition: jax.Array, + rng: random.PRNGKeyArray = random.PRNGKey(0), + diffeqsolve_kwargs: Dict[str, Any] = types.MappingProxyType({}), + forward: bool = True, ) -> Union[jnp.array, diffrax.Solution, Optional[jnp.ndarray]]: """Transport the distribution. @@ -352,23 +386,33 @@ def transport( ------- The transported samples, the solution of the neural ODE, and the rescaling factor. 
""" + if not forward: + raise NotImplementedError diffeqsolve_kwargs = dict(diffeqsolve_kwargs) - rng = jax.random.PRNGKey(seed) - latent_shape = (len(source),) - latent_batch = self.noise_fn(rng, shape=latent_shape) + assert len(source) == len(condition) if condition is not None else True + + latent_batch = self.latent_noise_fn( + rng, shape=(len(source), self.output_dim) + ) + cond_input = source if condition is None else jnp.concatenate([ + source, condition + ], + axis=-1) apply_fn_partial = partial( - self.state_neural_vector_field.apply_fn, condition=source + self.state_neural_vector_field.apply_fn, condition=cond_input ) + t0 = jnp.zeros((len(source),1)) + t1 = jnp.ones((len(source),1)) solution = diffrax.diffeqsolve( diffrax.ODETerm( lambda t, y, *args: apply_fn_partial({"params": self.state_neural_vector_field.params}, t=t, - latent=y) + x=y) ), diffeqsolve_kwargs.pop("solver", diffrax.Tsit5()), - t0=0, - t1=1, + t0=t0, + t1=t1, dt0=diffeqsolve_kwargs.pop("dt0", None), y0=latent_batch, stepsize_controller=diffeqsolve_kwargs.pop( @@ -376,14 +420,7 @@ def transport( ), **diffeqsolve_kwargs, ) - if self.state_eta is not None: - weight_factors = self.state_eta.apply_fn({ - "params": self.state_eta.params - }, - x=source) - else: - weight_factors = jnp.ones(source.shape) - return solution.ys, solution, weight_factors + return solution.ys def _valid_step(self, valid_loader, iter) -> None: next(valid_loader) @@ -407,3 +444,8 @@ def sample_t( #TODO: make more general self, key: random.PRNGKey, batch_size: int ) -> jnp.ndarray: #TODO: make more general return random.uniform(key, [batch_size, 1]) + + def sample_noise( #TODO: make more general + self, key: random.PRNGKey, batch_size: int + ) -> jnp.ndarray: #TODO: make more general + return random.normal(key, shape=(batch_size, self.input_dim)) diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index f1b6e16f8..161a5a1ab 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -86,3 
+86,69 @@ def data_loader_gaussian_with_conditions(): conditions = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 1)) target = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + 1.0 return DataLoader(source, target, conditions, 16) + + +class GENOTDataLoader: + + def __init__( + self, + source_lin: Optional[jax.Array], + source_quad: Optional[jax.Array], + target_lin: Optional[jax.Array], + target_quad: Optional[jax.Array], + conditions: Optional[jax.Array], + batch_size: int = 64 + ) -> None: + super().__init__() + self.source_lin = source_lin + self.target_lin = target_lin + self.source_quad = source_quad + self.target_quad = target_quad + self.conditions = conditions + self.batch_size = batch_size + self.key = jax.random.PRNGKey(0) + + def __next__(self) -> jax.Array: + key, self.key = jax.random.split(self.key) + inds_source = jax.random.choice( + key, len(self.source_lin), shape=[self.batch_size] + ) + inds_target = jax.random.choice( + key, len(self.target_lin), shape=[self.batch_size] + ) + return self.source_lin[ + inds_source, : + ] if self.source_lin is not None else None, self.source_quad[ + inds_source, : + ] if self.source_quad is not None else None, self.target_lin[ + inds_target, : + ] if self.target_lin is not None else None, self.target_quad[ + inds_target, : + ] if self.target_quad is not None else None, self.conditions[ + inds_source, :] if self.conditions is not None else None + + +@pytest.fixture(scope="module") +def genot_data_loader_linear(): + """Returns a data loader for a simple Gaussian mixture.""" + source = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + target = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + 1.0 + return GENOTDataLoader(source, None, target, None, None, 16) + + +@pytest.fixture(scope="module") +def genot_data_loader_quad(): + """Returns a data loader for a simple Gaussian mixture.""" + source = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + target = 
jax.random.normal(jax.random.PRNGKey(0), shape=(100, 1)) + 1.0 + return GENOTDataLoader(None, source, None, target, None, 16) + + +@pytest.fixture(scope="module") +def genot_data_loader_fused(): + """Returns a data loader for a simple Gaussian mixture.""" + source_q = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + target_q = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 1)) + 1.0 + source_lin = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + target_lin = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + 1.0 + return GENOTDataLoader(source_lin, source_q, target_lin, target_q, None, 16) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py new file mode 100644 index 000000000..55e2a351d --- /dev/null +++ b/tests/neural/genot_test.py @@ -0,0 +1,39 @@ +import diffrax +import jax.numpy as jnp +import optax + +from ott.neural.models.models import NeuralVectorField +from ott.neural.solvers.genot import GENOT +from ott.solvers.linear import sinkhorn + + +class TestGENOT: + + def test_genot_linear(self, genot_data_loader_linear): + neural_vf = NeuralVectorField( + output_dim=2, + condition_dim=0, + latent_embed_dim=5, + ) + ot_solver = sinkhorn.Sinkhorn() + optimizer = optax.adam(learning_rate=1e-3) + genot = GENOT( + neural_vf, + input_dim=2, + output_dim=2, + cond_dim=0, + iterations=3, + valid_freq=2, + ot_solver=ot_solver, + optimizer=optimizer + ) + genot(genot_data_loader_linear, genot_data_loader_linear) + + source_lin, source_quad, target_lin, target_quad, condition = next( + genot_data_loader_linear + ) + result_forward = genot.transport( + source_lin, condition=condition, forward=True + ) + assert isinstance(result_forward, diffrax.Solution) + assert jnp.sum(jnp.isnan(result_forward.y)) == 0 From 70a6173715070d956db486fd3e23ae0e553b5f80 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Fri, 24 Nov 2023 12:02:20 +0100 Subject: [PATCH 014/186] [ci skip] fix transport --- src/ott/neural/solvers/flow_matching.py | 
10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/src/ott/neural/solvers/flow_matching.py b/src/ott/neural/solvers/flow_matching.py index bfa5b5110..8ad52310f 100644 --- a/src/ott/neural/solvers/flow_matching.py +++ b/src/ott/neural/solvers/flow_matching.py @@ -171,8 +171,9 @@ def transport( ) -> diffrax.Solution: diffeqsolve_kwargs = dict(diffeqsolve_kwargs) + t0, t1 = (0.0, 1.0) if forward else (1.0, 0.0) def solve_ode( - t0: jax.Array, t1: jax.Array, input: jax.Array, cond: jax.Array + input: jax.Array, cond: jax.Array ): return diffrax.diffeqsolve( diffrax.ODETerm( @@ -192,12 +193,9 @@ def solve_ode( diffrax.PIDController(rtol=1e-5, atol=1e-5) ), **diffeqsolve_kwargs, - ).solution.y + ).ys[0] - arr = jnp.ones((len(data), 1)) - t0, t1 = (arr * 0.0, arr * 1.0) if forward else (arr * 1.0, arr * 0.0) - - out = jax.vmap(solve_ode)(t0, t1, data, condition) + out = jax.vmap(solve_ode)(data, condition) return out def _transport( From 40570e683591325d91941452cd46478775116488 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Fri, 24 Nov 2023 12:51:57 +0100 Subject: [PATCH 015/186] [ci skip] flow matching tests passing --- tests/neural/flow_matching_test.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/tests/neural/flow_matching_test.py b/tests/neural/flow_matching_test.py index 39199106b..ab8b7713f 100644 --- a/tests/neural/flow_matching_test.py +++ b/tests/neural/flow_matching_test.py @@ -46,12 +46,12 @@ def test_flow_matching(self, data_loader_gaussian, flow: Type[BaseFlow]): source, target, condition = next(data_loader_gaussian) result_forward = fm.transport(source, condition=condition, forward=True) - assert isinstance(result_forward, diffrax.Solution) - assert jnp.sum(jnp.isnan(result_forward.y)) == 0 + assert isinstance(result_forward, jax.Array) + assert jnp.sum(jnp.isnan(result_forward)) == 0 result_backward = fm.transport(target, condition=condition, forward=False) - assert 
isinstance(result_backward, diffrax.Solution) - assert jnp.sum(jnp.isnan(result_backward.y)) == 0 + assert isinstance(result_backward, jax.Array) + assert jnp.sum(jnp.isnan(result_backward)) == 0 @pytest.mark.parametrize( "flow", @@ -123,9 +123,9 @@ def test_flow_matching_conditional( source, target, condition = next(data_loader_gaussian_conditional) result_forward = fm.transport(source, condition=condition, forward=True) - assert isinstance(result_forward, diffrax.Solution) - assert jnp.sum(jnp.isnan(result_forward.y)) == 0 + assert isinstance(result_forward, jax.Array) + assert jnp.sum(jnp.isnan(result_forward)) == 0 result_backward = fm.transport(target, condition=condition, forward=False) - assert isinstance(result_backward, diffrax.Solution) - assert jnp.sum(jnp.isnan(result_backward.y)) == 0 + assert isinstance(result_backward, jax.Array) + assert jnp.sum(jnp.isnan(result_backward)) == 0 From b0910ea1355f94e97fc57cbf2b1dec38c0df6a62 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Fri, 24 Nov 2023 14:40:26 +0100 Subject: [PATCH 016/186] [ci skip] add more tests genot --- src/ott/neural/solvers/flow_matching.py | 5 +- src/ott/neural/solvers/genot.py | 87 ++++++++++++------------- tests/neural/conftest.py | 34 ++++++++-- tests/neural/flow_matching_test.py | 1 - tests/neural/genot_test.py | 71 ++++++++++++++++++-- 5 files changed, 139 insertions(+), 59 deletions(-) diff --git a/src/ott/neural/solvers/flow_matching.py b/src/ott/neural/solvers/flow_matching.py index 8ad52310f..909c54207 100644 --- a/src/ott/neural/solvers/flow_matching.py +++ b/src/ott/neural/solvers/flow_matching.py @@ -172,9 +172,8 @@ def transport( diffeqsolve_kwargs = dict(diffeqsolve_kwargs) t0, t1 = (0.0, 1.0) if forward else (1.0, 0.0) - def solve_ode( - input: jax.Array, cond: jax.Array - ): + + def solve_ode(input: jax.Array, cond: jax.Array): return diffrax.diffeqsolve( diffrax.ODETerm( lambda t, x, args: self.state_neural_vector_field. 
diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/solvers/genot.py index 19cf7536c..b72d84c48 100644 --- a/src/ott/neural/solvers/genot.py +++ b/src/ott/neural/solvers/genot.py @@ -1,6 +1,5 @@ import functools import types -from functools import partial from typing import ( Any, Callable, @@ -209,7 +208,7 @@ def setup(self) -> None: Keyword arguments for the setup function """ self.state_neural_vector_field = self.neural_vector_field.create_train_state( - self.rng, self.optimizer, self.input_dim + self.rng, self.optimizer, self.input_dim + self.cond_dim ) self.step_fn = self._get_step_fn() if self.solver_latent_to_data is not None: @@ -231,7 +230,7 @@ def setup(self) -> None: filter_input=True ) else: - self._get_gromov_match_fn( + self.match_fn = self._get_gromov_match_fn( self.ot_solver, self.cost_fn, self.tau_a, self.tau_b, self.scale_cost, self.fused_penalty ) @@ -243,11 +242,11 @@ def __call__(self, train_loader, valid_loader) -> None: batch["source"], batch["source_q"], batch["target"], batch[ "target_q"], batch["condition"] = next(train_loader) - self.rng, rng_time, rng_match, rng_resample, rng_noise, rng_latent_data_match, rng_step_fn = jax.random.split( - self.rng, 7 + self.rng, rng_time, rng_resample, rng_noise, rng_latent_data_match, rng_step_fn = jax.random.split( + self.rng, 6 ) batch_size = len(batch["source"] - ) if "source" in batch else len(batch["source_q"]) + ) if batch["source"] is not None else len(batch["source_q"]) n_samples = batch_size * self.k_noise_per_x batch["time"] = self.sample_t(rng_time, n_samples) batch["noise"] = self.sample_noise(rng_noise, n_samples) @@ -268,12 +267,13 @@ def __call__(self, train_loader, valid_loader) -> None: (batch["target"], batch["target_q"]), source_is_balanced=(self.tau_a == 1.0) ) - rng_noise = jax.random.split(rng_noise, (len(batch["target"]))) - + rng_latent = jax.random.split(rng_noise, batch_size * self.k_noise_per_x) + if self.solver_latent_to_data is not None: + target = 
jnp.concatenate([batch[el] for el in ["target", "target_q"] if batch[el] is not None], axis=1) tmats_latent_data = jnp.array( jax.vmap(self.match_latent_to_data_fn, 0, - 0)(key=rng_noise, x=batch["noise"], y=batch["target"]) + 0)(key=rng_latent, x=batch["latent"], y=target) ) if self.k_noise_per_x > 1: @@ -296,7 +296,7 @@ def __call__(self, train_loader, valid_loader) -> None: batch = { key: - jnp.reshape(arr, (len(batch["source"]), + jnp.reshape(arr, (batch_size*self.k_noise_per_x, -1)) if arr is not None else None for key, arr in batch.items() } @@ -333,24 +333,21 @@ def loss_fn( params: jax.Array, batch: Dict[str, jnp.array], keys_model: random.PRNGKeyArray ): - + target = jnp.concatenate([batch[el] for el in ["target", "target_q"] if batch[el] is not None], axis=1) x_t = self.flow.compute_xt( - batch["noise"], batch["time"], batch["latent"], batch["target"] + batch["noise"], batch["time"], batch["latent"], target ) apply_fn = functools.partial( state_neural_vector_field.apply_fn, {"params": params} ) - if batch["condition"] is None: - cond_input = batch["source"] - else: - cond_input = jnp.concatenate([batch["source"], batch["condition"]], - axis=-1) + cond_input = jnp.concatenate([batch[el] for el in ["source", "source_q", "condition"] if batch[el] is not None], axis=1) + v_t = jax.vmap(apply_fn)( t=batch["time"], x=x_t, condition=cond_input, keys_model=keys_model ) u_t = self.flow.compute_ut( - batch["time"], batch["latent"], batch["target"] + batch["time"], batch["latent"], target ) return jnp.mean((v_t - u_t) ** 2) @@ -366,7 +363,7 @@ def loss_fn( def transport( self, source: jax.Array, - condition: jax.Array, + condition: Optional[jax.Array], rng: random.PRNGKeyArray = random.PRNGKey(0), diffeqsolve_kwargs: Dict[str, Any] = types.MappingProxyType({}), forward: bool = True, @@ -391,36 +388,36 @@ def transport( diffeqsolve_kwargs = dict(diffeqsolve_kwargs) assert len(source) == len(condition) if condition is not None else True - latent_batch = 
self.latent_noise_fn( - rng, shape=(len(source), self.output_dim) - ) + latent_batch = self.latent_noise_fn(rng, shape=(len(source),)) cond_input = source if condition is None else jnp.concatenate([ source, condition ], axis=-1) - apply_fn_partial = partial( - self.state_neural_vector_field.apply_fn, condition=cond_input - ) - t0 = jnp.zeros((len(source),1)) - t1 = jnp.ones((len(source),1)) - solution = diffrax.diffeqsolve( - diffrax.ODETerm( - lambda t, y, *args: - apply_fn_partial({"params": self.state_neural_vector_field.params}, - t=t, - x=y) - ), - diffeqsolve_kwargs.pop("solver", diffrax.Tsit5()), - t0=t0, - t1=t1, - dt0=diffeqsolve_kwargs.pop("dt0", None), - y0=latent_batch, - stepsize_controller=diffeqsolve_kwargs.pop( - "stepsize_controller", diffrax.PIDController(rtol=1e-3, atol=1e-6) - ), - **diffeqsolve_kwargs, - ) - return solution.ys + t0, t1 = (0.0, 1.0) + + def solve_ode(input: jax.Array, cond: jax.Array): + return diffrax.diffeqsolve( + diffrax.ODETerm( + lambda t, x, args: self.state_neural_vector_field. 
+ apply_fn({"params": self.state_neural_vector_field.params}, + t=t, + x=x, + condition=cond) + ), + diffeqsolve_kwargs.pop("solver", diffrax.Tsit5()), + t0=t0, + t1=t1, + dt0=diffeqsolve_kwargs.pop("dt0", None), + y0=input, + stepsize_controller=diffeqsolve_kwargs.pop( + "stepsize_controller", + diffrax.PIDController(rtol=1e-5, atol=1e-5) + ), + **diffeqsolve_kwargs, + ).ys[0] + + out = jax.vmap(solve_ode)(latent_batch, cond_input) + return out def _valid_step(self, valid_loader, iter) -> None: next(valid_loader) diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index 161a5a1ab..c9b226ce6 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -100,6 +100,23 @@ def __init__( batch_size: int = 64 ) -> None: super().__init__() + if source_lin is not None: + if source_quad is not None: + assert len(source_lin) == len(source_quad) + else: + self.n_source = len(source_lin) + else: + self.n_source = len(source_quad) + if conditions is not None: + assert len(conditions) == self.n_source + if target_lin is not None: + if target_quad is not None: + assert len(target_lin) == len(target_quad) + else: + self.n_target = len(target_lin) + else: + self.n_target = len(target_quad) + self.source_lin = source_lin self.target_lin = target_lin self.source_quad = source_quad @@ -110,12 +127,8 @@ def __init__( def __next__(self) -> jax.Array: key, self.key = jax.random.split(self.key) - inds_source = jax.random.choice( - key, len(self.source_lin), shape=[self.batch_size] - ) - inds_target = jax.random.choice( - key, len(self.target_lin), shape=[self.batch_size] - ) + inds_source = jax.random.choice(key, self.n_source, shape=[self.batch_size]) + inds_target = jax.random.choice(key, self.n_target, shape=[self.batch_size]) return self.source_lin[ inds_source, : ] if self.source_lin is not None else None, self.source_quad[ @@ -136,6 +149,15 @@ def genot_data_loader_linear(): return GENOTDataLoader(source, None, target, None, None, 16) 
+@pytest.fixture(scope="module") +def genot_data_loader_linear_conditional(): + """Returns a data loader for a simple Gaussian mixture.""" + source = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + target = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + 1.0 + conditions = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 4)) + return GENOTDataLoader(source, None, target, None, conditions, 16) + + @pytest.fixture(scope="module") def genot_data_loader_quad(): """Returns a data loader for a simple Gaussian mixture.""" diff --git a/tests/neural/flow_matching_test.py b/tests/neural/flow_matching_test.py index ab8b7713f..858c90084 100644 --- a/tests/neural/flow_matching_test.py +++ b/tests/neural/flow_matching_test.py @@ -1,6 +1,5 @@ from typing import Type -import diffrax import jax import jax.numpy as jnp import optax diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 55e2a351d..c1bf870bd 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -1,15 +1,16 @@ -import diffrax +import jax import jax.numpy as jnp import optax from ott.neural.models.models import NeuralVectorField from ott.neural.solvers.genot import GENOT from ott.solvers.linear import sinkhorn +from ott.solvers.quadratic import gromov_wasserstein class TestGENOT: - def test_genot_linear(self, genot_data_loader_linear): + def test_genot_linear_unconditional(self, genot_data_loader_linear): neural_vf = NeuralVectorField( output_dim=2, condition_dim=0, @@ -35,5 +36,67 @@ def test_genot_linear(self, genot_data_loader_linear): result_forward = genot.transport( source_lin, condition=condition, forward=True ) - assert isinstance(result_forward, diffrax.Solution) - assert jnp.sum(jnp.isnan(result_forward.y)) == 0 + assert isinstance(result_forward, jax.Array) + assert jnp.sum(jnp.isnan(result_forward)) == 0 + + def test_genot_quad_unconditional(self, genot_data_loader_quad): + neural_vf = NeuralVectorField( + output_dim=2, + condition_dim=0, + 
latent_embed_dim=5, + ) + ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) + optimizer = optax.adam(learning_rate=1e-3) + genot = GENOT( + neural_vf, + input_dim=1, + output_dim=2, + cond_dim=0, + epsilon=None, + iterations=3, + valid_freq=2, + ot_solver=ot_solver, + optimizer=optimizer + ) + genot(genot_data_loader_quad, genot_data_loader_quad) + + source_lin, source_quad, target_lin, target_quad, condition = next( + genot_data_loader_quad + ) + result_forward = genot.transport( + source_quad, condition=condition, forward=True + ) + assert isinstance(result_forward, jax.Array) + assert jnp.sum(jnp.isnan(result_forward)) == 0 + + def test_genot_linear_conditional(self, genot_data_loader_linear_conditional): + neural_vf = NeuralVectorField( + output_dim=2, + condition_dim=4, + latent_embed_dim=5, + ) + ot_solver = sinkhorn.Sinkhorn() + optimizer = optax.adam(learning_rate=1e-3) + genot = GENOT( + neural_vf, + input_dim=2, + output_dim=2, + cond_dim=4, + iterations=3, + valid_freq=2, + ot_solver=ot_solver, + optimizer=optimizer + ) + genot( + genot_data_loader_linear_conditional, + genot_data_loader_linear_conditional + ) + + source_lin, source_quad, target_lin, target_quad, condition = next( + genot_data_loader_linear_conditional + ) + result_forward = genot.transport( + source_lin, condition=condition, forward=True + ) + assert isinstance(result_forward, jax.Array) + assert jnp.sum(jnp.isnan(result_forward)) == 0 From 542f5122ebdc3cf5d7ff0eefa0ed03d86246691e Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Fri, 24 Nov 2023 15:37:06 +0100 Subject: [PATCH 017/186] [ci skip] add more tests genot --- src/ott/neural/solvers/flow_matching.py | 34 +---- src/ott/neural/solvers/genot.py | 43 +++--- tests/neural/conftest.py | 24 +++ tests/neural/genot_test.py | 190 +++++++++++++++++++++--- 4 files changed, 219 insertions(+), 72 deletions(-) diff --git a/src/ott/neural/solvers/flow_matching.py b/src/ott/neural/solvers/flow_matching.py index 
909c54207..a80f4cc49 100644 --- a/src/ott/neural/solvers/flow_matching.py +++ b/src/ott/neural/solvers/flow_matching.py @@ -194,39 +194,7 @@ def solve_ode(input: jax.Array, cond: jax.Array): **diffeqsolve_kwargs, ).ys[0] - out = jax.vmap(solve_ode)(data, condition) - return out - - def _transport( - self, - data: jnp.array, - condition: Optional[jax.Array], - forward: bool = True, - diffeqsolve_kwargs: Dict[str, Any] = types.MappingProxyType({}) - ) -> diffrax.Solution: - diffeqsolve_kwargs = dict(diffeqsolve_kwargs) - arr = jnp.ones((len(data), 1)) - t0, t1 = (arr * 0.0, arr * 1.0) if forward else (arr * 1.0, arr * 0.0) - apply_fn_partial = functools.partial( - self.state_neural_vector_field.apply_fn, - params={"params": self.state_neural_vector_field.params}, - condition=condition - ) - term = diffrax.ODETerm(lambda t, y, *args: apply_fn_partial(t, y, *args)) - solver = diffeqsolve_kwargs.pop("solver", diffrax.Tsit5()) - stepsize_controller = diffeqsolve_kwargs.pop( - "stepsize_controller", diffrax.PIDController(rtol=1e-5, atol=1e-5) - ) - return diffrax.diffeqsolve( - term, - solver, - t0=t0, - t1=t1, - dt0=diffeqsolve_kwargs.pop("dt0", None), - y0=data, - stepsize_controller=stepsize_controller, - **diffeqsolve_kwargs, - ) + return jax.vmap(solve_ode)(data, condition) def _valid_step(self, valid_loader, iter) -> None: next(valid_loader) diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/solvers/genot.py index b72d84c48..1d9a9fc72 100644 --- a/src/ott/neural/solvers/genot.py +++ b/src/ott/neural/solvers/genot.py @@ -208,7 +208,7 @@ def setup(self) -> None: Keyword arguments for the setup function """ self.state_neural_vector_field = self.neural_vector_field.create_train_state( - self.rng, self.optimizer, self.input_dim + self.cond_dim + self.rng, self.optimizer, self.output_dim ) self.step_fn = self._get_step_fn() if self.solver_latent_to_data is not None: @@ -245,13 +245,16 @@ def __call__(self, train_loader, valid_loader) -> None: self.rng, 
rng_time, rng_resample, rng_noise, rng_latent_data_match, rng_step_fn = jax.random.split( self.rng, 6 ) - batch_size = len(batch["source"] - ) if batch["source"] is not None else len(batch["source_q"]) + batch_size = len(batch["source"]) if batch["source"] is not None else len( + batch["source_q"] + ) n_samples = batch_size * self.k_noise_per_x batch["time"] = self.sample_t(rng_time, n_samples) batch["noise"] = self.sample_noise(rng_noise, n_samples) batch["latent"] = self.latent_noise_fn( - rng_noise, shape=(batch_size, self.k_noise_per_x) + rng_noise, + shape=(batch_size, self.k_noise_per_x) if self.k_noise_per_x > 1 else + (batch_size,) ) tmat = self.match_fn( @@ -268,9 +271,12 @@ def __call__(self, train_loader, valid_loader) -> None: source_is_balanced=(self.tau_a == 1.0) ) rng_latent = jax.random.split(rng_noise, batch_size * self.k_noise_per_x) - + if self.solver_latent_to_data is not None: - target = jnp.concatenate([batch[el] for el in ["target", "target_q"] if batch[el] is not None], axis=1) + target = jnp.concatenate([ + batch[el] for el in ["target", "target_q"] if batch[el] is not None + ], + axis=1) tmats_latent_data = jnp.array( jax.vmap(self.match_latent_to_data_fn, 0, 0)(key=rng_latent, x=batch["latent"], y=target) @@ -293,10 +299,9 @@ def __call__(self, train_loader, valid_loader) -> None: # (batch["source"], batch["source_q"], batch["condition"]), # (batch["target"], batch["target_q"]) #) - batch = { key: - jnp.reshape(arr, (batch_size*self.k_noise_per_x, + jnp.reshape(arr, (batch_size * self.k_noise_per_x, -1)) if arr is not None else None for key, arr in batch.items() } @@ -333,7 +338,10 @@ def loss_fn( params: jax.Array, batch: Dict[str, jnp.array], keys_model: random.PRNGKeyArray ): - target = jnp.concatenate([batch[el] for el in ["target", "target_q"] if batch[el] is not None], axis=1) + target = jnp.concatenate([ + batch[el] for el in ["target", "target_q"] if batch[el] is not None + ], + axis=1) x_t = self.flow.compute_xt( batch["noise"], 
batch["time"], batch["latent"], target ) @@ -341,14 +349,16 @@ def loss_fn( state_neural_vector_field.apply_fn, {"params": params} ) - cond_input = jnp.concatenate([batch[el] for el in ["source", "source_q", "condition"] if batch[el] is not None], axis=1) - + cond_input = jnp.concatenate([ + batch[el] + for el in ["source", "source_q", "condition"] + if batch[el] is not None + ], + axis=1) v_t = jax.vmap(apply_fn)( t=batch["time"], x=x_t, condition=cond_input, keys_model=keys_model ) - u_t = self.flow.compute_ut( - batch["time"], batch["latent"], target - ) + u_t = self.flow.compute_ut(batch["time"], batch["latent"], target) return jnp.mean((v_t - u_t) ** 2) keys_model = random.split(key, len(batch["noise"])) @@ -416,8 +426,7 @@ def solve_ode(input: jax.Array, cond: jax.Array): **diffeqsolve_kwargs, ).ys[0] - out = jax.vmap(solve_ode)(latent_batch, cond_input) - return out + return jax.vmap(solve_ode)(latent_batch, cond_input) def _valid_step(self, valid_loader, iter) -> None: next(valid_loader) @@ -445,4 +454,4 @@ def sample_t( #TODO: make more general def sample_noise( #TODO: make more general self, key: random.PRNGKey, batch_size: int ) -> jnp.ndarray: #TODO: make more general - return random.normal(key, shape=(batch_size, self.input_dim)) + return random.normal(key, shape=(batch_size, self.output_dim)) diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index c9b226ce6..2dc9f1e43 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -103,6 +103,7 @@ def __init__( if source_lin is not None: if source_quad is not None: assert len(source_lin) == len(source_quad) + self.n_source = len(source_lin) else: self.n_source = len(source_lin) else: @@ -112,6 +113,7 @@ def __init__( if target_lin is not None: if target_quad is not None: assert len(target_lin) == len(target_quad) + self.n_target = len(target_lin) else: self.n_target = len(target_lin) else: @@ -166,6 +168,15 @@ def genot_data_loader_quad(): return GENOTDataLoader(None, source, 
None, target, None, 16) +@pytest.fixture(scope="module") +def genot_data_loader_quad_conditional(): + """Returns a data loader for a simple Gaussian mixture.""" + source = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + target = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 1)) + 1.0 + conditions = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 7)) + return GENOTDataLoader(None, source, None, target, conditions, 16) + + @pytest.fixture(scope="module") def genot_data_loader_fused(): """Returns a data loader for a simple Gaussian mixture.""" @@ -174,3 +185,16 @@ def genot_data_loader_fused(): source_lin = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) target_lin = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + 1.0 return GENOTDataLoader(source_lin, source_q, target_lin, target_q, None, 16) + + +@pytest.fixture(scope="module") +def genot_data_loader_fused_conditional(): + """Returns a data loader for a simple Gaussian mixture.""" + source_q = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + target_q = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 1)) + 1.0 + source_lin = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + target_lin = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + 1.0 + conditions = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 1)) + return GENOTDataLoader( + source_lin, source_q, target_lin, target_q, conditions, 16 + ) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index c1bf870bd..5f59db542 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -1,6 +1,9 @@ +from typing import Iterator + import jax import jax.numpy as jnp import optax +import pytest from ott.neural.models.models import NeuralVectorField from ott.neural.solvers.genot import GENOT @@ -9,24 +12,36 @@ class TestGENOT: + #TODO: add tests for unbalancedness + + @pytest.mark.parametrize("k_noise_per_x", [1, 2]) + def test_genot_linear_unconditional( + self, 
genot_data_loader_linear: Iterator, k_noise_per_x: int + ): + source_lin, source_quad, target_lin, target_quad, condition = next( + genot_data_loader_linear + ) + source_dim = source_lin.shape[1] + target_dim = target_lin.shape[1] + condition_dim = 0 - def test_genot_linear_unconditional(self, genot_data_loader_linear): neural_vf = NeuralVectorField( - output_dim=2, - condition_dim=0, + output_dim=target_dim, + condition_dim=condition_dim, latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() optimizer = optax.adam(learning_rate=1e-3) genot = GENOT( neural_vf, - input_dim=2, - output_dim=2, - cond_dim=0, + input_dim=source_dim, + output_dim=target_dim, + cond_dim=condition_dim, iterations=3, valid_freq=2, ot_solver=ot_solver, - optimizer=optimizer + optimizer=optimizer, + k_noise_per_x=k_noise_per_x, ) genot(genot_data_loader_linear, genot_data_loader_linear) @@ -39,53 +54,109 @@ def test_genot_linear_unconditional(self, genot_data_loader_linear): assert isinstance(result_forward, jax.Array) assert jnp.sum(jnp.isnan(result_forward)) == 0 - def test_genot_quad_unconditional(self, genot_data_loader_quad): + @pytest.mark.parametrize("k_noise_per_x", [1, 2]) + def test_genot_quad_unconditional( + self, genot_data_loader_quad: Iterator, k_noise_per_x: int + ): + source_lin, source_quad, target_lin, target_quad, condition = next( + genot_data_loader_quad + ) + source_dim = source_quad.shape[1] + target_dim = target_quad.shape[1] + condition_dim = 0 neural_vf = NeuralVectorField( - output_dim=2, - condition_dim=0, + output_dim=target_dim, + condition_dim=condition_dim, latent_embed_dim=5, ) ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) optimizer = optax.adam(learning_rate=1e-3) genot = GENOT( neural_vf, - input_dim=1, - output_dim=2, - cond_dim=0, + input_dim=source_dim, + output_dim=target_dim, + cond_dim=condition_dim, epsilon=None, iterations=3, valid_freq=2, ot_solver=ot_solver, - optimizer=optimizer + optimizer=optimizer, + 
k_noise_per_x=k_noise_per_x, ) genot(genot_data_loader_quad, genot_data_loader_quad) + result_forward = genot.transport( + source_quad, condition=condition, forward=True + ) + assert isinstance(result_forward, jax.Array) + assert jnp.sum(jnp.isnan(result_forward)) == 0 + + @pytest.mark.parametrize("k_noise_per_x", [1, 2]) + def test_genot_fused_unconditional( + self, genot_data_loader_fused: Iterator, k_noise_per_x: int + ): source_lin, source_quad, target_lin, target_quad, condition = next( - genot_data_loader_quad + genot_data_loader_fused + ) + source_dim = source_lin.shape[1] + source_quad.shape[1] + target_dim = target_lin.shape[1] + target_quad.shape[1] + condition_dim = 0 + neural_vf = NeuralVectorField( + output_dim=target_dim, + condition_dim=condition_dim, + latent_embed_dim=5, + ) + ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) + optimizer = optax.adam(learning_rate=1e-3) + genot = GENOT( + neural_vf, + input_dim=source_dim, + output_dim=target_dim, + cond_dim=condition_dim, + epsilon=None, + iterations=3, + valid_freq=2, + ot_solver=ot_solver, + optimizer=optimizer, + fused_penalty=0.5, + k_noise_per_x=k_noise_per_x, ) + genot(genot_data_loader_fused, genot_data_loader_fused) + result_forward = genot.transport( source_quad, condition=condition, forward=True ) assert isinstance(result_forward, jax.Array) assert jnp.sum(jnp.isnan(result_forward)) == 0 - def test_genot_linear_conditional(self, genot_data_loader_linear_conditional): + @pytest.mark.parametrize("k_noise_per_x", [1, 2]) + def test_genot_linear_conditional( + self, genot_data_loader_linear_conditional: Iterator, k_noise_per_x: int + ): + source_lin, source_quad, target_lin, target_quad, condition = next( + genot_data_loader_linear_conditional + ) + source_dim = source_lin.shape[1] + target_dim = target_lin.shape[1] + condition_dim = condition.shape[1] + neural_vf = NeuralVectorField( - output_dim=2, - condition_dim=4, + output_dim=target_dim, + condition_dim=source_dim + 
condition_dim, latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() optimizer = optax.adam(learning_rate=1e-3) genot = GENOT( neural_vf, - input_dim=2, - output_dim=2, - cond_dim=4, + input_dim=source_dim, + output_dim=target_dim, + cond_dim=condition_dim, iterations=3, valid_freq=2, ot_solver=ot_solver, - optimizer=optimizer + optimizer=optimizer, + k_noise_per_x=k_noise_per_x, ) genot( genot_data_loader_linear_conditional, @@ -100,3 +171,78 @@ def test_genot_linear_conditional(self, genot_data_loader_linear_conditional): ) assert isinstance(result_forward, jax.Array) assert jnp.sum(jnp.isnan(result_forward)) == 0 + + @pytest.mark.parametrize("k_noise_per_x", [1, 2]) + def test_genot_quad_conditional( + self, genot_data_loader_quad: Iterator, k_noise_per_x: int + ): + source_lin, source_quad, target_lin, target_quad, condition = next( + genot_data_loader_quad + ) + source_dim = source_quad.shape[1] + target_dim = target_quad.shape[1] + condition_dim = condition.shape[1] + neural_vf = NeuralVectorField( + output_dim=target_dim, + condition_dim=condition_dim, + latent_embed_dim=5, + ) + ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) + optimizer = optax.adam(learning_rate=1e-3) + genot = GENOT( + neural_vf, + input_dim=source_dim, + output_dim=target_dim, + cond_dim=condition_dim, + epsilon=None, + iterations=3, + valid_freq=2, + ot_solver=ot_solver, + optimizer=optimizer, + k_noise_per_x=k_noise_per_x, + ) + genot(genot_data_loader_quad, genot_data_loader_quad) + + result_forward = genot.transport( + source_quad, condition=condition, forward=True + ) + assert isinstance(result_forward, jax.Array) + assert jnp.sum(jnp.isnan(result_forward)) == 0 + + @pytest.mark.parametrize("k_noise_per_x", [1, 2]) + def test_genot_fused_conditional( + self, genot_data_loader_fused: Iterator, k_noise_per_x: int + ): + source_lin, source_quad, target_lin, target_quad, condition = next( + genot_data_loader_fused + ) + source_dim = source_lin.shape[1] + 
source_quad.shape[1] + target_dim = target_lin.shape[1] + target_quad.shape[1] + condition_dim = condition.shape[1] + neural_vf = NeuralVectorField( + output_dim=target_dim, + condition_dim=condition_dim, + latent_embed_dim=5, + ) + ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) + optimizer = optax.adam(learning_rate=1e-3) + genot = GENOT( + neural_vf, + input_dim=source_dim, + output_dim=target_dim, + cond_dim=condition_dim, + epsilon=None, + iterations=3, + valid_freq=2, + ot_solver=ot_solver, + optimizer=optimizer, + fused_penalty=0.5, + k_noise_per_x=k_noise_per_x, + ) + genot(genot_data_loader_fused, genot_data_loader_fused) + + result_forward = genot.transport( + source_quad, condition=condition, forward=True + ) + assert isinstance(result_forward, jax.Array) + assert jnp.sum(jnp.isnan(result_forward)) == 0 From c067f45b04c18dc7f265aa6a42cd645a6eca761f Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Sun, 26 Nov 2023 13:26:12 +0100 Subject: [PATCH 018/186] [ci skip] add TimeSampler --- src/ott/neural/data/dataloaders.py | 13 +++++ src/ott/neural/solvers/base_solver.py | 13 +++++ src/ott/neural/solvers/flows.py | 48 +++++++++++++++++ src/ott/neural/solvers/genot.py | 53 ++++++++++++------- .../solvers/{flow_matching.py => otfm.py} | 28 ++++++---- tests/neural/flow_matching_test.py | 37 ++++++++++--- tests/neural/genot_test.py | 26 +++++++++ 7 files changed, 182 insertions(+), 36 deletions(-) rename src/ott/neural/solvers/{flow_matching.py => otfm.py} (89%) diff --git a/src/ott/neural/data/dataloaders.py b/src/ott/neural/data/dataloaders.py index fe0c367b7..acceb36c1 100644 --- a/src/ott/neural/data/dataloaders.py +++ b/src/ott/neural/data/dataloaders.py @@ -1,3 +1,16 @@ +# Copyright OTT-JAX +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. #import tensorflow as tf diff --git a/src/ott/neural/solvers/base_solver.py b/src/ott/neural/solvers/base_solver.py index 9d323b13c..66d3ecbef 100644 --- a/src/ott/neural/solvers/base_solver.py +++ b/src/ott/neural/solvers/base_solver.py @@ -1,3 +1,16 @@ +# Copyright OTT-JAX +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. from abc import ABC, abstractmethod from pathlib import Path from types import MappingProxyType diff --git a/src/ott/neural/solvers/flows.py b/src/ott/neural/solvers/flows.py index 68cc84f5f..1eba46982 100644 --- a/src/ott/neural/solvers/flows.py +++ b/src/ott/neural/solvers/flows.py @@ -1,3 +1,16 @@ +# Copyright OTT-JAX +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. import abc import jax @@ -54,3 +67,38 @@ class BrownianNoiseFlow(StraightFlow): def compute_sigma_t(self, t: jax.Array): return jnp.sqrt(self.sigma * t * (1 - t)) + + +class BaseTimeSampler(abc.ABC): + + @abc.abstractmethod + def __call__(self, rng: jnp.ndarray, num_samples: int) -> jnp.ndarray: + pass + + +class UniformSampler(BaseTimeSampler): + + def __init__(self, low: float = 0.0, high: float = 1.0) -> None: + self.low = low + self.high = high + + def __call__(self, rng: jnp.ndarray, num_samples: int) -> jnp.ndarray: + return jax.random.uniform( + rng, (num_samples, 1), minval=self.low, maxval=self.high + ) + + +class OffsetUniformSampler(BaseTimeSampler): + + def __init__( + self, offset: float, low: float = 0.0, high: float = 1.0 + ) -> None: + self.offset = offset + self.low = low + self.high = high + + def __call__(self, rng: jnp.ndarray, num_samples: int) -> jnp.ndarray: + return ( + jax.random.uniform(rng, (1, 1), minval=self.low, maxval=self.high) + + jnp.arange(num_samples)[:, None] / num_samples + ) % ((self.high - self.low) - self.offset) diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/solvers/genot.py index 1d9a9fc72..f120ea6d8 100644 --- a/src/ott/neural/solvers/genot.py +++ b/src/ott/neural/solvers/genot.py @@ -1,15 +1,28 @@ +# Copyright OTT-JAX +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. import functools import types from typing import ( - Any, - Callable, - Dict, - Literal, - Mapping, - Optional, - Tuple, - Type, - Union, + Any, + Callable, + Dict, + Literal, + Mapping, + Optional, + Tuple, + Type, + Union, ) import diffrax @@ -24,11 +37,16 @@ from ott.geometry import costs from ott.neural.models.models import BaseNeuralVectorField from ott.neural.solvers.base_solver import ( - BaseNeuralSolver, - ResampleMixin, - UnbalancednessMixin, + BaseNeuralSolver, + ResampleMixin, + UnbalancednessMixin, +) +from ott.neural.solvers.flows import ( + BaseFlow, + BaseTimeSampler, + ConstantNoiseFlow, + UniformSampler, ) -from ott.neural.solvers.flows import BaseFlow, ConstantNoiseFlow from ott.solvers import was_solver from ott.solvers.linear import sinkhorn from ott.solvers.quadratic import gromov_wasserstein @@ -53,6 +71,7 @@ def __init__( optimizer: Type[optax.GradientTransformation], checkpoint_manager: Type[checkpoint.CheckpointManager] = None, flow: Type[BaseFlow] = ConstantNoiseFlow(0.0), + time_sampler: Type[BaseTimeSampler] = UniformSampler(), k_noise_per_x: int = 1, t_offset: float = 1e-5, epsilon: float = 1e-2, @@ -166,6 +185,7 @@ def __init__( self.neural_vector_field = neural_vector_field self.state_neural_vector_field: Optional[TrainState] = None self.flow = flow + self.time_sampler = time_sampler self.optimizer = optimizer self.checkpoint_manager = checkpoint_manager self.latent_noise_fn = jax.tree_util.Partial( @@ -249,7 +269,7 @@ def __call__(self, train_loader, valid_loader) -> None: batch["source_q"] ) n_samples = batch_size * 
self.k_noise_per_x - batch["time"] = self.sample_t(rng_time, n_samples) + batch["time"] = self.time_sampler(rng_time, n_samples) batch["noise"] = self.sample_noise(rng_noise, n_samples) batch["latent"] = self.latent_noise_fn( rng_noise, @@ -446,11 +466,6 @@ def load(self, path: str) -> "GENOT": def training_logs(self) -> Dict[str, Any]: raise NotImplementedError - def sample_t( #TODO: make more general - self, key: random.PRNGKey, batch_size: int - ) -> jnp.ndarray: #TODO: make more general - return random.uniform(key, [batch_size, 1]) - def sample_noise( #TODO: make more general self, key: random.PRNGKey, batch_size: int ) -> jnp.ndarray: #TODO: make more general diff --git a/src/ott/neural/solvers/flow_matching.py b/src/ott/neural/solvers/otfm.py similarity index 89% rename from src/ott/neural/solvers/flow_matching.py rename to src/ott/neural/solvers/otfm.py index a80f4cc49..ec0be23da 100644 --- a/src/ott/neural/solvers/flow_matching.py +++ b/src/ott/neural/solvers/otfm.py @@ -1,3 +1,16 @@ +# Copyright OTT-JAX +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
import functools import types from typing import Any, Callable, Dict, Mapping, Optional, Tuple, Type @@ -19,11 +32,12 @@ ) from ott.neural.solvers.flows import ( BaseFlow, + BaseTimeSampler, ) from ott.solvers import was_solver -class FlowMatching(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): +class OTFlowMatching(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): def __init__( self, @@ -34,6 +48,7 @@ def __init__( valid_freq: int, ot_solver: Optional[Type[was_solver.WassersteinSolver]], flow: Type[BaseFlow], + time_sampler: Type[BaseTimeSampler], optimizer: Type[optax.GradientTransformation], checkpoint_manager: Type[checkpoint.CheckpointManager] = None, epsilon: float = 1e-2, @@ -46,7 +61,6 @@ def __init__( callback_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray, jnp.ndarray], Any]] = None, rng: random.PRNGKeyArray = random.PRNGKey(0), - **kwargs: Any, ) -> None: BaseNeuralSolver.__init__( self, iterations=iterations, valid_freq=valid_freq @@ -68,6 +82,7 @@ def __init__( self.input_dim = input_dim self.ot_solver = ot_solver self.flow = flow + self.time_sampler = time_sampler self.optimizer = optimizer self.epsilon = epsilon self.cost_fn = cost_fn @@ -121,7 +136,7 @@ def loss_fn( batch_size = len(batch["source"]) key_noise, key_t, key_model = random.split(key, 3) keys_model = random.split(key_model, batch_size) - t = self.sample_t(key_t, batch_size) + t = self.time_sampler(key_t, batch_size) noise = self.sample_noise(key_noise, batch_size) grad_fn = jax.value_and_grad(loss_fn) loss, grads = grad_fn( @@ -207,17 +222,12 @@ def learn_rescaling(self) -> bool: def save(self, path: str) -> None: raise NotImplementedError - def load(self, path: str) -> "FlowMatching": + def load(self, path: str) -> "OTFlowMatching": raise NotImplementedError def training_logs(self) -> Dict[str, Any]: raise NotImplementedError - def sample_t( #TODO: make more general - self, key: random.PRNGKey, batch_size: int - ) -> jnp.ndarray: #TODO: make more general - return 
random.uniform(key, [batch_size, 1]) - def sample_noise( #TODO: make more general self, key: random.PRNGKey, batch_size: int ) -> jnp.ndarray: #TODO: make more general diff --git a/tests/neural/flow_matching_test.py b/tests/neural/flow_matching_test.py index 858c90084..9529e8a62 100644 --- a/tests/neural/flow_matching_test.py +++ b/tests/neural/flow_matching_test.py @@ -1,3 +1,16 @@ +# Copyright OTT-JAX +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. from typing import Type import jax @@ -6,16 +19,18 @@ import pytest from ott.neural.models.models import NeuralVectorField -from ott.neural.solvers.flow_matching import FlowMatching from ott.neural.solvers.flows import ( - BaseFlow, - BrownianNoiseFlow, - ConstantNoiseFlow, + BaseFlow, + BrownianNoiseFlow, + ConstantNoiseFlow, + OffsetUniformSampler, + UniformSampler, ) +from ott.neural.solvers.otfm import OTFlowMatching from ott.solvers.linear import sinkhorn -class TestFlowMatching: +class TestOTFlowMatching: @pytest.mark.parametrize( "flow", @@ -30,8 +45,9 @@ def test_flow_matching(self, data_loader_gaussian, flow: Type[BaseFlow]): latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() + time_sampler = UniformSampler() optimizer = optax.adam(learning_rate=1e-3) - fm = FlowMatching( + fm = OTFlowMatching( neural_vf, input_dim=2, cond_dim=0, @@ -39,6 +55,7 @@ def test_flow_matching(self, data_loader_gaussian, flow: Type[BaseFlow]): valid_freq=2, ot_solver=ot_solver, flow=flow, + time_sampler=time_sampler, 
optimizer=optimizer ) fm(data_loader_gaussian, data_loader_gaussian) @@ -67,8 +84,9 @@ def test_flow_matching_with_conditions( latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() + time_sampler = OffsetUniformSampler(1e-6) optimizer = optax.adam(learning_rate=1e-3) - fm = FlowMatching( + fm = OTFlowMatching( neural_vf, input_dim=2, cond_dim=1, @@ -76,6 +94,7 @@ def test_flow_matching_with_conditions( valid_freq=2, ot_solver=ot_solver, flow=flow, + time_sampler=time_sampler, optimizer=optimizer ) fm( @@ -107,8 +126,9 @@ def test_flow_matching_conditional( latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() + time_sampler = UniformSampler() optimizer = optax.adam(learning_rate=1e-3) - fm = FlowMatching( + fm = OTFlowMatching( neural_vf, input_dim=2, cond_dim=0, @@ -116,6 +136,7 @@ def test_flow_matching_conditional( valid_freq=2, ot_solver=ot_solver, flow=flow, + time_sampler=time_sampler, optimizer=optimizer ) fm(data_loader_gaussian_conditional, data_loader_gaussian_conditional) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 5f59db542..183af8419 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -1,3 +1,16 @@ +# Copyright OTT-JAX +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
from typing import Iterator import jax @@ -6,6 +19,7 @@ import pytest from ott.neural.models.models import NeuralVectorField +from ott.neural.solvers.flows import OffsetUniformSampler, UniformSampler from ott.neural.solvers.genot import GENOT from ott.solvers.linear import sinkhorn from ott.solvers.quadratic import gromov_wasserstein @@ -31,6 +45,7 @@ def test_genot_linear_unconditional( latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() + time_sampler = UniformSampler() optimizer = optax.adam(learning_rate=1e-3) genot = GENOT( neural_vf, @@ -40,6 +55,7 @@ def test_genot_linear_unconditional( iterations=3, valid_freq=2, ot_solver=ot_solver, + time_sampler=time_sampler, optimizer=optimizer, k_noise_per_x=k_noise_per_x, ) @@ -70,6 +86,7 @@ def test_genot_quad_unconditional( latent_embed_dim=5, ) ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) + time_sampler = OffsetUniformSampler(1e-3) optimizer = optax.adam(learning_rate=1e-3) genot = GENOT( neural_vf, @@ -80,6 +97,7 @@ def test_genot_quad_unconditional( iterations=3, valid_freq=2, ot_solver=ot_solver, + time_sampler=time_sampler, optimizer=optimizer, k_noise_per_x=k_noise_per_x, ) @@ -107,6 +125,7 @@ def test_genot_fused_unconditional( latent_embed_dim=5, ) ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) + time_sampler = UniformSampler() optimizer = optax.adam(learning_rate=1e-3) genot = GENOT( neural_vf, @@ -117,6 +136,7 @@ def test_genot_fused_unconditional( iterations=3, valid_freq=2, ot_solver=ot_solver, + time_sampler=time_sampler, optimizer=optimizer, fused_penalty=0.5, k_noise_per_x=k_noise_per_x, @@ -146,6 +166,7 @@ def test_genot_linear_conditional( latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() + time_sampler = UniformSampler() optimizer = optax.adam(learning_rate=1e-3) genot = GENOT( neural_vf, @@ -155,6 +176,7 @@ def test_genot_linear_conditional( iterations=3, valid_freq=2, ot_solver=ot_solver, + time_sampler=time_sampler, optimizer=optimizer, 
k_noise_per_x=k_noise_per_x, ) @@ -188,6 +210,7 @@ def test_genot_quad_conditional( latent_embed_dim=5, ) ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) + time_sampler = UniformSampler() optimizer = optax.adam(learning_rate=1e-3) genot = GENOT( neural_vf, @@ -198,6 +221,7 @@ def test_genot_quad_conditional( iterations=3, valid_freq=2, ot_solver=ot_solver, + time_sampler=time_sampler, optimizer=optimizer, k_noise_per_x=k_noise_per_x, ) @@ -225,6 +249,7 @@ def test_genot_fused_conditional( latent_embed_dim=5, ) ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) + time_sampler = UniformSampler() optimizer = optax.adam(learning_rate=1e-3) genot = GENOT( neural_vf, @@ -235,6 +260,7 @@ def test_genot_fused_conditional( iterations=3, valid_freq=2, ot_solver=ot_solver, + time_sampler=time_sampler, optimizer=optimizer, fused_penalty=0.5, k_noise_per_x=k_noise_per_x, From 2546afc4e95b5ab73c719f5a1b6b3fde52ea23ca Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Sun, 26 Nov 2023 14:18:21 +0100 Subject: [PATCH 019/186] [ci skip] add docs for TimeSampler and Flow --- src/ott/neural/solvers/flows.py | 112 ++++++++++++++++++++++++++++- src/ott/neural/solvers/genot.py | 32 ++++----- tests/neural/flow_matching_test.py | 10 +-- 3 files changed, 132 insertions(+), 22 deletions(-) diff --git a/src/ott/neural/solvers/flows.py b/src/ott/neural/solvers/flows.py index 1eba46982..19c3d2f67 100644 --- a/src/ott/neural/solvers/flows.py +++ b/src/ott/neural/solvers/flows.py @@ -18,77 +18,178 @@ class BaseFlow(abc.ABC): + """Base class for all flows. + + Args: + sigma: Constant noise used for computing time-dependent noise schedule. + """ def __init__(self, sigma: float) -> None: self.sigma = sigma @abc.abstractmethod def compute_mu_t(self, t: jax.Array, x_0: jax.Array, x_1: jax.Array): + """Compute the mean of the probablitiy path between :math:`x` and :math:`y` at time :math:`t`. + + Args: + t: Time :math:`t`. + x_0: Sample from the source distribution. 
+ x_1: Sample from the target distribution. + """ pass @abc.abstractmethod def compute_sigma_t(self, t: jax.Array): + """Compute the standard deviation of the probablity path at time :math:`t`. + + Args: + t: Time :math:`t`. + """ pass @abc.abstractmethod def compute_ut( self, t: jax.Array, x_0: jax.Array, x_1: jax.Array ) -> jax.Array: + """Evaluate the conditional vector field defined between :math:`x_0` and :math:`x_1` at time :math:`t`. + + Args: + t: Time :math:`t`. + x_0: Sample from the source distribution. + x_1: Sample from the target distribution. + """ pass def compute_xt( self, noise: jax.Array, t: jax.Array, x_0: jax.Array, x_1: jax.Array ) -> jax.Array: + """Sample from the probability path between :math:`x_0` and :math:`x_1` at time :math:`t`. + + Args: + noise: Noise sampled from a standard normal distribution. + t: Time :math:`t`. + x_0: Sample from the source distribution. + x_1: Sample from the target distribution. + + Returns: + Samples from the probability path between :math:`x_0` and :math:`x_1` at time :math:`t`. + """ mu_t = self.compute_mu_t(t, x_0, x_1) sigma_t = self.compute_sigma_t(t) return mu_t + sigma_t * noise -class StraightFlow(BaseFlow): +class StraightFlow(BaseFlow, abc.ABC): + """Base class for flows with straight paths.""" def compute_mu_t( self, t: jax.Array, x_0: jax.Array, x_1: jax.Array ) -> jax.Array: + """Compute the mean of the probablitiy path between :math:`x` and :math:`y` at time :math:`t`. + + Args: + t: Time :math:`t`. + x_0: Sample from the source distribution. + x_1: Sample from the target distribution. + """ return t * x_0 + (1 - t) * x_1 def compute_ut( self, t: jax.Array, x_0: jax.Array, x_1: jax.Array ) -> jax.Array: + """Evaluate the conditional vector field defined between :math:`x_0` and :math:`x_1` at time :math:`t`. + + Args: + t: Time :math:`t`. + x_0: Sample from the source distribution. + x_1: Sample from the target distribution. + + Returns: + Conditional vector field evaluated at time :math:`t`. 
+ """ return x_1 - x_0 class ConstantNoiseFlow(StraightFlow): + r"""Flow with straight paths and constant flow noise :math:`\sigma`.""" def compute_sigma_t(self, t: jax.Array): + r"""Compute noise of the flow at time :math:`t`. + + Args: + t: Time :math:`t`. + + Returns: + Constant, time-independent standard deviation :math:`\sigma`. + """ return self.sigma class BrownianNoiseFlow(StraightFlow): + r"""Sampler for sampling noise implicitly defined by a Schroedinger Bridge problem with parameter `\sigma` such that :math:`\sigma_t = \sigma * \sqrt(t * (1-t))`.""" def compute_sigma_t(self, t: jax.Array): + """Compute the standard deviation of the probablity path at time :math:`t`. + + Args: + t: Time :math:`t`. + + Returns: + Standard deviation of the probablity path at time :math:`t`. + """ return jnp.sqrt(self.sigma * t * (1 - t)) class BaseTimeSampler(abc.ABC): + """Base class for time samplers.""" @abc.abstractmethod def __call__(self, rng: jnp.ndarray, num_samples: int) -> jnp.ndarray: + """Generate `num_samples` samples of the time `math`:t:. + + Args: + rng: Random number generator. + num_samples: Number of samples to generate. + + """ pass class UniformSampler(BaseTimeSampler): + """Sample :math:`t` from a uniform distribution :math:`[low, high]`. + + Args: + low: Lower bound of the uniform distribution. + high: Upper bound of the uniform distribution. + """ def __init__(self, low: float = 0.0, high: float = 1.0) -> None: self.low = low self.high = high def __call__(self, rng: jnp.ndarray, num_samples: int) -> jnp.ndarray: + """Generate `num_samples` samples of the time `math`:t:. + + Args: + rng: Random number generator. + num_samples: Number of samples to generate. + + Returns: + `num_samples` samples of the time :math:`t``. + """ return jax.random.uniform( rng, (num_samples, 1), minval=self.low, maxval=self.high ) class OffsetUniformSampler(BaseTimeSampler): + """Sample :math:`t` from a uniform distribution :math:`[low, high]` with offset `offset`. 
+ + Args: + offset: Offset of the uniform distribution. + low: Lower bound of the uniform distribution. + high: Upper bound of the uniform distribution. + """ def __init__( self, offset: float, low: float = 0.0, high: float = 1.0 @@ -98,6 +199,15 @@ def __init__( self.high = high def __call__(self, rng: jnp.ndarray, num_samples: int) -> jnp.ndarray: + """Generate `num_samples` samples of the time `math`:t:. + + Args: + rng: Random number generator. + num_samples: Number of samples to generate. + + Returns: + An array with `num_samples` samples of the time `math`:t:. + """ return ( jax.random.uniform(rng, (1, 1), minval=self.low, maxval=self.high) + jnp.arange(num_samples)[:, None] / num_samples diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/solvers/genot.py index f120ea6d8..3d6b3fafb 100644 --- a/src/ott/neural/solvers/genot.py +++ b/src/ott/neural/solvers/genot.py @@ -14,15 +14,15 @@ import functools import types from typing import ( - Any, - Callable, - Dict, - Literal, - Mapping, - Optional, - Tuple, - Type, - Union, + Any, + Callable, + Dict, + Literal, + Mapping, + Optional, + Tuple, + Type, + Union, ) import diffrax @@ -37,15 +37,15 @@ from ott.geometry import costs from ott.neural.models.models import BaseNeuralVectorField from ott.neural.solvers.base_solver import ( - BaseNeuralSolver, - ResampleMixin, - UnbalancednessMixin, + BaseNeuralSolver, + ResampleMixin, + UnbalancednessMixin, ) from ott.neural.solvers.flows import ( - BaseFlow, - BaseTimeSampler, - ConstantNoiseFlow, - UniformSampler, + BaseFlow, + BaseTimeSampler, + ConstantNoiseFlow, + UniformSampler, ) from ott.solvers import was_solver from ott.solvers.linear import sinkhorn diff --git a/tests/neural/flow_matching_test.py b/tests/neural/flow_matching_test.py index 9529e8a62..a1135cf2d 100644 --- a/tests/neural/flow_matching_test.py +++ b/tests/neural/flow_matching_test.py @@ -20,11 +20,11 @@ from ott.neural.models.models import NeuralVectorField from ott.neural.solvers.flows 
import ( - BaseFlow, - BrownianNoiseFlow, - ConstantNoiseFlow, - OffsetUniformSampler, - UniformSampler, + BaseFlow, + BrownianNoiseFlow, + ConstantNoiseFlow, + OffsetUniformSampler, + UniformSampler, ) from ott.neural.solvers.otfm import OTFlowMatching from ott.solvers.linear import sinkhorn From 579852f93e6a5c5be9c6a632a8c005e62fb6dc8e Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Sun, 26 Nov 2023 15:56:46 +0100 Subject: [PATCH 020/186] [ci skip] add docs for OTFlowMatching and replace jnp.ndarray by jax.Array --- docs/tutorials/Hessians.ipynb | 2 +- docs/tutorials/Monge_Gap.ipynb | 6 +- docs/tutorials/One_Sinkhorn.ipynb | 4 +- .../tutorials/basic_ot_between_datasets.ipynb | 2 +- docs/tutorials/point_clouds.ipynb | 4 +- .../sinkhorn_divergence_gradient_flow.ipynb | 4 +- .../sparse_monge_displacements.ipynb | 2 +- src/ott/datasets.py | 4 +- src/ott/geometry/costs.py | 166 ++++++++-------- src/ott/geometry/geometry.py | 186 +++++++++--------- src/ott/geometry/graph.py | 44 ++--- src/ott/geometry/grid.py | 40 ++-- src/ott/geometry/low_rank.py | 56 +++--- src/ott/geometry/pointcloud.py | 108 +++++----- src/ott/geometry/segment.py | 33 ++-- src/ott/initializers/linear/initializers.py | 42 ++-- .../initializers/linear/initializers_lr.py | 70 +++---- .../initializers/quadratic/initializers.py | 4 +- src/ott/math/fixed_point_loop.py | 2 +- src/ott/math/matrix_square_root.py | 51 +++-- src/ott/math/unbalanced_functions.py | 23 +-- src/ott/math/utils.py | 22 +-- src/ott/neural/data/dataloaders.py | 2 +- src/ott/neural/models/conjugate_solvers.py | 13 +- src/ott/neural/models/layers.py | 10 +- src/ott/neural/models/models.py | 38 ++-- src/ott/neural/solvers/base_solver.py | 68 ++++--- src/ott/neural/solvers/flows.py | 25 +-- src/ott/neural/solvers/genot.py | 10 +- src/ott/neural/solvers/losses.py | 8 +- src/ott/neural/solvers/map_estimator.py | 28 +-- src/ott/neural/solvers/neuraldual.py | 50 ++--- src/ott/neural/solvers/otfm.py | 100 ++++++++-- 
src/ott/problems/linear/barycenter_problem.py | 20 +- src/ott/problems/linear/linear_problem.py | 13 +- src/ott/problems/linear/potentials.py | 34 ++-- src/ott/problems/quadratic/gw_barycenter.py | 50 +++-- src/ott/problems/quadratic/quadratic_costs.py | 3 +- .../problems/quadratic/quadratic_problem.py | 30 +-- src/ott/solvers/linear/_solve.py | 6 +- src/ott/solvers/linear/acceleration.py | 8 +- .../solvers/linear/continuous_barycenter.py | 18 +- src/ott/solvers/linear/discrete_barycenter.py | 18 +- .../linear/implicit_differentiation.py | 31 ++- src/ott/solvers/linear/lineax_implicit.py | 4 +- src/ott/solvers/linear/lr_utils.py | 42 ++-- src/ott/solvers/linear/sinkhorn.py | 86 ++++---- src/ott/solvers/linear/sinkhorn_lr.py | 129 ++++++------ src/ott/solvers/linear/univariate.py | 14 +- src/ott/solvers/quadratic/_solve.py | 6 +- .../solvers/quadratic/gromov_wasserstein.py | 16 +- .../quadratic/gromov_wasserstein_lr.py | 133 +++++++------ src/ott/solvers/quadratic/gw_barycenter.py | 32 ++- src/ott/tools/gaussian_mixture/fit_gmm.py | 34 ++-- .../tools/gaussian_mixture/fit_gmm_pair.py | 26 +-- src/ott/tools/gaussian_mixture/gaussian.py | 30 +-- .../gaussian_mixture/gaussian_mixture.py | 39 ++-- .../gaussian_mixture/gaussian_mixture_pair.py | 6 +- src/ott/tools/gaussian_mixture/linalg.py | 30 ++- .../tools/gaussian_mixture/probabilities.py | 12 +- src/ott/tools/gaussian_mixture/scale_tril.py | 36 ++-- src/ott/tools/k_means.py | 58 +++--- src/ott/tools/plot.py | 6 +- src/ott/tools/segment_sinkhorn.py | 24 +-- src/ott/tools/sinkhorn_divergence.py | 44 ++--- src/ott/tools/soft_sort.py | 78 ++++---- src/ott/types.py | 8 +- tests/conftest.py | 3 +- tests/geometry/costs_test.py | 2 +- tests/geometry/graph_test.py | 12 +- tests/geometry/low_rank_test.py | 2 +- tests/geometry/scaling_cost_test.py | 6 +- .../initializers/linear/sinkhorn_init_test.py | 8 +- tests/math/matrix_square_root_test.py | 8 +- tests/neural/conftest.py | 2 +- tests/neural/map_estimator_test.py | 6 +- 
tests/neural/meta_initializer_test.py | 10 +- .../linear/continuous_barycenter_test.py | 6 +- tests/solvers/linear/sinkhorn_diff_test.py | 34 ++-- tests/solvers/linear/sinkhorn_misc_test.py | 8 +- tests/solvers/quadratic/fgw_test.py | 12 +- tests/solvers/quadratic/gw_barycenter_test.py | 6 +- tests/solvers/quadratic/gw_test.py | 7 +- tests/solvers/quadratic/lower_bound_test.py | 4 +- tests/tools/k_means_test.py | 16 +- tests/tools/sinkhorn_divergence_test.py | 2 +- tests/tools/soft_sort_test.py | 4 +- 87 files changed, 1271 insertions(+), 1238 deletions(-) diff --git a/docs/tutorials/Hessians.ipynb b/docs/tutorials/Hessians.ipynb index 0e50ec959..f7c8b56d1 100644 --- a/docs/tutorials/Hessians.ipynb +++ b/docs/tutorials/Hessians.ipynb @@ -103,7 +103,7 @@ }, "outputs": [], "source": [ - "def loss(a: jnp.ndarray, x: jnp.ndarray, implicit: bool = True) -> float:\n", + "def loss(a: jax.Array, x: jax.Array, implicit: bool = True) -> float:\n", " return sinkhorn_divergence.sinkhorn_divergence(\n", " pointcloud.PointCloud,\n", " x,\n", diff --git a/docs/tutorials/Monge_Gap.ipynb b/docs/tutorials/Monge_Gap.ipynb index a1622a8c5..ac38d89b4 100644 --- a/docs/tutorials/Monge_Gap.ipynb +++ b/docs/tutorials/Monge_Gap.ipynb @@ -94,13 +94,13 @@ "\n", " name: Literal[\"moon\", \"s_curve\"]\n", " theta_rotation: float = 0.0\n", - " mean: Optional[jnp.ndarray] = None\n", + " mean: Optional[jax.Array] = None\n", " noise: float = 0.01\n", " scale: float = 1.0\n", " batch_size: int = 1024\n", " rng: Optional[jax.Array] = (None,)\n", "\n", - " def __iter__(self) -> Iterator[jnp.ndarray]:\n", + " def __iter__(self) -> Iterator[jax.Array]:\n", " \"\"\"Random sample generator from Gaussian mixture.\n", "\n", " Returns:\n", @@ -108,7 +108,7 @@ " \"\"\"\n", " return self._create_sample_generators()\n", "\n", - " def _create_sample_generators(self) -> Iterator[jnp.ndarray]:\n", + " def _create_sample_generators(self) -> Iterator[jax.Array]:\n", " rng = jax.random.PRNGKey(0) if self.rng is None 
else self.rng\n", "\n", " # define rotation matrix tp rotate samples\n", diff --git a/docs/tutorials/One_Sinkhorn.ipynb b/docs/tutorials/One_Sinkhorn.ipynb index 8c3d98e2e..9465441d8 100644 --- a/docs/tutorials/One_Sinkhorn.ipynb +++ b/docs/tutorials/One_Sinkhorn.ipynb @@ -555,9 +555,7 @@ }, "outputs": [], "source": [ - "def my_sinkhorn(\n", - " geom: geometry.Geometry, a: jnp.ndarray, b: jnp.ndarray, **kwargs\n", - "):\n", + "def my_sinkhorn(geom: geometry.Geometry, a: jax.Array, b: jax.Array, **kwargs):\n", " return linear.solve(\n", " geom, a, b, inner_iterations=1, max_iterations=10_000, **kwargs\n", " )" diff --git a/docs/tutorials/basic_ot_between_datasets.ipynb b/docs/tutorials/basic_ot_between_datasets.ipynb index 3cc61d403..b3c452d36 100644 --- a/docs/tutorials/basic_ot_between_datasets.ipynb +++ b/docs/tutorials/basic_ot_between_datasets.ipynb @@ -260,7 +260,7 @@ "metadata": {}, "outputs": [], "source": [ - "def reg_ot_cost(x: jnp.ndarray, y: jnp.ndarray) -> float:\n", + "def reg_ot_cost(x: jax.Array, y: jax.Array) -> float:\n", " geom = pointcloud.PointCloud(x, y)\n", " ot = linear.solve(geom)\n", " return ot.reg_ot_cost" diff --git a/docs/tutorials/point_clouds.ipynb b/docs/tutorials/point_clouds.ipynb index fd20ffc9a..e1b77edca 100644 --- a/docs/tutorials/point_clouds.ipynb +++ b/docs/tutorials/point_clouds.ipynb @@ -241,8 +241,8 @@ "outputs": [], "source": [ "def optimize(\n", - " x: jnp.ndarray,\n", - " y: jnp.ndarray,\n", + " x: jax.Array,\n", + " y: jax.Array,\n", " num_iter: int = 300,\n", " dump_every: int = 5,\n", " learning_rate: float = 0.2,\n", diff --git a/docs/tutorials/sinkhorn_divergence_gradient_flow.ipynb b/docs/tutorials/sinkhorn_divergence_gradient_flow.ipynb index c3b73039c..ff84f53b4 100644 --- a/docs/tutorials/sinkhorn_divergence_gradient_flow.ipynb +++ b/docs/tutorials/sinkhorn_divergence_gradient_flow.ipynb @@ -145,8 +145,8 @@ "outputs": [], "source": [ "def gradient_flow(\n", - " x: jnp.ndarray,\n", - " y: jnp.ndarray,\n", + " 
x: jax.Array,\n", + " y: jax.Array,\n", " cost_fn: callable,\n", " num_iter: int = 500,\n", " lr: float = 0.2,\n", diff --git a/docs/tutorials/sparse_monge_displacements.ipynb b/docs/tutorials/sparse_monge_displacements.ipynb index a21213703..8fcb49096 100644 --- a/docs/tutorials/sparse_monge_displacements.ipynb +++ b/docs/tutorials/sparse_monge_displacements.ipynb @@ -241,7 +241,7 @@ "solver = jax.jit(sinkhorn.Sinkhorn())\n", "\n", "\n", - "def entropic_map(x, y, cost_fn: costs.TICost) -> jnp.ndarray:\n", + "def entropic_map(x, y, cost_fn: costs.TICost) -> jax.Array:\n", " geom = pointcloud.PointCloud(x, y, cost_fn=cost_fn)\n", " output = solver(linear_problem.LinearProblem(geom))\n", " dual_potentials = output.to_dual_potentials()\n", diff --git a/src/ott/datasets.py b/src/ott/datasets.py index 4d67c5976..12dda06bb 100644 --- a/src/ott/datasets.py +++ b/src/ott/datasets.py @@ -32,8 +32,8 @@ class Dataset(NamedTuple): source_iter: loader for the source measure target_iter: loader for the target measure """ - source_iter: Iterator[jnp.ndarray] - target_iter: Iterator[jnp.ndarray] + source_iter: Iterator[jax.Array] + target_iter: Iterator[jax.Array] @dataclasses.dataclass diff --git a/src/ott/geometry/costs.py b/src/ott/geometry/costs.py index 9f1a6c3a0..aeaf89b72 100644 --- a/src/ott/geometry/costs.py +++ b/src/ott/geometry/costs.py @@ -56,10 +56,10 @@ class CostFn(abc.ABC): """ # no norm function created by default. - norm: Optional[Callable[[jnp.ndarray], Union[float, jnp.ndarray]]] = None + norm: Optional[Callable[[jax.Array], Union[float, jax.Array]]] = None @abc.abstractmethod - def pairwise(self, x: jnp.ndarray, y: jnp.ndarray) -> float: + def pairwise(self, x: jax.Array, y: jax.Array) -> float: """Compute cost between :math:`x` and :math:`y`. Args: @@ -70,8 +70,8 @@ def pairwise(self, x: jnp.ndarray, y: jnp.ndarray) -> float: The cost. 
""" - def barycenter(self, weights: jnp.ndarray, - xs: jnp.ndarray) -> Tuple[jnp.ndarray, Any]: + def barycenter(self, weights: jax.Array, + xs: jax.Array) -> Tuple[jax.Array, Any]: """Barycentric operator. Args: @@ -86,7 +86,7 @@ def barycenter(self, weights: jnp.ndarray, raise NotImplementedError("Barycenter is not implemented.") @classmethod - def _padder(cls, dim: int) -> jnp.ndarray: + def _padder(cls, dim: int) -> jax.Array: """Create a padding vector of adequate dimension, well-suited to a cost. Args: @@ -97,7 +97,7 @@ def _padder(cls, dim: int) -> jnp.ndarray: """ return jnp.zeros((1, dim)) - def __call__(self, x: jnp.ndarray, y: jnp.ndarray) -> float: + def __call__(self, x: jax.Array, y: jax.Array) -> float: """Compute cost between :math:`x` and :math:`y`. Args: @@ -113,7 +113,7 @@ def __call__(self, x: jnp.ndarray, y: jnp.ndarray) -> float: return cost return cost + self.norm(x) + self.norm(y) - def all_pairs(self, x: jnp.ndarray, y: jnp.ndarray) -> jnp.ndarray: + def all_pairs(self, x: jax.Array, y: jax.Array) -> jax.Array: """Compute matrix of all pairwise costs, including the :attr:`norms `. Args: @@ -125,7 +125,7 @@ def all_pairs(self, x: jnp.ndarray, y: jnp.ndarray) -> jnp.ndarray: """ return jax.vmap(lambda x_: jax.vmap(lambda y_: self(x_, y_))(y))(x) - def all_pairs_pairwise(self, x: jnp.ndarray, y: jnp.ndarray) -> jnp.ndarray: + def all_pairs_pairwise(self, x: jax.Array, y: jax.Array) -> jax.Array: """Compute matrix of all pairwise costs, excluding the :attr:`norms `. Args: @@ -163,7 +163,7 @@ class TICost(CostFn): """ @abc.abstractmethod - def h(self, z: jnp.ndarray) -> float: + def h(self, z: jax.Array) -> float: """TI function acting on difference of :math:`x-y` to output cost. Args: @@ -173,11 +173,11 @@ def h(self, z: jnp.ndarray) -> float: The cost. 
""" - def h_legendre(self, z: jnp.ndarray) -> float: + def h_legendre(self, z: jax.Array) -> float: """Legendre transform of :func:`h` when it is convex.""" raise NotImplementedError("Legendre transform of `h` is not implemented.") - def pairwise(self, x: jnp.ndarray, y: jnp.ndarray) -> float: + def pairwise(self, x: jax.Array, y: jax.Array) -> float: """Compute cost as evaluation of :func:`h` on :math:`x-y`.""" return self.h(x - y) @@ -198,10 +198,10 @@ def __init__(self, p: float): self.p = p self.q = 1.0 / (1.0 - (1.0 / p)) if p > 1.0 else jnp.inf - def h(self, z: jnp.ndarray) -> float: # noqa: D102 + def h(self, z: jax.Array) -> float: # noqa: D102 return 0.5 * mu.norm(z, self.p) ** 2 - def h_legendre(self, z: jnp.ndarray) -> float: + def h_legendre(self, z: jax.Array) -> float: """Legendre transform of :func:`h`. For details on the derivation, see e.g., :cite:`boyd:04`, p. 93/94. @@ -234,10 +234,10 @@ def __init__(self, p: float): self.p = p self.q = 1.0 / (1.0 - (1.0 / p)) if p > 1.0 else jnp.inf - def h(self, z: jnp.ndarray) -> float: # noqa: D102 + def h(self, z: jax.Array) -> float: # noqa: D102 return mu.norm(z, self.p) ** self.p / self.p - def h_legendre(self, z: jnp.ndarray) -> float: # noqa: D102 + def h_legendre(self, z: jax.Array) -> float: # noqa: D102 # not defined for `p=1` return mu.norm(z, self.q) ** self.q / self.q @@ -260,7 +260,7 @@ class Euclidean(CostFn): because the function is not strictly convex (it is linear on rays). """ - def pairwise(self, x: jnp.ndarray, y: jnp.ndarray) -> float: + def pairwise(self, x: jax.Array, y: jax.Array) -> float: """Compute Euclidean norm using custom jvp implementation. Here we use a custom jvp implementation for the norm that does not yield @@ -277,22 +277,22 @@ class SqEuclidean(TICost): Implemented as a translation invariant cost, :math:`h(z) = \|z\|^2`. 
""" - def norm(self, x: jnp.ndarray) -> Union[float, jnp.ndarray]: + def norm(self, x: jax.Array) -> Union[float, jax.Array]: """Compute squared Euclidean norm for vector.""" return jnp.sum(x ** 2, axis=-1) - def pairwise(self, x: jnp.ndarray, y: jnp.ndarray) -> float: + def pairwise(self, x: jax.Array, y: jax.Array) -> float: """Compute minus twice the dot-product between vectors.""" return -2. * jnp.vdot(x, y) - def h(self, z: jnp.ndarray) -> float: # noqa: D102 + def h(self, z: jax.Array) -> float: # noqa: D102 return jnp.sum(z ** 2) - def h_legendre(self, z: jnp.ndarray) -> float: # noqa: D102 + def h_legendre(self, z: jax.Array) -> float: # noqa: D102 return 0.25 * jnp.sum(z ** 2) - def barycenter(self, weights: jnp.ndarray, - xs: jnp.ndarray) -> Tuple[jnp.ndarray, Any]: + def barycenter(self, weights: jax.Array, + xs: jax.Array) -> Tuple[jax.Array, Any]: """Output barycenter of vectors when using squared-Euclidean distance.""" return jnp.average(xs, weights=weights, axis=0), None @@ -309,7 +309,7 @@ def __init__(self, ridge: float = 1e-8): super().__init__() self._ridge = ridge - def pairwise(self, x: jnp.ndarray, y: jnp.ndarray) -> float: + def pairwise(self, x: jax.Array, y: jax.Array) -> float: """Cosine distance between vectors, denominator regularized with ridge.""" ridge = self._ridge x_norm = jnp.linalg.norm(x, axis=-1) @@ -318,7 +318,7 @@ def pairwise(self, x: jnp.ndarray, y: jnp.ndarray) -> float: return 1.0 - cosine_similarity @classmethod - def _padder(cls, dim: int) -> jnp.ndarray: + def _padder(cls, dim: int) -> jax.Array: return jnp.ones((1, dim)) @@ -341,7 +341,7 @@ class RegTICost(TICost, abc.ABC): def __init__( self, scaling_reg: float = 1.0, - matrix: Optional[jnp.ndarray] = None, + matrix: Optional[jax.Array] = None, orthogonal: bool = False, ): super().__init__() @@ -350,16 +350,16 @@ def __init__( self.orthogonal = orthogonal @abc.abstractmethod - def _reg(self, z: jnp.ndarray) -> float: + def _reg(self, z: jax.Array) -> float: 
"""Regularization function.""" - def _reg_stiefel_orth(self, z: jnp.ndarray) -> float: + def _reg_stiefel_orth(self, z: jax.Array) -> float: raise NotImplementedError( "Regularization in the orthogonal " "subspace is not implemented." ) - def reg(self, z: jnp.ndarray) -> float: + def reg(self, z: jax.Array) -> float: """Regularization function. Args: @@ -374,7 +374,7 @@ def reg(self, z: jnp.ndarray) -> float: return self._reg_stiefel_orth(z) return self._reg(self.matrix @ z) - def prox_reg(self, z: jnp.ndarray, tau: float = 1.0) -> jnp.ndarray: + def prox_reg(self, z: jax.Array, tau: float = 1.0) -> jax.Array: """Proximal operator of :meth:`reg`. Args: @@ -391,26 +391,24 @@ def prox_reg(self, z: jnp.ndarray, tau: float = 1.0) -> jnp.ndarray: return self._prox_reg_stiefel_orth(z, tau) return self._prox_reg_stiefel(z, tau) - def _prox_reg(self, z: jnp.ndarray, tau: float = 1.0) -> jnp.ndarray: + def _prox_reg(self, z: jax.Array, tau: float = 1.0) -> jax.Array: raise NotImplementedError("Proximal operator is not implemented.") - def _prox_reg_stiefel_orth( - self, z: jnp.ndarray, tau: float = 1.0 - ) -> jnp.ndarray: + def _prox_reg_stiefel_orth(self, z: jax.Array, tau: float = 1.0) -> jax.Array: - def orth(x: jnp.ndarray) -> jnp.ndarray: + def orth(x: jax.Array) -> jax.Array: return x - self.matrix.T @ (self.matrix @ x) # assumes `matrix` has orthogonal rows tmp = orth(z) return z - orth(tmp - self._prox_reg(tmp, tau)) - def _prox_reg_stiefel(self, z: jnp.ndarray, tau: float) -> jnp.ndarray: + def _prox_reg_stiefel(self, z: jax.Array, tau: float) -> jax.Array: # assumes `matrix` has orthogonal rows tmp = self.matrix @ z return z - self.matrix.T @ (tmp - self._prox_reg(tmp, tau)) - def prox_legendre_reg(self, z: jnp.ndarray, tau: float = 1.0) -> jnp.ndarray: + def prox_legendre_reg(self, z: jax.Array, tau: float = 1.0) -> jax.Array: r"""Proximal operator of the Legendre transform of :meth:`reg`. 
Uses Moreau's decomposition: @@ -428,16 +426,16 @@ def prox_legendre_reg(self, z: jnp.ndarray, tau: float = 1.0) -> jnp.ndarray: """ return z - tau * self.prox_reg(z / tau, 1.0 / tau) - def h(self, z: jnp.ndarray) -> float: # noqa: D102 + def h(self, z: jax.Array) -> float: # noqa: D102 out = 0.5 * jnp.sum(z ** 2) return out + self.scaling_reg * self.reg(z) - def h_legendre(self, z: jnp.ndarray) -> float: # noqa: D102 + def h_legendre(self, z: jax.Array) -> float: # noqa: D102 q = jax.lax.stop_gradient(self.prox_reg(z)) return jnp.sum(q * z) - self.h(q) - def h_transform(self, f: Callable[[jnp.ndarray], float], - **kwargs: Any) -> Callable[[jnp.ndarray], float]: + def h_transform(self, f: Callable[[jax.Array], float], + **kwargs: Any) -> Callable[[jax.Array], float]: r"""Compute the h-transform of a concave function. Return a callable :math:`f_h` defined as: @@ -467,18 +465,16 @@ def h_transform(self, f: Callable[[jnp.ndarray], float], The h-transform of ``f``. """ - def minus_f(z: jnp.ndarray, x: jnp.ndarray) -> float: + def minus_f(z: jax.Array, x: jax.Array) -> float: return -f(x - z) - def prox( - x: jnp.ndarray, scaling_reg: float, scaling_h: float - ) -> jnp.ndarray: + def prox(x: jax.Array, scaling_reg: float, scaling_h: float) -> jax.Array: # https://web.stanford.edu/~boyd/papers/pdf/prox_algs.pdf 2.2. tmp = 1.0 / (1.0 + scaling_h) tau = scaling_reg * scaling_h * tmp return self.prox_reg(x * tmp, tau) - def f_h(x: jnp.ndarray) -> float: + def f_h(x: jax.Array) -> float: pg = jaxopt.ProximalGradient(fun=minus_f, prox=prox, **kwargs) pg_run = pg.run(x, self.scaling_reg, x=x) pg_sol = jax.lax.stop_gradient(pg_run.params) @@ -508,10 +504,10 @@ class ElasticL1(RegTICost): to promote displacements in the span of ``matrix``. 
""" - def _reg(self, z: jnp.ndarray) -> float: # noqa: D102 + def _reg(self, z: jax.Array) -> float: # noqa: D102 return jnp.linalg.norm(z, ord=1) - def _prox_reg(self, z: jnp.ndarray, tau: float = 1.0) -> jnp.ndarray: + def _prox_reg(self, z: jax.Array, tau: float = 1.0) -> jax.Array: return jnp.sign(z) * jax.nn.relu(jnp.abs(z) - tau * self.scaling_reg) @@ -529,19 +525,17 @@ class ElasticL2(RegTICost): to promote displacements in the span of ``matrix``. """ - def _reg(self, z: jnp.ndarray) -> float: # noqa: D102 + def _reg(self, z: jax.Array) -> float: # noqa: D102 return 0.5 * jnp.sum(z ** 2) - def _reg_stiefel_orth(self, z: jnp.ndarray) -> float: + def _reg_stiefel_orth(self, z: jax.Array) -> float: # Pythagorean identity return self._reg(z) - self._reg(self.matrix @ z) - def _prox_reg(self, z: jnp.ndarray, tau: float = 1.0) -> jnp.ndarray: + def _prox_reg(self, z: jax.Array, tau: float = 1.0) -> jax.Array: return z / (1.0 + tau * self.scaling_reg) - def _prox_reg_stiefel_orth( - self, z: jnp.ndarray, tau: float = 1.0 - ) -> jnp.ndarray: + def _prox_reg_stiefel_orth(self, z: jax.Array, tau: float = 1.0) -> jax.Array: out = z + tau * self.scaling_reg * self.matrix.T @ (self.matrix @ z) return self._prox_reg(out, tau) @@ -565,7 +559,7 @@ class ElasticSTVS(RegTICost): to promote displacements in the span of ``matrix``. 
""" # noqa: D205,E501 - def _reg(self, z: jnp.ndarray) -> float: # noqa: D102 + def _reg(self, z: jax.Array) -> float: # noqa: D102 u = jnp.arcsinh(jnp.abs(z) / (2 * self.scaling_reg)) out = u - 0.5 * jnp.exp(-2.0 * u) # Lemma 2.1 of `schreck:15`; @@ -573,8 +567,8 @@ def _reg(self, z: jnp.ndarray) -> float: # noqa: D102 return self.scaling_reg * jnp.sum(out + 0.5) # make positive def _prox_reg( # noqa: D102 - self, z: jnp.ndarray, tau: float = 1.0 - ) -> jnp.ndarray: + self, z: jax.Array, tau: float = 1.0 + ) -> jax.Array: tmp = 1.0 - (self.scaling_reg * tau / (jnp.abs(z) + 1e-12)) ** 2 return jax.nn.relu(tmp) * z @@ -600,7 +594,7 @@ def __init__(self, k: int, *args, **kwargs: Any): super().__init__(*args, **kwargs) self.k = k - def _reg(self, z: jnp.ndarray) -> float: # noqa: D102 + def _reg(self, z: jax.Array) -> float: # noqa: D102 # Prop 2.1 in :cite:`argyriou:12` k = self.k top_w = jax.lax.top_k(jnp.abs(z), k)[0] # Fetch largest k values @@ -621,15 +615,14 @@ def _reg(self, z: jnp.ndarray) -> float: # noqa: D102 return 0.5 * (s + (r + 1) * cesaro[r] ** 2) - def prox_reg(self, z: jnp.ndarray, tau: float = 1.0) -> float: # noqa: D102 + def prox_reg(self, z: jax.Array, tau: float = 1.0) -> float: # noqa: D102 @functools.partial(jax.vmap, in_axes=[0, None, None]) - def find_indices(r: int, l: jnp.ndarray, - z: jnp.ndarray) -> Tuple[jnp.ndarray, jnp.ndarray]: + def find_indices(r: int, l: jax.Array, + z: jax.Array) -> Tuple[jax.Array, jax.Array]: @functools.partial(jax.vmap, in_axes=[None, 0, None]) - def inner(r: int, l: int, - z: jnp.ndarray) -> Tuple[jnp.ndarray, jnp.ndarray]: + def inner(r: int, l: int, z: jax.Array) -> Tuple[jax.Array, jax.Array]: i = k - r - 1 res = jnp.sum(z * ((i <= ixs) & (ixs < l))) res /= l - k + (beta + 1) * r + beta + 1 @@ -692,14 +685,14 @@ def __init__(self, dimension: int, sqrtm_kw: Optional[Dict[str, Any]] = None): self._dimension = dimension self._sqrtm_kw = {} if sqrtm_kw is None else sqrtm_kw - def norm(self, x: jnp.ndarray) -> 
jnp.ndarray: + def norm(self, x: jax.Array) -> jax.Array: """Compute norm of Gaussian, sq. 2-norm of mean + trace of covariance.""" mean, cov = x_to_means_and_covs(x, self._dimension) norm = jnp.sum(mean ** 2, axis=-1) norm += jnp.trace(cov, axis1=-2, axis2=-1) return norm - def pairwise(self, x: jnp.ndarray, y: jnp.ndarray) -> float: + def pairwise(self, x: jax.Array, y: jax.Array) -> float: """Compute - 2 x Bures dot-product.""" mean_x, cov_x = x_to_means_and_covs(x, self._dimension) mean_y, cov_y = x_to_means_and_covs(y, self._dimension) @@ -713,12 +706,12 @@ def pairwise(self, x: jnp.ndarray, y: jnp.ndarray) -> float: def covariance_fixpoint_iter( self, - covs: jnp.ndarray, - weights: jnp.ndarray, + covs: jax.Array, + weights: jax.Array, tolerance: float = 1e-4, sqrtm_kw: Optional[Dict[str, Any]] = None, **kwargs: Any - ) -> jnp.ndarray: + ) -> jax.Array: """Iterate fix-point updates to compute barycenter of Gaussians. Args: @@ -744,8 +737,8 @@ def covariance_fixpoint_iter( @functools.partial(jax.vmap, in_axes=[None, 0, 0]) def scale_covariances( - cov_sqrt: jnp.ndarray, cov: jnp.ndarray, weight: jnp.ndarray - ) -> jnp.ndarray: + cov_sqrt: jax.Array, cov: jax.Array, weight: jax.Array + ) -> jax.Array: """Rescale covariance in barycenter step.""" return weight * matrix_square_root.sqrtm_only((cov_sqrt @ cov) @ cov_sqrt, **sqrtm_kw) @@ -757,8 +750,8 @@ def cond_fn(iteration: int, constants: Tuple[Any, ...], state) -> bool: def body_fn( iteration: int, constants: Tuple[Any, ...], - state: Tuple[jnp.ndarray, float], compute_error: bool - ) -> Tuple[jnp.ndarray, float]: + state: Tuple[jax.Array, float], compute_error: bool + ) -> Tuple[jax.Array, float]: del constants, compute_error cov, diffs = state cov_sqrt, cov_inv_sqrt, _ = matrix_square_root.sqrtm(cov, **sqrtm_kw) @@ -770,7 +763,7 @@ def body_fn( diffs = diffs.at[iteration // inner_iterations].set(diff) return next_cov, diffs - def init_state() -> Tuple[jnp.ndarray, float]: + def init_state() -> 
Tuple[jax.Array, float]: cov_init = jnp.eye(self._dimension) diffs = -jnp.ones( (np.ceil(max_iterations / inner_iterations).astype(int),), @@ -791,12 +784,12 @@ def init_state() -> Tuple[jnp.ndarray, float]: def barycenter( self, - weights: jnp.ndarray, - xs: jnp.ndarray, + weights: jax.Array, + xs: jax.Array, tolerance: float = 1e-4, sqrtm_kw: Optional[Dict[str, Any]] = None, **kwargs: Any - ) -> Tuple[jnp.ndarray, jnp.ndarray]: + ) -> Tuple[jax.Array, jax.Array]: """Compute the Bures barycenter of weighted Gaussian distributions. Implements the fixed point approach proposed in :cite:`alvarez-esteban:16` @@ -842,7 +835,7 @@ def barycenter( return mean_and_cov_to_x(mu_bary, cov_bary, self._dimension), diffs @classmethod - def _padder(cls, dim: int) -> jnp.ndarray: + def _padder(cls, dim: int) -> jax.Array: dimension = int((-1 + math.sqrt(1 + 4 * dim)) / 2) padding = mean_and_cov_to_x( jnp.zeros((dimension,)), jnp.eye(dimension), dimension @@ -885,7 +878,7 @@ def __init__( self._gamma = gamma self._sqrtm_kw = kwargs - def norm(self, x: jnp.ndarray) -> jnp.ndarray: + def norm(self, x: jax.Array) -> jax.Array: """Compute norm of Gaussian for unbalanced Bures. Args: @@ -898,7 +891,7 @@ def norm(self, x: jnp.ndarray) -> jnp.ndarray: """ return self._gamma * x[..., 0] - def pairwise(self, x: jnp.ndarray, y: jnp.ndarray) -> float: + def pairwise(self, x: jax.Array, y: jax.Array) -> float: """Compute dot-product for unbalanced Bures. 
Args: @@ -992,18 +985,17 @@ def __init__( self.ground_cost = SqEuclidean() if ground_cost is None else ground_cost self.debiased = debiased - def pairwise(self, x: jnp.ndarray, y: jnp.ndarray) -> float: # noqa: D102 + def pairwise(self, x: jax.Array, y: jax.Array) -> float: # noqa: D102 c_xy = self._soft_dtw(x, y) if self.debiased: return c_xy - 0.5 * (self._soft_dtw(x, x) + self._soft_dtw(y, y)) return c_xy - def _soft_dtw(self, t1: jnp.ndarray, t2: jnp.ndarray) -> float: + def _soft_dtw(self, t1: jax.Array, t2: jax.Array) -> float: def body( - carry: Tuple[jnp.ndarray, jnp.ndarray], - current_antidiagonal: jnp.ndarray - ) -> Tuple[Tuple[jnp.ndarray, jnp.ndarray], jnp.ndarray]: + carry: Tuple[jax.Array, jax.Array], current_antidiagonal: jax.Array + ) -> Tuple[Tuple[jax.Array, jax.Array], jax.Array]: # modified from: https://github.com/khdlr/softdtw_jax two_ago, one_ago = carry @@ -1050,8 +1042,8 @@ def tree_unflatten(cls, aux_data, children): # noqa: D102 return cls(*children, **aux_data) -def x_to_means_and_covs(x: jnp.ndarray, - dimension: int) -> Tuple[jnp.ndarray, jnp.ndarray]: +def x_to_means_and_covs(x: jax.Array, + dimension: int) -> Tuple[jax.Array, jax.Array]: """Extract means and covariance matrices of Gaussians from raveled vector. 
Args: @@ -1071,8 +1063,8 @@ def x_to_means_and_covs(x: jnp.ndarray, def mean_and_cov_to_x( - mean: jnp.ndarray, covariance: jnp.ndarray, dimension: int -) -> jnp.ndarray: + mean: jax.Array, covariance: jax.Array, dimension: int +) -> jax.Array: """Ravel a Gaussian's mean and covariance matrix to d(1 + d) vector.""" return jnp.concatenate( (mean, jnp.reshape(covariance, (dimension * dimension))) diff --git a/src/ott/geometry/geometry.py b/src/ott/geometry/geometry.py index f953bf38c..5d3db3ee6 100644 --- a/src/ott/geometry/geometry.py +++ b/src/ott/geometry/geometry.py @@ -79,14 +79,14 @@ class Geometry: def __init__( self, - cost_matrix: Optional[jnp.ndarray] = None, - kernel_matrix: Optional[jnp.ndarray] = None, + cost_matrix: Optional[jax.Array] = None, + kernel_matrix: Optional[jax.Array] = None, epsilon: Optional[Union[float, epsilon_scheduler.Epsilon]] = None, relative_epsilon: Optional[bool] = None, scale_cost: Union[bool, int, float, Literal["mean", "max_cost", "median"]] = 1.0, - src_mask: Optional[jnp.ndarray] = None, - tgt_mask: Optional[jnp.ndarray] = None, + src_mask: Optional[jax.Array] = None, + tgt_mask: Optional[jax.Array] = None, ): self._cost_matrix = cost_matrix self._kernel_matrix = kernel_matrix @@ -107,7 +107,7 @@ def cost_rank(self) -> Optional[int]: """Output rank of cost matrix, if any was provided.""" @property - def cost_matrix(self) -> jnp.ndarray: + def cost_matrix(self) -> jax.Array: """Cost matrix, recomputed from kernel if only kernel was specified.""" if self._cost_matrix is None: # If no epsilon was passed on to the geometry, then assume it is one by @@ -131,7 +131,7 @@ def mean_cost_matrix(self) -> float: return jnp.sum(tmp * self._m_normed_ones) @property - def kernel_matrix(self) -> jnp.ndarray: + def kernel_matrix(self) -> jax.Array: """Kernel matrix. Either provided by user or recomputed from :attr:`cost_matrix`. 
@@ -245,12 +245,12 @@ def copy_epsilon(self, other: "Geometry") -> "Geometry": def apply_lse_kernel( self, - f: jnp.ndarray, - g: jnp.ndarray, + f: jax.Array, + g: jax.Array, eps: float, - vec: jnp.ndarray = None, + vec: jax.Array = None, axis: int = 0 - ) -> jnp.ndarray: + ) -> jax.Array: r"""Apply :attr:`kernel_matrix` in log domain. This function applies the ground geometry's kernel in log domain, using @@ -267,10 +267,10 @@ def apply_lse_kernel( f and g in iterations 1 & 2 respectively. Args: - f: jnp.ndarray [num_a,] , potential of size num_rows of cost_matrix - g: jnp.ndarray [num_b,] , potential of size num_cols of cost_matrix + f: jax.Array [num_a,] , potential of size num_rows of cost_matrix + g: jax.Array [num_b,] , potential of size num_cols of cost_matrix eps: float, regularization strength - vec: jnp.ndarray [num_a or num_b,] , when not None, this has the effect of + vec: jax.Array [num_a or num_b,] , when not None, this has the effect of doing log-Kernel computations with an addition elementwise multiplication of exp(g / eps) by a vector. This is carried out by adding weights to the log-sum-exp function, and needs to handle signs @@ -278,7 +278,7 @@ def apply_lse_kernel( axis: summing over axis 0 when doing (2), or over axis 1 when doing (1) Returns: - A jnp.ndarray corresponding to output above, depending on axis. + A jax.Array corresponding to output above, depending on axis. """ w_res, w_sgn = self._softmax(f, g, eps, vec, axis) remove = f if axis == 1 else g @@ -286,20 +286,20 @@ def apply_lse_kernel( def apply_kernel( self, - scaling: jnp.ndarray, + scaling: jax.Array, eps: Optional[float] = None, axis: int = 0, - ) -> jnp.ndarray: + ) -> jax.Array: """Apply :attr:`kernel_matrix` on positive scaling vector. Args: - scaling: jnp.ndarray [num_a or num_b] , scaling of size num_rows or + scaling: jax.Array [num_a or num_b] , scaling of size num_rows or num_cols of kernel_matrix eps: passed for consistency, not used yet. 
axis: standard kernel product if axis is 1, transpose if 0. Returns: - a jnp.ndarray corresponding to output above, depending on axis. + a jax.Array corresponding to output above, depending on axis. """ if eps is None: kernel = self.kernel_matrix @@ -311,10 +311,10 @@ def apply_kernel( def marginal_from_potentials( self, - f: jnp.ndarray, - g: jnp.ndarray, + f: jax.Array, + g: jax.Array, axis: int = 0, - ) -> jnp.ndarray: + ) -> jax.Array: """Output marginal of transportation matrix from potentials. This applies first lse kernel in the standard way, removes the @@ -323,8 +323,8 @@ def marginal_from_potentials( by potentials. Args: - f: jnp.ndarray [num_a,] , potential of size num_rows of cost_matrix - g: jnp.ndarray [num_b,] , potential of size num_cols of cost_matrix + f: jax.Array [num_a,] , potential of size num_rows of cost_matrix + g: jax.Array [num_b,] , potential of size num_cols of cost_matrix axis: axis along which to integrate, returns marginal on other axis. Returns: @@ -336,23 +336,19 @@ def marginal_from_potentials( def marginal_from_scalings( self, - u: jnp.ndarray, - v: jnp.ndarray, + u: jax.Array, + v: jax.Array, axis: int = 0, - ) -> jnp.ndarray: + ) -> jax.Array: """Output marginal of transportation matrix from scalings.""" u, v = (v, u) if axis == 0 else (u, v) return u * self.apply_kernel(v, eps=self.epsilon, axis=axis) - def transport_from_potentials( - self, f: jnp.ndarray, g: jnp.ndarray - ) -> jnp.ndarray: + def transport_from_potentials(self, f: jax.Array, g: jax.Array) -> jax.Array: """Output transport matrix from potentials.""" return jnp.exp(self._center(f, g) / self.epsilon) - def transport_from_scalings( - self, u: jnp.ndarray, v: jnp.ndarray - ) -> jnp.ndarray: + def transport_from_scalings(self, u: jax.Array, v: jax.Array) -> jax.Array: """Output transport matrix from pair of scalings.""" return self.kernel_matrix * u[:, jnp.newaxis] * v[jnp.newaxis, :] @@ -361,17 +357,17 @@ def transport_from_scalings( def update_potential( self, - 
f: jnp.ndarray, - g: jnp.ndarray, - log_marginal: jnp.ndarray, + f: jax.Array, + g: jax.Array, + log_marginal: jax.Array, iteration: Optional[int] = None, axis: int = 0, - ) -> jnp.ndarray: + ) -> jax.Array: """Carry out one Sinkhorn update for potentials, i.e. in log space. Args: - f: jnp.ndarray [num_a,] , potential of size num_rows of cost_matrix - g: jnp.ndarray [num_b,] , potential of size num_cols of cost_matrix + f: jax.Array [num_a,] , potential of size num_rows of cost_matrix + g: jax.Array [num_b,] , potential of size num_cols of cost_matrix log_marginal: targeted marginal iteration: used to compute epsilon from schedule, if provided. axis: axis along which the update should be carried out. @@ -385,15 +381,15 @@ def update_potential( def update_scaling( self, - scaling: jnp.ndarray, - marginal: jnp.ndarray, + scaling: jax.Array, + marginal: jax.Array, iteration: Optional[int] = None, axis: int = 0, - ) -> jnp.ndarray: + ) -> jax.Array: """Carry out one Sinkhorn update for scalings, using kernel directly. Args: - scaling: jnp.ndarray of num_a or num_b positive values. + scaling: jax.Array of num_a or num_b positive values. marginal: targeted marginal iteration: used to compute epsilon from schedule, if provided. axis: axis along which the update should be carried out. 
@@ -406,13 +402,13 @@ def update_scaling( return marginal / jnp.where(app_kernel > 0, app_kernel, 1.0) # Helper functions - def _center(self, f: jnp.ndarray, g: jnp.ndarray) -> jnp.ndarray: + def _center(self, f: jax.Array, g: jax.Array) -> jax.Array: return f[:, jnp.newaxis] + g[jnp.newaxis, :] - self.cost_matrix def _softmax( - self, f: jnp.ndarray, g: jnp.ndarray, eps: float, - vec: Optional[jnp.ndarray], axis: int - ) -> Tuple[jnp.ndarray, jnp.ndarray]: + self, f: jax.Array, g: jax.Array, eps: float, vec: Optional[jax.Array], + axis: int + ) -> Tuple[jax.Array, jax.Array]: """Apply softmax row or column wise, weighted by vec.""" if vec is not None: if axis == 0: @@ -429,8 +425,8 @@ def _softmax( @functools.partial(jax.vmap, in_axes=[None, None, None, 0, None]) def _apply_transport_from_potentials( - self, f: jnp.ndarray, g: jnp.ndarray, vec: jnp.ndarray, axis: int - ) -> jnp.ndarray: + self, f: jax.Array, g: jax.Array, vec: jax.Array, axis: int + ) -> jax.Array: """Apply lse_kernel to arbitrary vector while keeping track of signs.""" lse_res, lse_sgn = self.apply_lse_kernel( f, g, self.epsilon, vec=vec, axis=axis @@ -441,11 +437,11 @@ def _apply_transport_from_potentials( # wrapper to allow default option for axis. def apply_transport_from_potentials( self, - f: jnp.ndarray, - g: jnp.ndarray, - vec: jnp.ndarray, + f: jax.Array, + g: jax.Array, + vec: jax.Array, axis: int = 0 - ) -> jnp.ndarray: + ) -> jax.Array: """Apply transport matrix computed from potentials to a (batched) vec. This approach does not instantiate the transport matrix itself, but uses @@ -456,9 +452,9 @@ def apply_transport_from_potentials( (b=..., return_sign=True) optional parameters of logsumexp. 
Args: - f: jnp.ndarray [num_a,] , potential of size num_rows of cost_matrix - g: jnp.ndarray [num_b,] , potential of size num_cols of cost_matrix - vec: jnp.ndarray [batch, num_a or num_b], vector that will be multiplied + f: jax.Array [num_a,] , potential of size num_rows of cost_matrix + g: jax.Array [num_b,] , potential of size num_cols of cost_matrix + vec: jax.Array [batch, num_a or num_b], vector that will be multiplied by transport matrix corresponding to potentials f, g, and geom. axis: axis to differentiate left (0) or right (1) multiply. @@ -473,7 +469,7 @@ def apply_transport_from_potentials( @functools.partial(jax.vmap, in_axes=[None, None, None, 0, None]) def _apply_transport_from_scalings( - self, u: jnp.ndarray, v: jnp.ndarray, vec: jnp.ndarray, axis: int + self, u: jax.Array, v: jax.Array, vec: jax.Array, axis: int ): u, v = (u, v * vec) if axis == 1 else (v, u * vec) return u * self.apply_kernel(v, eps=self.epsilon, axis=axis) @@ -481,20 +477,20 @@ def _apply_transport_from_scalings( # wrapper to allow default option for axis def apply_transport_from_scalings( self, - u: jnp.ndarray, - v: jnp.ndarray, - vec: jnp.ndarray, + u: jax.Array, + v: jax.Array, + vec: jax.Array, axis: int = 0 - ) -> jnp.ndarray: + ) -> jax.Array: """Apply transport matrix computed from scalings to a (batched) vec. This approach does not instantiate the transport matrix itself, but relies instead on the apply_kernel function. Args: - u: jnp.ndarray [num_a,] , scaling of size num_rows of cost_matrix - v: jnp.ndarray [num_b,] , scaling of size num_cols of cost_matrix - vec: jnp.ndarray [batch, num_a or num_b], vector that will be multiplied + u: jax.Array [num_a,] , scaling of size num_rows of cost_matrix + v: jax.Array [num_b,] , scaling of size num_cols of cost_matrix + vec: jax.Array [batch, num_a or num_b], vector that will be multiplied by transport matrix corresponding to scalings u, v, and geom. axis: axis to differentiate left (0) or right (1) multiply. 
@@ -507,7 +503,7 @@ def apply_transport_from_scalings( )[0, :] return self._apply_transport_from_scalings(u, v, vec, axis) - def potential_from_scaling(self, scaling: jnp.ndarray) -> jnp.ndarray: + def potential_from_scaling(self, scaling: jax.Array) -> jax.Array: """Compute dual potential vector from scaling vector. Args: @@ -518,7 +514,7 @@ def potential_from_scaling(self, scaling: jnp.ndarray) -> jnp.ndarray: """ return self.epsilon * jnp.log(scaling) - def scaling_from_potential(self, potential: jnp.ndarray) -> jnp.ndarray: + def scaling_from_potential(self, potential: jax.Array) -> jax.Array: """Compute scaling vector from dual potential. Args: @@ -532,7 +528,7 @@ def scaling_from_potential(self, potential: jnp.ndarray) -> jnp.ndarray: finite, jnp.exp(jnp.where(finite, potential / self.epsilon, 0.0)), 0.0 ) - def apply_square_cost(self, arr: jnp.ndarray, axis: int = 0) -> jnp.ndarray: + def apply_square_cost(self, arr: jax.Array, axis: int = 0) -> jax.Array: """Apply elementwise-square of cost matrix to array (vector or matrix). This function applies the ground geometry's cost matrix, to perform either @@ -553,11 +549,11 @@ def apply_square_cost(self, arr: jnp.ndarray, axis: int = 0) -> jnp.ndarray: def apply_cost( self, - arr: jnp.ndarray, + arr: jax.Array, axis: int = 0, - fn: Optional[Callable[[jnp.ndarray], jnp.ndarray]] = None, + fn: Optional[Callable[[jax.Array], jax.Array]] = None, **kwargs: Any - ) -> jnp.ndarray: + ) -> jax.Array: """Apply :attr:`cost_matrix` to array (vector or matrix). This function applies the ground geometry's cost matrix, to perform either @@ -566,7 +562,7 @@ def apply_cost( where C is [num_a, num_b] Args: - arr: jnp.ndarray [num_a or num_b, p], vector that will be multiplied by + arr: jax.Array [num_a or num_b, p], vector that will be multiplied by the cost matrix. 
axis: standard cost matrix if axis=1, transpose if 0 fn: function to apply to cost matrix element-wise before the dot product @@ -583,21 +579,21 @@ def apply_cost( def _apply_cost_to_vec( self, - vec: jnp.ndarray, + vec: jax.Array, axis: int = 0, fn=None, **_: Any, - ) -> jnp.ndarray: + ) -> jax.Array: """Apply ``[num_a, num_b]`` fn(cost) (or transpose) to vector. Args: - vec: jnp.ndarray [num_a,] ([num_b,] if axis=1) vector + vec: jax.Array [num_a,] ([num_b,] if axis=1) vector axis: axis on which the reduction is done. fn: function optionally applied to cost matrix element-wise, before the doc product Returns: - A jnp.ndarray corresponding to cost x vector + A jax.Array corresponding to cost x vector """ matrix = self.cost_matrix.T if axis == 0 else self.cost_matrix matrix = fn(matrix) if fn is not None else matrix @@ -718,7 +714,7 @@ def to_LRCGeometry( ) def subset( - self, src_ixs: Optional[jnp.ndarray], tgt_ixs: Optional[jnp.ndarray], + self, src_ixs: Optional[jax.Array], tgt_ixs: Optional[jax.Array], **kwargs: Any ) -> "Geometry": """Subset rows or columns of a geometry. @@ -733,10 +729,10 @@ def subset( """ def subset_fn( - arr: Optional[jnp.ndarray], - src_ixs: Optional[jnp.ndarray], - tgt_ixs: Optional[jnp.ndarray], - ) -> Optional[jnp.ndarray]: + arr: Optional[jax.Array], + src_ixs: Optional[jax.Array], + tgt_ixs: Optional[jax.Array], + ) -> Optional[jax.Array]: if arr is None: return None if src_ixs is not None: @@ -755,8 +751,8 @@ def subset_fn( def mask( self, - src_mask: Optional[jnp.ndarray], - tgt_mask: Optional[jnp.ndarray], + src_mask: Optional[jax.Array], + tgt_mask: Optional[jax.Array], mask_value: float = 0., ) -> "Geometry": """Mask rows or columns of a geometry. 
@@ -780,10 +776,10 @@ def mask( """ def mask_fn( - arr: Optional[jnp.ndarray], - src_mask: Optional[jnp.ndarray], - tgt_mask: Optional[jnp.ndarray], - ) -> Optional[jnp.ndarray]: + arr: Optional[jax.Array], + src_mask: Optional[jax.Array], + tgt_mask: Optional[jax.Array], + ) -> Optional[jax.Array]: if arr is None: return arr assert arr.ndim == 2, arr.ndim @@ -801,12 +797,12 @@ def mask_fn( def _mask_subset_helper( self, - src_ixs: Optional[jnp.ndarray], - tgt_ixs: Optional[jnp.ndarray], + src_ixs: Optional[jax.Array], + tgt_ixs: Optional[jax.Array], *, fn: Callable[ - [Optional[jnp.ndarray], Optional[jnp.ndarray], Optional[jnp.ndarray]], - Optional[jnp.ndarray]], + [Optional[jax.Array], Optional[jax.Array], Optional[jax.Array]], + Optional[jax.Array]], propagate_mask: bool, **kwargs: Any, ) -> "Geometry": @@ -825,7 +821,7 @@ def _mask_subset_helper( ) @property - def src_mask(self) -> Optional[jnp.ndarray]: + def src_mask(self) -> Optional[jax.Array]: """Mask of shape ``[num_a,]`` to compute :attr:`cost_matrix` statistics. Specifically, it is used when computing: @@ -837,7 +833,7 @@ def src_mask(self) -> Optional[jnp.ndarray]: return self._normalize_mask(self._src_mask, self.shape[0]) @property - def tgt_mask(self) -> Optional[jnp.ndarray]: + def tgt_mask(self) -> Optional[jax.Array]: """Mask of shape ``[num_b,]`` to compute :attr:`cost_matrix` statistics. Specifically, it is used when computing: @@ -863,22 +859,22 @@ def _masked_geom(self, mask_value: float = 0.) 
-> "Geometry": return self.mask(src_mask, tgt_mask, mask_value=mask_value) @property - def _n_normed_ones(self) -> jnp.ndarray: + def _n_normed_ones(self) -> jax.Array: """Normalized array of shape ``[num_a,]``.""" mask = self.src_mask arr = jnp.ones(self.shape[0]) if mask is None else mask return arr / jnp.sum(arr) @property - def _m_normed_ones(self) -> jnp.ndarray: + def _m_normed_ones(self) -> jax.Array: """Normalized array of shape ``[num_b,]``.""" mask = self.tgt_mask arr = jnp.ones(self.shape[1]) if mask is None else mask return arr / jnp.sum(arr) @staticmethod - def _normalize_mask(mask: Optional[Union[int, jnp.ndarray]], - size: int) -> Optional[jnp.ndarray]: + def _normalize_mask(mask: Optional[Union[int, jax.Array]], + size: int) -> Optional[jax.Array]: """Convert array of indices to a boolean mask.""" if mask is None: return None diff --git a/src/ott/geometry/graph.py b/src/ott/geometry/graph.py index c7dac0c99..ab0fe8768 100644 --- a/src/ott/geometry/graph.py +++ b/src/ott/geometry/graph.py @@ -48,7 +48,7 @@ class Graph(geometry.Geometry): def __init__( self, - laplacian: jnp.ndarray, + laplacian: jax.Array, t: float = 1e-3, n_steps: int = 100, numerical_scheme: Literal["backward_euler", @@ -66,7 +66,7 @@ def __init__( @classmethod def from_graph( cls, - G: jnp.ndarray, + G: jax.Array, t: Optional[float] = 1e-3, directed: bool = False, normalize: bool = False, @@ -113,10 +113,10 @@ def from_graph( def apply_kernel( self, - scaling: jnp.ndarray, + scaling: jax.Array, eps: Optional[float] = None, axis: int = 0, - ) -> jnp.ndarray: + ) -> jax.Array: r"""Apply :attr:`kernel_matrix` on positive scaling vector. 
Args: @@ -129,8 +129,8 @@ def apply_kernel( """ def conf_fn( - iteration: int, consts: Tuple[jnp.ndarray, Optional[jnp.ndarray]], - old_new: Tuple[jnp.ndarray, jnp.ndarray] + iteration: int, consts: Tuple[jax.Array, Optional[jax.Array]], + old_new: Tuple[jax.Array, jax.Array] ) -> bool: del iteration, consts @@ -143,9 +143,9 @@ def conf_fn( return (jnp.nanmax(f) - jnp.nanmin(f)) > self.tol def body_fn( - iteration: int, consts: Tuple[jnp.ndarray, Optional[jnp.ndarray]], - old_new: Tuple[jnp.ndarray, jnp.ndarray], compute_errors: bool - ) -> Tuple[jnp.ndarray, jnp.ndarray]: + iteration: int, consts: Tuple[jax.Array, Optional[jax.Array]], + old_new: Tuple[jax.Array, jax.Array], compute_errors: bool + ) -> Tuple[jax.Array, jax.Array]: del iteration, compute_errors L, scaled_lap = consts @@ -186,7 +186,7 @@ def body_fn( )[1] @property - def kernel_matrix(self) -> jnp.ndarray: # noqa: D102 + def kernel_matrix(self) -> jax.Array: # noqa: D102 n, _ = self.shape kernel = self.apply_kernel(jnp.eye(n)) # force symmetry because of numerical imprecision @@ -194,7 +194,7 @@ def kernel_matrix(self) -> jnp.ndarray: # noqa: D102 return (kernel + kernel.T) * 0.5 @property - def cost_matrix(self) -> jnp.ndarray: # noqa: D102 + def cost_matrix(self) -> jax.Array: # noqa: D102 return -self.t * mu.safe_log(self.kernel_matrix) @property @@ -209,12 +209,12 @@ def _scale(self) -> float: ) @property - def _scaled_laplacian(self) -> jnp.ndarray: + def _scaled_laplacian(self) -> jax.Array: """Laplacian scaled by a constant, depending on the numerical scheme.""" return self._scale * self.laplacian @property - def _M(self) -> jnp.ndarray: + def _M(self) -> jax.Array: n, _ = self.shape return self._scaled_laplacian + jnp.eye(n) @@ -230,29 +230,27 @@ def is_symmetric(self) -> bool: # noqa: D102 def dtype(self) -> jnp.dtype: # noqa: D102 return self.laplacian.dtype - def transport_from_potentials( - self, f: jnp.ndarray, g: jnp.ndarray - ) -> jnp.ndarray: + def transport_from_potentials(self, f: 
jax.Array, g: jax.Array) -> jax.Array: """Not implemented.""" raise ValueError("Not implemented.") def apply_transport_from_potentials( self, - f: jnp.ndarray, - g: jnp.ndarray, - vec: jnp.ndarray, + f: jax.Array, + g: jax.Array, + vec: jax.Array, axis: int = 0 - ) -> jnp.ndarray: + ) -> jax.Array: """Since applying from potentials is not feasible in grids, use scalings.""" u, v = self.scaling_from_potential(f), self.scaling_from_potential(g) return self.apply_transport_from_scalings(u, v, vec, axis=axis) def marginal_from_potentials( self, - f: jnp.ndarray, - g: jnp.ndarray, + f: jax.Array, + g: jax.Array, axis: int = 0, - ) -> jnp.ndarray: + ) -> jax.Array: """Not implemented.""" raise ValueError("Not implemented.") diff --git a/src/ott/geometry/grid.py b/src/ott/geometry/grid.py index fd64500c9..3401f52c7 100644 --- a/src/ott/geometry/grid.py +++ b/src/ott/geometry/grid.py @@ -71,7 +71,7 @@ class Grid(geometry.Geometry): def __init__( self, - x: Optional[Sequence[jnp.ndarray]] = None, + x: Optional[Sequence[jax.Array]] = None, grid_size: Optional[Sequence[int]] = None, cost_fns: Optional[Sequence[costs.CostFn]] = None, num_a: Optional[int] = None, @@ -146,12 +146,12 @@ def is_symmetric(self) -> bool: # noqa: D102 # Reimplemented functions to be used in regularized OT def apply_lse_kernel( self, - f: jnp.ndarray, - g: jnp.ndarray, + f: jax.Array, + g: jax.Array, eps: float, - vec: Optional[jnp.ndarray] = None, + vec: Optional[jax.Array] = None, axis: int = 0 - ) -> jnp.ndarray: + ) -> jax.Array: """Apply grid kernel in log space. See notes in parent class for use case. Reshapes vector inputs below as grids, applies kernels onto each slice, and @@ -160,10 +160,10 @@ def apply_lse_kernel( More implementation details in :cite:`schmitz:18`. 
Args: - f: jnp.ndarray, a vector of potentials - g: jnp.ndarray, a vector of potentials + f: jax.Array, a vector of potentials + g: jax.Array, a vector of potentials eps: float, regularization strength - vec: jnp.ndarray, if needed, a vector onto which apply the kernel weighted + vec: jax.Array, if needed, a vector onto which apply the kernel weighted by f and g. axis: axis (0 or 1) along which summation should be carried out. @@ -209,8 +209,8 @@ def _apply_lse_kernel_one_dimension(self, dimension, f, g, eps, vec=None): return jnp.transpose(softmax_res, indices), None def _apply_cost_to_vec( - self, vec: jnp.ndarray, axis: int = 0, fn=None - ) -> jnp.ndarray: + self, vec: jax.Array, axis: int = 0, fn=None + ) -> jax.Array: r"""Apply grid's cost matrix (without instantiating it) to a vector. The `apply_cost` operation on grids rests on the following identity. @@ -229,13 +229,13 @@ def _apply_cost_to_vec( summation while keeping dimensions. Args: - vec: jnp.ndarray, flat vector of total size prod(grid_size). + vec: jax.Array, flat vector of total size prod(grid_size). axis: axis 0 if applying transpose costs, 1 if using the original cost. fn: function optionally applied to cost matrix element-wise, before the dot product. Returns: - A jnp.ndarray corresponding to cost x matrix + A jax.Array corresponding to cost x matrix """ vec = jnp.reshape(vec, self.grid_size) accum_vec = jnp.zeros_like(vec) @@ -255,10 +255,10 @@ def _apply_cost_to_vec( def apply_kernel( self, - scaling: jnp.ndarray, + scaling: jax.Array, eps: Optional[float] = None, axis: Optional[int] = None - ) -> jnp.ndarray: + ) -> jax.Array: """Apply grid kernel on scaling vector. See notes in parent class for use. @@ -269,7 +269,7 @@ def apply_kernel( More implementation details in :cite:`schmitz:18`, Args: - scaling: jnp.ndarray, a vector of scaling (>0) values. + scaling: jax.Array, a vector of scaling (>0) values. 
eps: float, regularization strength axis: axis (0 or 1) along which summation should be carried out. @@ -289,7 +289,7 @@ def apply_kernel( return scaling.ravel() def transport_from_potentials( - self, f: jnp.ndarray, g: jnp.ndarray, axis: int = 0 + self, f: jax.Array, g: jax.Array, axis: int = 0 ) -> NoReturn: """Not implemented, use :meth:`apply_transport_from_potentials` instead.""" raise ValueError( @@ -300,7 +300,7 @@ def transport_from_potentials( ) def transport_from_scalings( - self, f: jnp.ndarray, g: jnp.ndarray, axis: int = 0 + self, f: jax.Array, g: jax.Array, axis: int = 0 ) -> NoReturn: """Not implemented, use :meth:`apply_transport_from_scalings` instead.""" raise ValueError( @@ -311,15 +311,15 @@ def transport_from_scalings( ) def subset( - self, src_ixs: Optional[jnp.ndarray], tgt_ixs: Optional[jnp.ndarray] + self, src_ixs: Optional[jax.Array], tgt_ixs: Optional[jax.Array] ) -> NoReturn: """Not implemented.""" raise NotImplementedError("Subsetting is not implemented for grids.") def mask( self, - src_mask: Optional[jnp.ndarray], - tgt_mask: Optional[jnp.ndarray], + src_mask: Optional[jax.Array], + tgt_mask: Optional[jax.Array], mask_value: float = 0., ) -> NoReturn: """Not implemented.""" diff --git a/src/ott/geometry/low_rank.py b/src/ott/geometry/low_rank.py index e759b4cb9..750d8db62 100644 --- a/src/ott/geometry/low_rank.py +++ b/src/ott/geometry/low_rank.py @@ -33,8 +33,8 @@ class LRCGeometry(geometry.Geometry): if :math:`C = AB^T` and :math:`D = EF^T` then :math:`C + D = [A,E][B,F]^T` Args: - cost_1: jnp.ndarray[num_a, r] - cost_2: jnp.ndarray[num_b, r] + cost_1: jax.Array[num_a, r] + cost_2: jax.Array[num_b, r] bias: constant added to entire cost matrix. scale: Value used to rescale the factors of the low-rank geometry. scale_cost: option to rescale the cost matrix. 
Implemented scalings are @@ -51,8 +51,8 @@ class LRCGeometry(geometry.Geometry): def __init__( self, - cost_1: jnp.ndarray, - cost_2: jnp.ndarray, + cost_1: jax.Array, + cost_2: jax.Array, bias: float = 0.0, scale_factor: float = 1.0, scale_cost: Union[bool, int, float, Literal["mean", "max_bound", @@ -69,13 +69,13 @@ def __init__( self.batch_size = batch_size @property - def cost_1(self) -> jnp.ndarray: + def cost_1(self) -> jax.Array: """First factor of the :attr:`cost_matrix`.""" scale_factor = jnp.sqrt(self._scale_factor * self.inv_scale_cost) return scale_factor * self._cost_1 @property - def cost_2(self) -> jnp.ndarray: + def cost_2(self) -> jax.Array: """Second factor of the :attr:`cost_matrix`.""" scale_factor = jnp.sqrt(self._scale_factor * self.inv_scale_cost) return scale_factor * self._cost_2 @@ -90,7 +90,7 @@ def cost_rank(self) -> int: # noqa: D102 return self._cost_1.shape[1] @property - def cost_matrix(self) -> jnp.ndarray: + def cost_matrix(self) -> jax.Array: """Materialize the cost matrix.""" return jnp.matmul(self.cost_1, self.cost_2.T) + self.bias @@ -124,7 +124,7 @@ def inv_scale_cost(self) -> float: # noqa: D102 return 1.0 / self.compute_max_cost() raise ValueError(f"Scaling {self._scale_cost} not implemented.") - def apply_square_cost(self, arr: jnp.ndarray, axis: int = 0) -> jnp.ndarray: + def apply_square_cost(self, arr: jax.Array, axis: int = 0) -> jax.Array: """Apply elementwise-square of cost matrix to array (vector or matrix).""" (n, m), r = self.shape, self.cost_rank # When applying square of a LRCGeometry, one can either elementwise square @@ -142,15 +142,15 @@ def apply_square_cost(self, arr: jnp.ndarray, axis: int = 0) -> jnp.ndarray: def _apply_cost_to_vec( self, - vec: jnp.ndarray, + vec: jax.Array, axis: int = 0, - fn: Optional[Callable[[jnp.ndarray], jnp.ndarray]] = None, + fn: Optional[Callable[[jax.Array], jax.Array]] = None, is_linear: bool = False, - ) -> jnp.ndarray: + ) -> jax.Array: """Apply [num_a, num_b] fn(cost) (or 
transpose) to vector. Args: - vec: jnp.ndarray [num_a,] ([num_b,] if axis=1) vector + vec: jax.Array [num_a,] ([num_b,] if axis=1) vector axis: axis on which the reduction is done. fn: function optionally applied to cost matrix element-wise, before the doc product @@ -159,12 +159,12 @@ def _apply_cost_to_vec( for a heuristic to help determine if a function is linear. Returns: - A jnp.ndarray corresponding to cost x vector + A jax.Array corresponding to cost x vector """ def linear_apply( - vec: jnp.ndarray, axis: int, fn: Callable[[jnp.ndarray], jnp.ndarray] - ) -> jnp.ndarray: + vec: jax.Array, axis: int, fn: Callable[[jax.Array], jax.Array] + ) -> jax.Array: c1 = self.cost_1 if axis == 1 else self.cost_2 c2 = self.cost_2 if axis == 1 else self.cost_1 c2 = fn(c2) if fn is not None else c2 @@ -241,14 +241,14 @@ def can_LRC(self): # noqa: D102 return True def subset( # noqa: D102 - self, src_ixs: Optional[jnp.ndarray], tgt_ixs: Optional[jnp.ndarray], + self, src_ixs: Optional[jax.Array], tgt_ixs: Optional[jax.Array], **kwargs: Any ) -> "LRCGeometry": def subset_fn( - arr: Optional[jnp.ndarray], - ixs: Optional[jnp.ndarray], - ) -> jnp.ndarray: + arr: Optional[jax.Array], + ixs: Optional[jax.Array], + ) -> jax.Array: return arr if arr is None or ixs is None else arr[jnp.atleast_1d(ixs)] return self._mask_subset_helper( @@ -257,15 +257,15 @@ def subset_fn( def mask( # noqa: D102 self, - src_mask: Optional[jnp.ndarray], - tgt_mask: Optional[jnp.ndarray], + src_mask: Optional[jax.Array], + tgt_mask: Optional[jax.Array], mask_value: float = 0., ) -> "LRCGeometry": def mask_fn( - arr: Optional[jnp.ndarray], - mask: Optional[jnp.ndarray], - ) -> Optional[jnp.ndarray]: + arr: Optional[jax.Array], + mask: Optional[jax.Array], + ) -> Optional[jax.Array]: if arr is None or mask is None: return arr return jnp.where(mask[:, None], arr, mask_value) @@ -278,11 +278,11 @@ def mask_fn( def _mask_subset_helper( self, - src_ixs: Optional[jnp.ndarray], - tgt_ixs: Optional[jnp.ndarray], 
+ src_ixs: Optional[jax.Array], + tgt_ixs: Optional[jax.Array], *, - fn: Callable[[Optional[jnp.ndarray], Optional[jnp.ndarray]], - Optional[jnp.ndarray]], + fn: Callable[[Optional[jax.Array], Optional[jax.Array]], + Optional[jax.Array]], propagate_mask: bool, **kwargs: Any, ) -> "LRCGeometry": diff --git a/src/ott/geometry/pointcloud.py b/src/ott/geometry/pointcloud.py index e7f46a020..c5d48a096 100644 --- a/src/ott/geometry/pointcloud.py +++ b/src/ott/geometry/pointcloud.py @@ -56,8 +56,8 @@ class PointCloud(geometry.Geometry): def __init__( self, - x: jnp.ndarray, - y: Optional[jnp.ndarray] = None, + x: jax.Array, + y: Optional[jax.Array] = None, cost_fn: Optional[costs.CostFn] = None, batch_size: Optional[int] = None, scale_cost: Union[bool, int, float, @@ -77,13 +77,13 @@ def __init__( self._scale_cost = "mean" if scale_cost is True else scale_cost @property - def _norm_x(self) -> Union[float, jnp.ndarray]: + def _norm_x(self) -> Union[float, jax.Array]: if self._axis_norm == 0: return self.cost_fn.norm(self.x) return 0. @property - def _norm_y(self) -> Union[float, jnp.ndarray]: + def _norm_y(self) -> Union[float, jax.Array]: if self._axis_norm == 0: return self.cost_fn.norm(self.y) return 0. 
@@ -98,14 +98,14 @@ def _check_LRC_dim(self): return n * m > (n + m) * d @property - def cost_matrix(self) -> Optional[jnp.ndarray]: # noqa: D102 + def cost_matrix(self) -> Optional[jax.Array]: # noqa: D102 if self.is_online: return None cost_matrix = self._compute_cost_matrix() return cost_matrix * self.inv_scale_cost @property - def kernel_matrix(self) -> Optional[jnp.ndarray]: # noqa: D102 + def kernel_matrix(self) -> Optional[jax.Array]: # noqa: D102 if self.is_online: return None return jnp.exp(-self.cost_matrix / self.epsilon) @@ -183,7 +183,7 @@ def inv_scale_cost(self) -> float: # noqa: D102 ) raise ValueError(f"Scaling {self._scale_cost} not implemented.") - def _compute_cost_matrix(self) -> jnp.ndarray: + def _compute_cost_matrix(self) -> jax.Array: cost_matrix = self.cost_fn.all_pairs_pairwise(self.x, self.y) if self._axis_norm is not None: cost_matrix += self._norm_x[:, jnp.newaxis] + self._norm_y[jnp.newaxis, :] @@ -191,12 +191,12 @@ def _compute_cost_matrix(self) -> jnp.ndarray: def apply_lse_kernel( # noqa: D102 self, - f: jnp.ndarray, - g: jnp.ndarray, + f: jax.Array, + g: jax.Array, eps: float, - vec: Optional[jnp.ndarray] = None, + vec: Optional[jax.Array] = None, axis: int = 0 - ) -> jnp.ndarray: + ) -> jax.Array: def body0(carry, i: int): f, g, eps, vec = carry @@ -278,10 +278,10 @@ def finalize(i: int): def apply_kernel( # noqa: D102 self, - scaling: jnp.ndarray, + scaling: jax.Array, eps: Optional[float] = None, axis: int = 0 - ) -> jnp.ndarray: + ) -> jax.Array: if eps is None: eps = self.epsilon @@ -303,8 +303,8 @@ def apply_kernel( # noqa: D102 ) def transport_from_potentials( # noqa: D102 - self, f: jnp.ndarray, g: jnp.ndarray - ) -> jnp.ndarray: + self, f: jax.Array, g: jax.Array + ) -> jax.Array: if not self.is_online: return super().transport_from_potentials(f, g) transport = jax.vmap( @@ -317,8 +317,8 @@ def transport_from_potentials( # noqa: D102 ) def transport_from_scalings( # noqa: D102 - self, u: jnp.ndarray, v: jnp.ndarray - ) -> 
jnp.ndarray: + self, u: jax.Array, v: jax.Array + ) -> jax.Array: if not self.is_online: return super().transport_from_scalings(u, v) transport = jax.vmap( @@ -342,11 +342,11 @@ def transport_from_scalings( # noqa: D102 def apply_cost( self, - arr: jnp.ndarray, + arr: jax.Array, axis: int = 0, - fn: Optional[Callable[[jnp.ndarray], jnp.ndarray]] = None, + fn: Optional[Callable[[jax.Array], jax.Array]] = None, is_linear: bool = False, - ) -> jnp.ndarray: + ) -> jax.Array: """Apply cost matrix to array (vector or matrix). This function applies the geometry's cost matrix, to perform either @@ -356,7 +356,7 @@ def apply_cost( application of fn to each entry of the :attr:`cost_matrix`. Args: - arr: jnp.ndarray [num_a or num_b, batch], vector that will be multiplied + arr: jax.Array [num_a or num_b, batch], vector that will be multiplied by the cost matrix. axis: standard cost matrix if axis=1, transpose if 0. fn: function optionally applied to cost matrix element-wise, before the @@ -367,7 +367,7 @@ def apply_cost( for a heuristic to help determine if a function is linear. Returns: - A jnp.ndarray, [num_b, batch] if axis=0 or [num_a, batch] if axis=1 + A jax.Array, [num_b, batch] if axis=0 or [num_a, batch] if axis=1 """ # switch to efficient computation for the squared euclidean case. 
if self.is_squared_euclidean and (fn is None or is_linear): @@ -375,9 +375,7 @@ def apply_cost( return self._apply_cost(arr, axis, fn=fn) - def _apply_cost( - self, arr: jnp.ndarray, axis: int = 0, fn=None - ) -> jnp.ndarray: + def _apply_cost(self, arr: jax.Array, axis: int = 0, fn=None) -> jax.Array: """See :meth:`apply_cost`.""" if not self.is_online: return super().apply_cost(arr, axis, fn) @@ -401,24 +399,24 @@ def _apply_cost( def vec_apply_cost( self, - arr: jnp.ndarray, + arr: jax.Array, axis: int = 0, - fn: Optional[Callable[[jnp.ndarray], jnp.ndarray]] = None - ) -> jnp.ndarray: + fn: Optional[Callable[[jax.Array], jax.Array]] = None + ) -> jax.Array: """Apply the geometry's cost matrix in a vectorized way. This function can be used when the cost matrix is squared euclidean and ``fn`` is a linear function. Args: - arr: jnp.ndarray [num_a or num_b, p], vector that will be multiplied + arr: jax.Array [num_a or num_b, p], vector that will be multiplied by the cost matrix. axis: standard cost matrix if axis=1, transport if 0. fn: function optionally applied to cost matrix element-wise, before the application. Returns: - A jnp.ndarray, [num_b, p] if axis=0 or [num_a, p] if axis=1 + A jax.Array, [num_b, p] if axis=0 or [num_a, p] if axis=1 """ assert self.is_squared_euclidean, "Cost matrix is not a squared Euclidean." rank = arr.ndim @@ -434,7 +432,7 @@ def vec_apply_cost( applied_cost = fn(applied_cost) return self.inv_scale_cost * applied_cost - def _leading_slice(self, t: jnp.ndarray, i: int) -> jnp.ndarray: + def _leading_slice(self, t: jax.Array, i: int) -> jax.Array: start_indices = [i * self.batch_size] + (t.ndim - 1) * [0] slice_sizes = [self.batch_size] + list(t.shape[1:]) return jax.lax.dynamic_slice(t, start_indices, slice_sizes) @@ -525,18 +523,18 @@ def finalize(i: int): f"Scaling method {summary} does not exist for online mode." 
) - def barycenter(self, weights: jnp.ndarray) -> jnp.ndarray: + def barycenter(self, weights: jax.Array) -> jax.Array: """Compute barycenter of points in self.x using weights.""" return self.cost_fn.barycenter(self.x, weights)[0] @classmethod def prepare_divergences( cls, - x: jnp.ndarray, - y: jnp.ndarray, + x: jax.Array, + y: jax.Array, static_b: bool = False, - src_mask: Optional[jnp.ndarray] = None, - tgt_mask: Optional[jnp.ndarray] = None, + src_mask: Optional[jax.Array] = None, + tgt_mask: Optional[jax.Array] = None, **kwargs: Any ) -> Tuple["PointCloud", ...]: """Instantiate the geometries used for a divergence computation.""" @@ -640,14 +638,14 @@ def _sqeucl_to_lr(self, scale: float = 1.0) -> low_rank.LRCGeometry: ) def subset( # noqa: D102 - self, src_ixs: Optional[jnp.ndarray], tgt_ixs: Optional[jnp.ndarray], + self, src_ixs: Optional[jax.Array], tgt_ixs: Optional[jax.Array], **kwargs: Any ) -> "PointCloud": def subset_fn( - arr: Optional[jnp.ndarray], - ixs: Optional[jnp.ndarray], - ) -> jnp.ndarray: + arr: Optional[jax.Array], + ixs: Optional[jax.Array], + ) -> jax.Array: return arr if arr is None or ixs is None else arr[jnp.atleast_1d(ixs)] return self._mask_subset_helper( @@ -656,15 +654,15 @@ def subset_fn( def mask( # noqa: D102 self, - src_mask: Optional[jnp.ndarray], - tgt_mask: Optional[jnp.ndarray], + src_mask: Optional[jax.Array], + tgt_mask: Optional[jax.Array], mask_value: float = 0., ) -> "PointCloud": def mask_fn( - arr: Optional[jnp.ndarray], - mask: Optional[jnp.ndarray], - ) -> Optional[jnp.ndarray]: + arr: Optional[jax.Array], + mask: Optional[jax.Array], + ) -> Optional[jax.Array]: if arr is None or mask is None: return arr return jnp.where(mask[:, None], arr, mask_value) @@ -677,11 +675,11 @@ def mask_fn( def _mask_subset_helper( self, - src_ixs: Optional[jnp.ndarray], - tgt_ixs: Optional[jnp.ndarray], + src_ixs: Optional[jax.Array], + tgt_ixs: Optional[jax.Array], *, - fn: Callable[[Optional[jnp.ndarray], Optional[jnp.ndarray]], - 
Optional[jnp.ndarray]], + fn: Callable[[Optional[jax.Array], Optional[jax.Array]], + Optional[jax.Array]], propagate_mask: bool, **kwargs: Any, ) -> "PointCloud": @@ -767,18 +765,18 @@ def _apply_cost_xy(x, y, norm_x, norm_y, vec, cost_fn, scale_cost, fn=None): fn(cost) matrix (or transpose) to vector. Args: - x: jnp.ndarray [num_a, d], first pointcloud - y: jnp.ndarray [num_b, d], second pointcloud - norm_x: jnp.ndarray [num_a,], (squared) norm as defined in by cost_fn - norm_y: jnp.ndarray [num_b,], (squared) norm as defined in by cost_fn - vec: jnp.ndarray [num_a,] ([num_b,] if axis=1 from `apply_cost`) vector + x: jax.Array [num_a, d], first pointcloud + y: jax.Array [num_b, d], second pointcloud + norm_x: jax.Array [num_a,], (squared) norm as defined in by cost_fn + norm_y: jax.Array [num_b,], (squared) norm as defined in by cost_fn + vec: jax.Array [num_a,] ([num_b,] if axis=1 from `apply_cost`) vector cost_fn: a CostFn function between two points in dimension d. scale_cost: scaling factor of the cost matrix. fn: function optionally applied to cost matrix element-wise, before the apply. 
Returns: - A jnp.ndarray corresponding to cost x vector + A jax.Array corresponding to cost x vector """ c = _cost(x, y, norm_x, norm_y, cost_fn, scale_cost) return jnp.dot(c, vec) if fn is None else jnp.dot(fn(c), vec) diff --git a/src/ott/geometry/segment.py b/src/ott/geometry/segment.py index 20a1ee92b..5e2c764c8 100644 --- a/src/ott/geometry/segment.py +++ b/src/ott/geometry/segment.py @@ -21,15 +21,15 @@ def segment_point_cloud( - x: jnp.ndarray, - a: Optional[jnp.ndarray] = None, + x: jax.Array, + a: Optional[jax.Array] = None, num_segments: Optional[int] = None, max_measure_size: Optional[int] = None, - segment_ids: Optional[jnp.ndarray] = None, + segment_ids: Optional[jax.Array] = None, indices_are_sorted: bool = False, num_per_segment: Optional[Tuple[int, ...]] = None, - padding_vector: Optional[jnp.ndarray] = None -) -> Tuple[jnp.ndarray, jnp.ndarray]: + padding_vector: Optional[jax.Array] = None +) -> Tuple[jax.Array, jax.Array]: """Segment and pad as needed the entries of a point cloud. 
There are two interfaces: @@ -129,21 +129,20 @@ def segment_point_cloud( def _segment_interface( - x: jnp.ndarray, - y: jnp.ndarray, - eval_fn: Callable[[jnp.ndarray, jnp.ndarray, jnp.ndarray, jnp.ndarray], - jnp.ndarray], + x: jax.Array, + y: jax.Array, + eval_fn: Callable[[jax.Array, jax.Array, jax.Array, jax.Array], jax.Array], num_segments: Optional[int] = None, max_measure_size: Optional[int] = None, - segment_ids_x: Optional[jnp.ndarray] = None, - segment_ids_y: Optional[jnp.ndarray] = None, + segment_ids_x: Optional[jax.Array] = None, + segment_ids_y: Optional[jax.Array] = None, indices_are_sorted: bool = False, - num_per_segment_x: Optional[jnp.ndarray] = None, - num_per_segment_y: Optional[jnp.ndarray] = None, - weights_x: Optional[jnp.ndarray] = None, - weights_y: Optional[jnp.ndarray] = None, - padding_vector: Optional[jnp.ndarray] = None, -) -> jnp.ndarray: + num_per_segment_x: Optional[jax.Array] = None, + num_per_segment_y: Optional[jax.Array] = None, + weights_x: Optional[jax.Array] = None, + weights_y: Optional[jax.Array] = None, + padding_vector: Optional[jax.Array] = None, +) -> jax.Array: """Wrapper to segment two point clouds and return parallel evaluations. Utility function that segments two point clouds using the approach outlined diff --git a/src/ott/initializers/linear/initializers.py b/src/ott/initializers/linear/initializers.py index f3ba93321..58744cfb0 100644 --- a/src/ott/initializers/linear/initializers.py +++ b/src/ott/initializers/linear/initializers.py @@ -37,7 +37,7 @@ def init_dual_a( ot_prob: linear_problem.LinearProblem, lse_mode: bool, rng: Optional[jax.Array] = None, - ) -> jnp.ndarray: + ) -> jax.Array: """Initialize Sinkhorn potential/scaling f_u. Args: @@ -55,7 +55,7 @@ def init_dual_b( ot_prob: linear_problem.LinearProblem, lse_mode: bool, rng: Optional[jax.Array] = None, - ) -> jnp.ndarray: + ) -> jax.Array: """Initialize Sinkhorn potential/scaling g_v. 
Args: @@ -70,11 +70,11 @@ def init_dual_b( def __call__( self, ot_prob: linear_problem.LinearProblem, - a: Optional[jnp.ndarray], - b: Optional[jnp.ndarray], + a: Optional[jax.Array], + b: Optional[jax.Array], lse_mode: bool, rng: Optional[jax.Array] = None, - ) -> Tuple[jnp.ndarray, jnp.ndarray]: + ) -> Tuple[jax.Array, jax.Array]: """Initialize Sinkhorn potentials/scalings f_u and g_v. Args: @@ -129,7 +129,7 @@ def init_dual_a( # noqa: D102 ot_prob: linear_problem.LinearProblem, lse_mode: bool, rng: Optional[jax.Array] = None, - ) -> jnp.ndarray: + ) -> jax.Array: del rng return jnp.zeros_like(ot_prob.a) if lse_mode else jnp.ones_like(ot_prob.a) @@ -138,7 +138,7 @@ def init_dual_b( # noqa: D102 ot_prob: linear_problem.LinearProblem, lse_mode: bool, rng: Optional[jax.Array] = None, - ) -> jnp.ndarray: + ) -> jax.Array: del rng return jnp.zeros_like(ot_prob.b) if lse_mode else jnp.ones_like(ot_prob.b) @@ -159,7 +159,7 @@ def init_dual_a( # noqa: D102 ot_prob: linear_problem.LinearProblem, lse_mode: bool, rng: Optional[jax.Array] = None, - ) -> jnp.ndarray: + ) -> jax.Array: # import Gaussian here due to circular imports from ott.tools.gaussian_mixture import gaussian @@ -207,8 +207,8 @@ def __init__( self.vectorized_update = vectorized_update def _init_sorting_dual( - self, modified_cost: jnp.ndarray, init_f: jnp.ndarray - ) -> jnp.ndarray: + self, modified_cost: jax.Array, init_f: jax.Array + ) -> jax.Array: """Run DualSort algorithm. 
Args: @@ -221,15 +221,15 @@ def _init_sorting_dual( """ def body_fn( - state: Tuple[jnp.ndarray, float, int] - ) -> Tuple[jnp.ndarray, float, int]: + state: Tuple[jax.Array, float, int] + ) -> Tuple[jax.Array, float, int]: prev_f, _, it = state new_f = fn(prev_f, modified_cost) diff = jnp.sum((new_f - prev_f) ** 2) it += 1 return new_f, diff, it - def cond_fn(state: Tuple[jnp.ndarray, float, int]) -> bool: + def cond_fn(state: Tuple[jax.Array, float, int]) -> bool: _, diff, it = state return jnp.logical_and(diff > self.tolerance, it < self.max_iter) @@ -246,8 +246,8 @@ def init_dual_a( ot_prob: linear_problem.LinearProblem, lse_mode: bool, rng: Optional[jax.Array] = None, - init_f: Optional[jnp.ndarray] = None, - ) -> jnp.ndarray: + init_f: Optional[jax.Array] = None, + ) -> jax.Array: """Apply DualSort algorithm. Args: @@ -325,7 +325,7 @@ def init_dual_a( # noqa: D102 ot_prob: linear_problem.LinearProblem, lse_mode: bool, rng: Optional[jax.Array] = None, - ) -> jnp.ndarray: + ) -> jax.Array: from ott.solvers import linear assert isinstance( @@ -373,9 +373,7 @@ def tree_flatten(self) -> Tuple[Sequence[Any], Dict[str, Any]]: # noqa: D102 }) -def _vectorized_update( - f: jnp.ndarray, modified_cost: jnp.ndarray -) -> jnp.ndarray: +def _vectorized_update(f: jax.Array, modified_cost: jax.Array) -> jax.Array: """Inner loop DualSort Update. Args: @@ -388,9 +386,7 @@ def _vectorized_update( return jnp.min(modified_cost + f[None, :], axis=1) -def _coordinate_update( - f: jnp.ndarray, modified_cost: jnp.ndarray -) -> jnp.ndarray: +def _coordinate_update(f: jax.Array, modified_cost: jax.Array) -> jax.Array: """Coordinate-wise updates within inner loop. Args: @@ -401,7 +397,7 @@ def _coordinate_update( updated potential vector, f. 
""" - def body_fn(i: int, f: jnp.ndarray) -> jnp.ndarray: + def body_fn(i: int, f: jax.Array) -> jax.Array: new_f = jnp.min(modified_cost[i, :] + f) return f.at[i].set(new_f) diff --git a/src/ott/initializers/linear/initializers_lr.py b/src/ott/initializers/linear/initializers_lr.py index 5c2302156..9eb8e1231 100644 --- a/src/ott/initializers/linear/initializers_lr.py +++ b/src/ott/initializers/linear/initializers_lr.py @@ -69,9 +69,9 @@ def init_q( ot_prob: Problem_t, rng: jax.Array, *, - init_g: jnp.ndarray, + init_g: jax.Array, **kwargs: Any, - ) -> jnp.ndarray: + ) -> jax.Array: """Initialize the low-rank factor :math:`Q`. Args: @@ -90,9 +90,9 @@ def init_r( ot_prob: Problem_t, rng: jax.Array, *, - init_g: jnp.ndarray, + init_g: jax.Array, **kwargs: Any, - ) -> jnp.ndarray: + ) -> jax.Array: """Initialize the low-rank factor :math:`R`. Args: @@ -111,7 +111,7 @@ def init_g( ot_prob: Problem_t, rng: jax.Array, **kwargs: Any, - ) -> jnp.ndarray: + ) -> jax.Array: """Initialize the low-rank factor :math:`g`. Args: @@ -165,13 +165,13 @@ def from_solver( def __call__( self, ot_prob: Problem_t, - q: Optional[jnp.ndarray] = None, - r: Optional[jnp.ndarray] = None, - g: Optional[jnp.ndarray] = None, + q: Optional[jax.Array] = None, + r: Optional[jax.Array] = None, + g: Optional[jax.Array] = None, *, rng: Optional[jax.Array] = None, **kwargs: Any - ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]: + ) -> Tuple[jax.Array, jax.Array, jax.Array]: """Initialize the factors :math:`Q`, :math:`R` and :math:`g`. 
Args: @@ -234,9 +234,9 @@ def init_q( # noqa: D102 ot_prob: Problem_t, rng: jax.Array, *, - init_g: jnp.ndarray, + init_g: jax.Array, **kwargs: Any, - ) -> jnp.ndarray: + ) -> jax.Array: del kwargs, init_g a = ot_prob.a init_q = jnp.abs(jax.random.normal(rng, (a.shape[0], self.rank))) @@ -247,9 +247,9 @@ def init_r( # noqa: D102 ot_prob: Problem_t, rng: jax.Array, *, - init_g: jnp.ndarray, + init_g: jax.Array, **kwargs: Any, - ) -> jnp.ndarray: + ) -> jax.Array: del kwargs, init_g b = ot_prob.b init_r = jnp.abs(jax.random.normal(rng, (b.shape[0], self.rank))) @@ -260,7 +260,7 @@ def init_g( # noqa: D102 ot_prob: Problem_t, rng: jax.Array, **kwargs: Any, - ) -> jnp.ndarray: + ) -> jax.Array: del kwargs init_g = jnp.abs(jax.random.uniform(rng, (self.rank,))) + 1. return init_g / jnp.sum(init_g) @@ -278,10 +278,10 @@ class Rank2Initializer(LRInitializer): def _compute_factor( self, ot_prob: Problem_t, - init_g: jnp.ndarray, + init_g: jax.Array, *, which: Literal["q", "r"], - ) -> jnp.ndarray: + ) -> jax.Array: a, b = ot_prob.a, ot_prob.b marginal = a if which == "q" else b n, r = marginal.shape[0], self.rank @@ -307,9 +307,9 @@ def init_q( # noqa: D102 ot_prob: Problem_t, rng: jax.Array, *, - init_g: jnp.ndarray, + init_g: jax.Array, **kwargs: Any, - ) -> jnp.ndarray: + ) -> jax.Array: del rng, kwargs return self._compute_factor(ot_prob, init_g, which="q") @@ -318,9 +318,9 @@ def init_r( # noqa: D102 ot_prob: Problem_t, rng: jax.Array, *, - init_g: jnp.ndarray, + init_g: jax.Array, **kwargs: Any, - ) -> jnp.ndarray: + ) -> jax.Array: del rng, kwargs return self._compute_factor(ot_prob, init_g, which="r") @@ -329,7 +329,7 @@ def init_g( # noqa: D102 ot_prob: Problem_t, rng: jax.Array, **kwargs: Any, - ) -> jnp.ndarray: + ) -> jax.Array: del rng, kwargs return jnp.ones((self.rank,)) / self.rank @@ -364,7 +364,7 @@ def __init__( self._sinkhorn_kwargs = {} if sinkhorn_kwargs is None else sinkhorn_kwargs @staticmethod - def _extract_array(geom: geometry.Geometry, *, first: 
bool) -> jnp.ndarray: + def _extract_array(geom: geometry.Geometry, *, first: bool) -> jax.Array: if isinstance(geom, pointcloud.PointCloud): return geom.x if first else geom.y if isinstance(geom, low_rank.LRCGeometry): @@ -378,10 +378,10 @@ def _compute_factor( ot_prob: Problem_t, rng: jax.Array, *, - init_g: jnp.ndarray, + init_g: jax.Array, which: Literal["q", "r"], **kwargs: Any, - ) -> jnp.ndarray: + ) -> jax.Array: from ott.problems.linear import linear_problem from ott.problems.quadratic import quadratic_problem from ott.solvers.linear import sinkhorn @@ -420,9 +420,9 @@ def init_q( # noqa: D102 ot_prob: Problem_t, rng: jax.Array, *, - init_g: jnp.ndarray, + init_g: jax.Array, **kwargs: Any, - ) -> jnp.ndarray: + ) -> jax.Array: return self._compute_factor( ot_prob, rng, init_g=init_g, which="q", **kwargs ) @@ -432,9 +432,9 @@ def init_r( # noqa: D102 ot_prob: Problem_t, rng: jax.Array, *, - init_g: jnp.ndarray, + init_g: jax.Array, **kwargs: Any, - ) -> jnp.ndarray: + ) -> jax.Array: return self._compute_factor( ot_prob, rng, init_g=init_g, which="r", **kwargs ) @@ -444,7 +444,7 @@ def init_g( # noqa: D102 ot_prob: Problem_t, rng: jax.Array, **kwargs: Any, - ) -> jnp.ndarray: + ) -> jax.Array: del rng, kwargs return jnp.ones((self.rank,)) / self.rank @@ -498,14 +498,14 @@ def __init__( class Constants(NamedTuple): # noqa: D106 solver: "sinkhorn.Sinkhorn" geom: geometry.Geometry # (n, n) - marginal: jnp.ndarray # (n,) - g: jnp.ndarray # (r,) + marginal: jax.Array # (n,) + g: jax.Array # (r,) gamma: float threshold: float class State(NamedTuple): # noqa: D106 - factor: jnp.ndarray - criterions: jnp.ndarray + factor: jax.Array + criterions: jax.Array crossed_threshold: bool def _compute_factor( @@ -513,10 +513,10 @@ def _compute_factor( ot_prob: Problem_t, rng: jax.Array, *, - init_g: jnp.ndarray, + init_g: jax.Array, which: Literal["q", "r"], **kwargs: Any, - ) -> jnp.ndarray: + ) -> jax.Array: from ott.problems.linear import linear_problem from 
ott.problems.quadratic import quadratic_problem from ott.solvers.linear import sinkhorn diff --git a/src/ott/initializers/quadratic/initializers.py b/src/ott/initializers/quadratic/initializers.py index 795e81ccc..323570770 100644 --- a/src/ott/initializers/quadratic/initializers.py +++ b/src/ott/initializers/quadratic/initializers.py @@ -125,9 +125,7 @@ class QuadraticInitializer(BaseQuadraticInitializer): defaults to the product coupling :math:`ab^T`. """ - def __init__( - self, init_coupling: Optional[jnp.ndarray] = None, **kwargs: Any - ): + def __init__(self, init_coupling: Optional[jax.Array] = None, **kwargs: Any): super().__init__(**kwargs) self.init_coupling = init_coupling diff --git a/src/ott/math/fixed_point_loop.py b/src/ott/math/fixed_point_loop.py index 9034eba62..5c8b7b94d 100644 --- a/src/ott/math/fixed_point_loop.py +++ b/src/ott/math/fixed_point_loop.py @@ -179,7 +179,7 @@ def fixpoint_iter_bwd( # The tree may contain some python floats g_constants = jax.tree_util.tree_map( lambda x: jnp.zeros_like(x, dtype=x.dtype) - if isinstance(x, (np.ndarray, jnp.ndarray)) else 0, constants + if isinstance(x, (np.ndarray, jax.Array)) else 0, constants ) def bwd_cond_fn(iteration_g_gconst): diff --git a/src/ott/math/matrix_square_root.py b/src/ott/math/matrix_square_root.py index 4a0177780..5089f14a0 100644 --- a/src/ott/math/matrix_square_root.py +++ b/src/ott/math/matrix_square_root.py @@ -25,13 +25,13 @@ @functools.partial(jax.custom_vjp, nondiff_argnums=(1, 2, 3, 4, 5)) def sqrtm( - x: jnp.ndarray, + x: jax.Array, threshold: float = 1e-6, min_iterations: int = 0, inner_iterations: int = 10, max_iterations: int = 1000, regularization: float = 1e-6 -) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]: +) -> Tuple[jax.Array, jax.Array, jax.Array]: """Higham algorithm to compute matrix square root of p.d. matrix. See :cite:`higham:97`, eq. 
2.6b @@ -118,10 +118,10 @@ def new_err(x, norm_x, y): def solve_sylvester_bartels_stewart( - a: jnp.ndarray, - b: jnp.ndarray, - c: jnp.ndarray, -) -> jnp.ndarray: + a: jax.Array, + b: jax.Array, + c: jax.Array, +) -> jax.Array: """Solve the real Sylvester equation AX - XB = C using Bartels-Stewart.""" # See https://nhigham.com/2020/09/01/what-is-the-sylvester-equation/ for # discussion of the algorithm (but note that in the derivation, the sign on @@ -153,14 +153,13 @@ def solve_sylvester_bartels_stewart( def sqrtm_fwd( - x: jnp.ndarray, + x: jax.Array, threshold: float, min_iterations: int, inner_iterations: int, max_iterations: int, regularization: float, -) -> Tuple[Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray], Tuple[jnp.ndarray, - jnp.ndarray]]: +) -> Tuple[Tuple[jax.Array, jax.Array, jax.Array], Tuple[jax.Array, jax.Array]]: """Forward pass of custom VJP.""" sqrt_x, inv_sqrt_x, errors = sqrtm( x=x, @@ -179,9 +178,9 @@ def sqrtm_bwd( inner_iterations: int, max_iterations: int, regularization: float, - residual: Tuple[jnp.ndarray, jnp.ndarray], - cotangent: Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray], -) -> Tuple[jnp.ndarray]: + residual: Tuple[jax.Array, jax.Array], + cotangent: Tuple[jax.Array, jax.Array, jax.Array], +) -> Tuple[jax.Array]: """Compute the derivative by solving a Sylvester equation.""" del threshold, min_iterations, inner_iterations, \ max_iterations, regularization @@ -237,13 +236,13 @@ def sqrtm_bwd( @functools.partial(jax.custom_vjp, nondiff_argnums=(1, 2, 3, 4, 5)) def sqrtm_only( # noqa: D103 - x: jnp.ndarray, + x: jax.Array, threshold: float = 1e-6, min_iterations: int = 0, inner_iterations: int = 10, max_iterations: int = 1000, regularization: float = 1e-6 -) -> jnp.ndarray: +) -> jax.Array: return sqrtm( x, threshold, min_iterations, inner_iterations, max_iterations, regularization @@ -251,9 +250,9 @@ def sqrtm_only( # noqa: D103 def sqrtm_only_fwd( # noqa: D103 - x: jnp.ndarray, threshold: float, min_iterations: int, + x: jax.Array, 
threshold: float, min_iterations: int, inner_iterations: int, max_iterations: int, regularization: float -) -> Tuple[jnp.ndarray, jnp.ndarray]: +) -> Tuple[jax.Array, jax.Array]: sqrt_x = sqrtm( x, threshold, min_iterations, inner_iterations, max_iterations, regularization @@ -263,9 +262,9 @@ def sqrtm_only_fwd( # noqa: D103 def sqrtm_only_bwd( # noqa: D103 threshold: float, min_iterations: int, inner_iterations: int, - max_iterations: int, regularization: float, sqrt_x: jnp.ndarray, - cotangent: jnp.ndarray -) -> Tuple[jnp.ndarray]: + max_iterations: int, regularization: float, sqrt_x: jax.Array, + cotangent: jax.Array +) -> Tuple[jax.Array]: del threshold, min_iterations, inner_iterations, \ max_iterations, regularization vjp = jnp.swapaxes( @@ -283,13 +282,13 @@ def sqrtm_only_bwd( # noqa: D103 @functools.partial(jax.custom_vjp, nondiff_argnums=(1, 2, 3, 4, 5)) def inv_sqrtm_only( # noqa: D103 - x: jnp.ndarray, + x: jax.Array, threshold: float = 1e-6, min_iterations: int = 0, inner_iterations: int = 10, max_iterations: int = 1000, regularization: float = 1e-6 -) -> jnp.ndarray: +) -> jax.Array: return sqrtm( x, threshold, min_iterations, inner_iterations, max_iterations, regularization @@ -297,13 +296,13 @@ def inv_sqrtm_only( # noqa: D103 def inv_sqrtm_only_fwd( # noqa: D103 - x: jnp.ndarray, + x: jax.Array, threshold: float, min_iterations: int, inner_iterations: int, max_iterations: int, regularization: float, -) -> Tuple[jnp.ndarray, jnp.ndarray]: +) -> Tuple[jax.Array, jax.Array]: inv_sqrt_x = sqrtm( x, threshold, min_iterations, inner_iterations, max_iterations, regularization @@ -313,9 +312,9 @@ def inv_sqrtm_only_fwd( # noqa: D103 def inv_sqrtm_only_bwd( # noqa: D103 threshold: float, min_iterations: int, inner_iterations: int, - max_iterations: int, regularization: float, residual: jnp.ndarray, - cotangent: jnp.ndarray -) -> Tuple[jnp.ndarray]: + max_iterations: int, regularization: float, residual: jax.Array, + cotangent: jax.Array +) -> 
Tuple[jax.Array]: del threshold, min_iterations, inner_iterations, \ max_iterations, regularization diff --git a/src/ott/math/unbalanced_functions.py b/src/ott/math/unbalanced_functions.py index fc1aca9f3..2d7baebb7 100644 --- a/src/ott/math/unbalanced_functions.py +++ b/src/ott/math/unbalanced_functions.py @@ -13,31 +13,32 @@ # limitations under the License. from typing import Callable +import jax import jax.numpy as jnp -def phi_star(h: jnp.ndarray, rho: float) -> jnp.ndarray: +def phi_star(h: jax.Array, rho: float) -> jax.Array: """Legendre transform of KL, :cite:`sejourne:19`, p. 9.""" return rho * (jnp.exp(h / rho) - 1) -def derivative_phi_star(f: jnp.ndarray, rho: float) -> jnp.ndarray: +def derivative_phi_star(f: jax.Array, rho: float) -> jax.Array: """Derivative of Legendre transform of phi_starKL, see phi_star.""" # TODO(cuturi): use jax.grad directly. return jnp.exp(f / rho) def grad_of_marginal_fit( - c: jnp.ndarray, h: jnp.ndarray, tau: float, epsilon: float -) -> jnp.ndarray: + c: jax.Array, h: jax.Array, tau: float, epsilon: float +) -> jax.Array: """Compute grad of terms linked to marginals in objective. Computes gradient w.r.t. f ( or g) of terms in :cite:`sejourne:19`, left-hand-side of eq. 15 terms involving phi_star). Args: - c: jnp.ndarray, first target marginal (either a or b in practice) - h: jnp.ndarray, potential (either f or g in practice) + c: jax.Array, first target marginal (either a or b in practice) + h: jax.Array, potential (either f or g in practice) tau: float, strength (in ]0,1]) of regularizer w.r.t. 
marginal epsilon: regularization @@ -50,14 +51,14 @@ def grad_of_marginal_fit( return jnp.where(c > 0, c * derivative_phi_star(-h, r), 0.0) -def second_derivative_phi_star(f: jnp.ndarray, rho: float) -> jnp.ndarray: +def second_derivative_phi_star(f: jax.Array, rho: float) -> jax.Array: """Second Derivative of Legendre transform of KL, see phi_star.""" return jnp.exp(f / rho) / rho def diag_jacobian_of_marginal_fit( - c: jnp.ndarray, h: jnp.ndarray, tau: float, epsilon: float, - derivative: Callable[[jnp.ndarray, float], jnp.ndarray] + c: jax.Array, h: jax.Array, tau: float, epsilon: float, + derivative: Callable[[jax.Array, float], jax.Array] ): """Compute grad of terms linked to marginals in objective. @@ -65,8 +66,8 @@ def diag_jacobian_of_marginal_fit( left-hand-side of eq. 32 (terms involving phi_star) Args: - c: jnp.ndarray, first target marginal (either a or b in practice) - h: jnp.ndarray, potential (either f or g in practice) + c: jax.Array, first target marginal (either a or b in practice) + h: jax.Array, potential (either f or g in practice) tau: float, strength (in ]0,1]) of regularizer w.r.t. 
marginal epsilon: regularization derivative: Callable diff --git a/src/ott/math/utils.py b/src/ott/math/utils.py index 8e7ea90ee..188707c10 100644 --- a/src/ott/math/utils.py +++ b/src/ott/math/utils.py @@ -34,10 +34,10 @@ def safe_log( # noqa: D103 - x: jnp.ndarray, + x: jax.Array, *, eps: Optional[float] = None -) -> jnp.ndarray: +) -> jax.Array: if eps is None: eps = jnp.finfo(x.dtype).tiny return jnp.where(x > 0., jnp.log(x), jnp.log(eps)) @@ -46,11 +46,11 @@ def safe_log( # noqa: D103 @functools.partial(jax.custom_jvp, nondiff_argnums=[1, 2, 3]) @functools.partial(jax.jit, static_argnames=("ord", "axis", "keepdims")) def norm( - x: jnp.ndarray, + x: jax.Array, ord: Union[int, str, None] = None, axis: Union[None, Sequence[int], int] = None, keepdims: bool = False -) -> jnp.ndarray: +) -> jax.Array: """Computes order ord norm of vector, using `jnp.linalg` in forward pass. Evaluations of distances between a vector and itself using translation @@ -105,18 +105,18 @@ def norm_jvp(ord, axis, keepdims, primals, tangents): # TODO(michalk8): add axis argument -def kl(p: jnp.ndarray, q: jnp.ndarray) -> float: +def kl(p: jax.Array, q: jax.Array) -> float: """Kullback-Leibler divergence.""" return jnp.vdot(p, (safe_log(p) - safe_log(q))) -def gen_kl(p: jnp.ndarray, q: jnp.ndarray) -> float: +def gen_kl(p: jax.Array, q: jax.Array) -> float: """Generalized Kullback-Leibler divergence.""" return jnp.vdot(p, (safe_log(p) - safe_log(q))) + jnp.sum(q) - jnp.sum(p) # TODO(michalk8): add axis argument -def gen_js(p: jnp.ndarray, q: jnp.ndarray, c: float = 0.5) -> float: +def gen_js(p: jax.Array, q: jax.Array, c: float = 0.5) -> float: """Jensen-Shannon divergence.""" return c * (gen_kl(p, q) + gen_kl(q, p)) @@ -176,8 +176,8 @@ def logsumexp_jvp(axis, keepdims, return_sign, primals, tangents): @functools.partial(jax.custom_vjp, nondiff_argnums=(2,)) def softmin( - x: jnp.ndarray, gamma: float, axis: Optional[int] = None -) -> jnp.ndarray: + x: jax.Array, gamma: float, axis: 
Optional[int] = None +) -> jax.Array: r"""Soft-min operator. Args: @@ -205,8 +205,8 @@ def softmin( @functools.partial(jax.vmap, in_axes=[0, 0, None]) def barycentric_projection( - matrix: jnp.ndarray, y: jnp.ndarray, cost_fn: "costs.CostFn" -) -> jnp.ndarray: + matrix: jax.Array, y: jax.Array, cost_fn: "costs.CostFn" +) -> jax.Array: """Compute the barycentric projection of a matrix. Args: diff --git a/src/ott/neural/data/dataloaders.py b/src/ott/neural/data/dataloaders.py index acceb36c1..0ebfc77a0 100644 --- a/src/ott/neural/data/dataloaders.py +++ b/src/ott/neural/data/dataloaders.py @@ -26,7 +26,7 @@ class ConditionalDataLoader: #TODO(@MUCDK) uncomment, resolve installation issu # self.conditions = dataloaders.keys() # self.p = p - #def __next__(self) -> jnp.ndarray: + #def __next__(self) -> jax.Array: # self.rng, rng = jax.random.split(self.rng, 2) # condition = jax.random.choice(rng, self.conditions, p=self.p) # return next(self.dataloaders[condition]) diff --git a/src/ott/neural/models/conjugate_solvers.py b/src/ott/neural/models/conjugate_solvers.py index 0758cf1ad..4d3d8eea0 100644 --- a/src/ott/neural/models/conjugate_solvers.py +++ b/src/ott/neural/models/conjugate_solvers.py @@ -14,6 +14,7 @@ import abc from typing import Callable, Literal, NamedTuple, Optional +import jax import jax.numpy as jnp from jaxopt import LBFGS @@ -36,7 +37,7 @@ class ConjugateResults(NamedTuple): num_iter: the number of iterations taken by the solver """ val: float - grad: jnp.ndarray + grad: jax.Array num_iter: int @@ -50,9 +51,9 @@ class FenchelConjugateSolver(abc.ABC): @abc.abstractmethod def solve( self, - f: Callable[[jnp.ndarray], jnp.ndarray], - y: jnp.ndarray, - x_init: Optional[jnp.ndarray] = None + f: Callable[[jax.Array], jax.Array], + y: jax.Array, + x_init: Optional[jax.Array] = None ) -> ConjugateResults: """Solve for the conjugate. 
@@ -90,8 +91,8 @@ class FenchelConjugateLBFGS(FenchelConjugateSolver): def solve( # noqa: D102 self, - f: Callable[[jnp.ndarray], jnp.ndarray], - y: jnp.ndarray, + f: Callable[[jax.Array], jax.Array], + y: jax.Array, x_init: Optional[jnp.array] = None ) -> ConjugateResults: assert y.ndim == 1, y.ndim diff --git a/src/ott/neural/models/layers.py b/src/ott/neural/models/layers.py index dfd222c60..0eac7e626 100644 --- a/src/ott/neural/models/layers.py +++ b/src/ott/neural/models/layers.py @@ -40,9 +40,9 @@ class PositiveDense(nn.Module): bias_init: initializer function for the bias. """ dim_hidden: int - rectifier_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.softplus - inv_rectifier_fn: Callable[[jnp.ndarray], - jnp.ndarray] = lambda x: jnp.log(jnp.exp(x) - 1) + rectifier_fn: Callable[[jax.Array], jax.Array] = nn.softplus + inv_rectifier_fn: Callable[[jax.Array], + jax.Array] = lambda x: jnp.log(jnp.exp(x) - 1) use_bias: bool = True dtype: Any = jnp.float32 precision: Any = None @@ -51,7 +51,7 @@ class PositiveDense(nn.Module): bias_init: Callable[[PRNGKey, Shape, Dtype], Array] = nn.initializers.zeros @nn.compact - def __call__(self, inputs: jnp.ndarray) -> jnp.ndarray: + def __call__(self, inputs: jax.Array) -> jax.Array: """Applies a linear transformation to inputs along the last dimension. Args: @@ -99,7 +99,7 @@ class PosDefPotentials(nn.Module): bias_init: Callable[[PRNGKey, Shape, Dtype], Array] = nn.initializers.zeros @nn.compact - def __call__(self, inputs: jnp.ndarray) -> jnp.ndarray: + def __call__(self, inputs: jax.Array) -> jax.Array: """Apply a few quadratic forms. 
Args: diff --git a/src/ott/neural/models/models.py b/src/ott/neural/models/models.py index 853a1d69e..5ec8fb292 100644 --- a/src/ott/neural/models/models.py +++ b/src/ott/neural/models/models.py @@ -62,9 +62,9 @@ class ICNN(neuraldual.BaseW2NeuralDual): dim_hidden: Sequence[int] init_std: float = 1e-2 init_fn: Callable = jax.nn.initializers.normal - act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.relu + act_fn: Callable[[jax.Array], jax.Array] = nn.relu pos_weights: bool = True - gaussian_map_samples: Optional[Tuple[jnp.ndarray, jnp.ndarray]] = None + gaussian_map_samples: Optional[Tuple[jax.Array, jax.Array]] = None @property def is_potential(self) -> bool: # noqa: D102 @@ -146,8 +146,8 @@ def setup(self) -> None: # noqa: D102 @staticmethod def _compute_gaussian_map_params( - samples: Tuple[jnp.ndarray, jnp.ndarray] - ) -> Tuple[jnp.ndarray, jnp.ndarray]: + samples: Tuple[jax.Array, jax.Array] + ) -> Tuple[jax.Array, jax.Array]: from ott.tools.gaussian_mixture import gaussian source, target = samples g_s = gaussian.Gaussian.from_samples(source) @@ -160,13 +160,13 @@ def _compute_gaussian_map_params( @staticmethod def _compute_identity_map_params( input_dim: int - ) -> Tuple[jnp.ndarray, jnp.ndarray]: + ) -> Tuple[jax.Array, jax.Array]: A = jnp.eye(input_dim).reshape((1, input_dim, input_dim)) b = jnp.zeros((1, input_dim)) return A, b @nn.compact - def __call__(self, x: jnp.ndarray) -> float: # noqa: D102 + def __call__(self, x: jax.Array) -> float: # noqa: D102 z = self.act_fn(self.w_xs[0](x)) for i in range(self.num_hidden): z = jnp.add(self.w_zs[i](z), self.w_xs[i + 1](x)) @@ -189,10 +189,10 @@ class MLP(neuraldual.BaseW2NeuralDual): dim_hidden: Sequence[int] is_potential: bool = True - act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.leaky_relu + act_fn: Callable[[jax.Array], jax.Array] = nn.leaky_relu @nn.compact - def __call__(self, x: jnp.ndarray) -> jnp.ndarray: # noqa: D102 + def __call__(self, x: jax.Array) -> jax.Array: # noqa: D102 squeeze = x.ndim 
== 1 if squeeze: x = jnp.expand_dims(x, 0) @@ -289,8 +289,8 @@ def __init__( self.update_impl = self._get_update_fn() def update( - self, state: train_state.TrainState, a: jnp.ndarray, b: jnp.ndarray - ) -> Tuple[jnp.ndarray, jnp.ndarray, train_state.TrainState]: + self, state: train_state.TrainState, a: jax.Array, b: jax.Array + ) -> Tuple[jax.Array, jax.Array, train_state.TrainState]: r"""Update the meta model with the dual objective. The goal is for the model to match the optimal duals, i.e., @@ -329,7 +329,7 @@ def init_dual_a( # noqa: D102 ot_prob: "linear_problem.LinearProblem", lse_mode: bool, rng: Optional[jax.Array] = None, - ) -> jnp.ndarray: + ) -> jax.Array: del rng # Detect if the problem is batched. assert ot_prob.a.ndim in (1, 2) @@ -382,9 +382,9 @@ def update(state, a, b): return update def _compute_f( - self, a: jnp.ndarray, b: jnp.ndarray, - params: frozen_dict.FrozenDict[str, jnp.ndarray] - ) -> jnp.ndarray: + self, a: jax.Array, b: jax.Array, + params: frozen_dict.FrozenDict[str, jax.Array] + ) -> jax.Array: r"""Predict the optimal :math:`f` potential. 
Args: @@ -427,7 +427,7 @@ def __call__( x: jax.Array, condition: Optional[jax.Array] = None, keys_model: Optional[jax.Array] = None - ) -> jnp.ndarray: # noqa: D102): + ) -> jax.Array: # noqa: D102): pass @@ -439,7 +439,7 @@ class NeuralVectorField(BaseNeuralVectorField): t_embed_dim: Optional[int] = None joint_hidden_dim: Optional[int] = None num_layers_per_block: int = 3 - act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.silu + act_fn: Callable[[jax.Array], jax.Array] = nn.silu n_frequencies: int = 128 def time_encoder(self, t: jax.Array) -> jnp.array: @@ -554,12 +554,12 @@ class Rescaling_MLP(nn.Module): hidden_dim: int cond_dim: int is_potential: bool = False - act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.selu + act_fn: Callable[[jax.Array], jax.Array] = nn.selu @nn.compact def __call__( - self, x: jnp.ndarray, condition: Optional[jax.Array] - ) -> jnp.ndarray: # noqa: D102 + self, x: jax.Array, condition: Optional[jax.Array] + ) -> jax.Array: # noqa: D102 x = Block( dim=self.latent_embed_dim, out_dim=self.latent_embed_dim, diff --git a/src/ott/neural/solvers/base_solver.py b/src/ott/neural/solvers/base_solver.py index 66d3ecbef..69f510d81 100644 --- a/src/ott/neural/solvers/base_solver.py +++ b/src/ott/neural/solvers/base_solver.py @@ -81,10 +81,10 @@ def __init__(*args, **kwargs): def _resample_data( self, key: jax.random.KeyArray, - tmat: jnp.ndarray, - source_arrays: Tuple[jnp.ndarray, ...], - target_arrays: Tuple[jnp.ndarray, ...], - ) -> Tuple[jnp.ndarray, ...]: + tmat: jax.Array, + source_arrays: Tuple[jax.Array, ...], + target_arrays: Tuple[jax.Array, ...], + ) -> Tuple[jax.Array, ...]: """Resample a batch according to coupling `tmat`.""" tmat_flattened = tmat.flatten() indices = random.choice( @@ -101,10 +101,10 @@ def _resample_data( def _sample_conditional_indices_from_tmap( self, key: jax.random.PRNGKeyArray, - tmat: jnp.ndarray, - k_samples_per_x: Union[int, jnp.ndarray], - source_arrays: Tuple[jnp.ndarray, ...], - target_arrays: 
Tuple[jnp.ndarray, ...], + tmat: jax.Array, + k_samples_per_x: Union[int, jax.Array], + source_arrays: Tuple[jax.Array, ...], + target_arrays: Tuple[jax.Array, ...], *, source_is_balanced: bool, ) -> Tuple[jnp.array, jnp.array]: @@ -155,7 +155,7 @@ def _get_sinkhorn_match_fn( def match_pairs( x: jax.Array, y: jax.Array - ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray, jnp.ndarray]: + ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]: geom = pointcloud.PointCloud( x, y, epsilon=epsilon, scale_cost=scale_cost, cost_fn=cost_fn ) @@ -165,7 +165,7 @@ def match_pairs( def match_pairs_filtered( x_lin: jax.Array, x_quad: jax.Array, y_lin: jax.Array, y_quad: jax.Array - ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray, jnp.ndarray]: + ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]: geom = pointcloud.PointCloud( x_lin, y_lin, epsilon=epsilon, scale_cost=scale_cost, cost_fn=cost_fn ) @@ -208,9 +208,9 @@ def _get_gromov_match_fn( def match_pairs( x_lin: Optional[jax.Array], - x_quad: Tuple[jnp.ndarray, jnp.ndarray], + x_quad: Tuple[jax.Array, jax.Array], y_lin: Optional[jax.Array], - y_quad: Tuple[jnp.ndarray, jnp.ndarray], + y_quad: Tuple[jax.Array, jax.Array], ) -> Tuple[jnp.array, jnp.array]: geom_xx = pointcloud.PointCloud( x=x_quad, y=x_quad, cost_fn=x_cost_fn, scale_cost=x_scale_cost @@ -288,13 +288,13 @@ def _get_compute_unbalanced_marginals( scale_cost: Union[bool, int, float, Literal["mean", "max_cost", "median"]] = "mean", sinkhorn_kwargs: Dict[str, Any] = MappingProxyType({}), - ) -> Tuple[jnp.ndarray, jnp.ndarray]: + ) -> Tuple[jax.Array, jax.Array]: """Compute the unbalanced source and target marginals for a batch.""" @jax.jit def compute_unbalanced_marginals( - batch_source: jnp.ndarray, batch_target: jnp.ndarray - ) -> Tuple[jnp.ndarray, jnp.ndarray]: + batch_source: jax.Array, batch_target: jax.Array + ) -> Tuple[jax.Array, jax.Array]: geom = PointCloud( batch_source, batch_target, @@ -312,9 +312,9 @@ def compute_unbalanced_marginals( def 
_resample_unbalanced( self, key: jax.random.KeyArray, - batch: Tuple[jnp.ndarray, ...], - marginals: jnp.ndarray, - ) -> Tuple[jnp.ndarray, ...]: + batch: Tuple[jax.Array, ...], + marginals: jax.Array, + ) -> Tuple[jax.Array, ...]: """Resample a batch based upon marginals.""" indices = jax.random.choice( key, a=len(marginals), p=jnp.squeeze(marginals), shape=[len(marginals)] @@ -343,13 +343,12 @@ def _setup(self, source_dim: int, target_dim: int, cond_dim: int): def _get_step_fn(self) -> Callable: # type:ignore[type-arg] def loss_a_fn( - params_eta: Optional[jnp.ndarray], - apply_fn_eta: Callable[[Dict[str, jnp.ndarray], jnp.ndarray], - jnp.ndarray], - x: jnp.ndarray, - a: jnp.ndarray, + params_eta: Optional[jax.Array], + apply_fn_eta: Callable[[Dict[str, jax.Array], jax.Array], jax.Array], + x: jax.Array, + a: jax.Array, expectation_reweighting: float, - ) -> Tuple[float, jnp.ndarray]: + ) -> Tuple[float, jax.Array]: eta_predictions = apply_fn_eta({"params": params_eta}, x) return ( optax.l2_loss(eta_predictions[:, 0], a).mean() + @@ -358,13 +357,12 @@ def loss_a_fn( ) def loss_b_fn( - params_xi: Optional[jnp.ndarray], - apply_fn_xi: Callable[[Dict[str, jnp.ndarray], jnp.ndarray], - jnp.ndarray], - x: jnp.ndarray, - b: jnp.ndarray, + params_xi: Optional[jax.Array], + apply_fn_xi: Callable[[Dict[str, jax.Array], jax.Array], jax.Array], + x: jax.Array, + b: jax.Array, expectation_reweighting: float, - ) -> Tuple[float, jnp.ndarray]: + ) -> Tuple[float, jax.Array]: xi_predictions = apply_fn_xi({"params": params_xi}, x) return ( optax.l2_loss(xi_predictions[:, 0], b).mean() + @@ -374,11 +372,11 @@ def loss_b_fn( @jax.jit def step_fn( - source: jnp.ndarray, - target: jnp.ndarray, - condition: Optional[jnp.ndarray], - a: jnp.ndarray, - b: jnp.ndarray, + source: jax.Array, + target: jax.Array, + condition: Optional[jax.Array], + a: jax.Array, + b: jax.Array, state_eta: Optional[train_state.TrainState] = None, state_xi: Optional[train_state.TrainState] = None, *, diff 
--git a/src/ott/neural/solvers/flows.py b/src/ott/neural/solvers/flows.py index 19c3d2f67..6552048fb 100644 --- a/src/ott/neural/solvers/flows.py +++ b/src/ott/neural/solvers/flows.py @@ -141,16 +141,24 @@ def compute_sigma_t(self, t: jax.Array): class BaseTimeSampler(abc.ABC): - """Base class for time samplers.""" + """Base class for time samplers. + + Args: + low: Lower bound of the distribution to sample from. + high: Upper bound of the distribution to sample from . + """ + + def __init__(self, low: float, high: float) -> None: + self.low = low + self.high = high @abc.abstractmethod - def __call__(self, rng: jnp.ndarray, num_samples: int) -> jnp.ndarray: + def __call__(self, rng: jax.Array, num_samples: int) -> jax.Array: """Generate `num_samples` samples of the time `math`:t:. Args: rng: Random number generator. num_samples: Number of samples to generate. - """ pass @@ -163,11 +171,7 @@ class UniformSampler(BaseTimeSampler): high: Upper bound of the uniform distribution. """ - def __init__(self, low: float = 0.0, high: float = 1.0) -> None: - self.low = low - self.high = high - - def __call__(self, rng: jnp.ndarray, num_samples: int) -> jnp.ndarray: + def __call__(self, rng: jax.Array, num_samples: int) -> jax.Array: """Generate `num_samples` samples of the time `math`:t:. Args: @@ -194,11 +198,10 @@ class OffsetUniformSampler(BaseTimeSampler): def __init__( self, offset: float, low: float = 0.0, high: float = 1.0 ) -> None: + super().__init__(low=low, high=high) self.offset = offset - self.low = low - self.high = high - def __call__(self, rng: jnp.ndarray, num_samples: int) -> jnp.ndarray: + def __call__(self, rng: jax.Array, num_samples: int) -> jax.Array: """Generate `num_samples` samples of the time `math`:t:. 
Args: diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/solvers/genot.py index 3d6b3fafb..efdf5af29 100644 --- a/src/ott/neural/solvers/genot.py +++ b/src/ott/neural/solvers/genot.py @@ -83,10 +83,10 @@ def __init__( fused_penalty: float = 0.0, tau_a: float = 1.0, tau_b: float = 1.0, - mlp_eta: Callable[[jnp.ndarray], float] = None, - mlp_xi: Callable[[jnp.ndarray], float] = None, + mlp_eta: Callable[[jax.Array], float] = None, + mlp_xi: Callable[[jax.Array], float] = None, unbalanced_kwargs: Dict[str, Any] = {}, - callback: Optional[Callable[[jnp.ndarray, jnp.ndarray, jnp.ndarray], + callback: Optional[Callable[[jax.Array, jax.Array, jax.Array], Any]] = None, callback_kwargs: Dict[str, Any] = {}, callback_iters: int = 10, @@ -397,7 +397,7 @@ def transport( rng: random.PRNGKeyArray = random.PRNGKey(0), diffeqsolve_kwargs: Dict[str, Any] = types.MappingProxyType({}), forward: bool = True, - ) -> Union[jnp.array, diffrax.Solution, Optional[jnp.ndarray]]: + ) -> Union[jnp.array, diffrax.Solution, Optional[jax.Array]]: """Transport the distribution. 
Parameters @@ -468,5 +468,5 @@ def training_logs(self) -> Dict[str, Any]: def sample_noise( #TODO: make more general self, key: random.PRNGKey, batch_size: int - ) -> jnp.ndarray: #TODO: make more general + ) -> jax.Array: #TODO: make more general return random.normal(key, shape=(batch_size, self.output_dim)) diff --git a/src/ott/neural/solvers/losses.py b/src/ott/neural/solvers/losses.py index bec0f3916..fbf091b22 100644 --- a/src/ott/neural/solvers/losses.py +++ b/src/ott/neural/solvers/losses.py @@ -25,8 +25,8 @@ def monge_gap( - map_fn: Callable[[jnp.ndarray], jnp.ndarray], - reference_points: jnp.ndarray, + map_fn: Callable[[jax.Array], jax.Array], + reference_points: jax.Array, cost_fn: Optional[costs.CostFn] = None, epsilon: Optional[float] = None, relative_epsilon: Optional[bool] = None, @@ -91,8 +91,8 @@ def monge_gap( def monge_gap_from_samples( - source: jnp.ndarray, - target: jnp.ndarray, + source: jax.Array, + target: jax.Array, cost_fn: Optional[costs.CostFn] = None, epsilon: Optional[float] = None, relative_epsilon: Optional[bool] = None, diff --git a/src/ott/neural/solvers/map_estimator.py b/src/ott/neural/solvers/map_estimator.py index 27745f9ca..53bcdc7dd 100644 --- a/src/ott/neural/solvers/map_estimator.py +++ b/src/ott/neural/solvers/map_estimator.py @@ -79,9 +79,9 @@ def __init__( dim_data: int, model: neuraldual.BaseW2NeuralDual, optimizer: Optional[optax.OptState] = None, - fitting_loss: Optional[Callable[[jnp.ndarray, jnp.ndarray], + fitting_loss: Optional[Callable[[jax.Array, jax.Array], Tuple[float, Optional[Any]]]] = None, - regularizer: Optional[Callable[[jnp.ndarray, jnp.ndarray], + regularizer: Optional[Callable[[jax.Array, jax.Array], Tuple[float, Optional[Any]]]] = None, regularizer_strength: Union[float, Sequence[float]] = 1., num_train_iters: int = 10_000, @@ -126,7 +126,7 @@ def setup( self.step_fn = self._get_step_fn() @property - def regularizer(self) -> Callable[[jnp.ndarray, jnp.ndarray], float]: + def regularizer(self) -> 
Callable[[jax.Array, jax.Array], float]: """Regularizer added to the fitting loss. Can be e.g. the :func:`~ott.solvers.nn.losses.monge_gap_from_samples`. @@ -139,7 +139,7 @@ def regularizer(self) -> Callable[[jnp.ndarray, jnp.ndarray], float]: return lambda *args, **kwargs: (0., None) @property - def fitting_loss(self) -> Callable[[jnp.ndarray, jnp.ndarray], float]: + def fitting_loss(self) -> Callable[[jax.Array, jax.Array], float]: """Fitting loss to fit the marginal constraint. Can be for instance the @@ -153,9 +153,9 @@ def fitting_loss(self) -> Callable[[jnp.ndarray, jnp.ndarray], float]: @staticmethod def _generate_batch( - loader_source: Iterator[jnp.ndarray], - loader_target: Iterator[jnp.ndarray], - ) -> Dict[str, jnp.ndarray]: + loader_source: Iterator[jax.Array], + loader_target: Iterator[jax.Array], + ) -> Dict[str, jax.Array]: """Generate batches a batch of samples. ``loader_source`` and ``loader_target`` can be training or @@ -168,10 +168,10 @@ def _generate_batch( def train_map_estimator( self, - trainloader_source: Iterator[jnp.ndarray], - trainloader_target: Iterator[jnp.ndarray], - validloader_source: Iterator[jnp.ndarray], - validloader_target: Iterator[jnp.ndarray], + trainloader_source: Iterator[jax.Array], + trainloader_target: Iterator[jax.Array], + validloader_source: Iterator[jax.Array], + validloader_target: Iterator[jax.Array], ) -> Tuple[train_state.TrainState, Dict[str, Any]]: """Training loop.""" # define logs @@ -230,7 +230,7 @@ def _get_step_fn(self) -> Callable: def loss_fn( params: frozen_dict.FrozenDict, apply_fn: Callable, - batch: Dict[str, jnp.ndarray], step: int + batch: Dict[str, jax.Array], step: int ) -> Tuple[float, Dict[str, float]]: """Loss function.""" # map samples with the fitted map @@ -261,8 +261,8 @@ def loss_fn( @functools.partial(jax.jit, static_argnums=3) def step_fn( state_neural_net: train_state.TrainState, - train_batch: Dict[str, jnp.ndarray], - valid_batch: Optional[Dict[str, jnp.ndarray]] = None, + 
train_batch: Dict[str, jax.Array], + valid_batch: Optional[Dict[str, jax.Array]] = None, is_logging_step: bool = False, step: int = 0 ) -> Tuple[train_state.TrainState, Dict[str, float]]: diff --git a/src/ott/neural/solvers/neuraldual.py b/src/ott/neural/solvers/neuraldual.py index 6ac3d1c79..7d4d5800f 100644 --- a/src/ott/neural/solvers/neuraldual.py +++ b/src/ott/neural/solvers/neuraldual.py @@ -44,8 +44,8 @@ Callback_t = Callable[[int, potentials.DualPotentials], None] Conj_t = Optional[conjugate_solvers.FenchelConjugateSolver] -PotentialValueFn_t = Callable[[jnp.ndarray], jnp.ndarray] -PotentialGradientFn_t = Callable[[jnp.ndarray], jnp.ndarray] +PotentialValueFn_t = Callable[[jax.Array], jax.Array] +PotentialGradientFn_t = Callable[[jax.Array], jax.Array] class W2NeuralTrainState(train_state.TrainState): @@ -60,9 +60,9 @@ class W2NeuralTrainState(train_state.TrainState): potential_gradient_fn: the potential's gradient function """ potential_value_fn: Callable[ - [frozen_dict.FrozenDict[str, jnp.ndarray], Optional[PotentialValueFn_t]], + [frozen_dict.FrozenDict[str, jax.Array], Optional[PotentialValueFn_t]], PotentialValueFn_t] = struct.field(pytree_node=False) - potential_gradient_fn: Callable[[frozen_dict.FrozenDict[str, jnp.ndarray]], + potential_gradient_fn: Callable[[frozen_dict.FrozenDict[str, jax.Array]], PotentialGradientFn_t] = struct.field( pytree_node=False ) @@ -87,7 +87,7 @@ def is_potential(self) -> bool: def potential_value_fn( self, - params: frozen_dict.FrozenDict[str, jnp.ndarray], + params: frozen_dict.FrozenDict[str, jax.Array], other_potential_value_fn: Optional[PotentialValueFn_t] = None, ) -> PotentialValueFn_t: r"""Return a function giving the value of the potential. @@ -119,7 +119,7 @@ def potential_value_fn( "The value of the gradient-based potential depends " \ "on the value of the other potential." 
- def value_fn(x: jnp.ndarray) -> jnp.ndarray: + def value_fn(x: jax.Array) -> jax.Array: squeeze = x.ndim == 1 if squeeze: x = jnp.expand_dims(x, 0) @@ -132,7 +132,7 @@ def value_fn(x: jnp.ndarray) -> jnp.ndarray: def potential_gradient_fn( self, - params: frozen_dict.FrozenDict[str, jnp.ndarray], + params: frozen_dict.FrozenDict[str, jax.Array], ) -> PotentialGradientFn_t: """Return a function returning a vector or the gradient of the potential. @@ -358,10 +358,10 @@ def setup( def __call__( # noqa: D102 self, - trainloader_source: Iterator[jnp.ndarray], - trainloader_target: Iterator[jnp.ndarray], - validloader_source: Iterator[jnp.ndarray], - validloader_target: Iterator[jnp.ndarray], + trainloader_source: Iterator[jax.Array], + trainloader_target: Iterator[jax.Array], + validloader_source: Iterator[jax.Array], + validloader_target: Iterator[jax.Array], callback: Optional[Callback_t] = None, ) -> Union[potentials.DualPotentials, Tuple[potentials.DualPotentials, Train_t]]: @@ -378,10 +378,10 @@ def __call__( # noqa: D102 def train_neuraldual_parallel( self, - trainloader_source: Iterator[jnp.ndarray], - trainloader_target: Iterator[jnp.ndarray], - validloader_source: Iterator[jnp.ndarray], - validloader_target: Iterator[jnp.ndarray], + trainloader_source: Iterator[jax.Array], + trainloader_target: Iterator[jax.Array], + validloader_source: Iterator[jax.Array], + validloader_target: Iterator[jax.Array], callback: Optional[Callback_t] = None, ) -> Train_t: """Training and validation with parallel updates.""" @@ -453,10 +453,10 @@ def train_neuraldual_parallel( def train_neuraldual_alternating( self, - trainloader_source: Iterator[jnp.ndarray], - trainloader_target: Iterator[jnp.ndarray], - validloader_source: Iterator[jnp.ndarray], - validloader_target: Iterator[jnp.ndarray], + trainloader_source: Iterator[jax.Array], + trainloader_target: Iterator[jax.Array], + validloader_source: Iterator[jax.Array], + validloader_target: Iterator[jax.Array], callback: 
Optional[Callback_t] = None, ) -> Train_t: """Training and validation with alternating updates.""" @@ -533,7 +533,7 @@ def loss_fn(params_f, params_g, f_value, g_value, g_gradient, batch): init_source_hat = g_gradient(params_g)(target) - def g_value_partial(y: jnp.ndarray) -> jnp.ndarray: + def g_value_partial(y: jax.Array) -> jax.Array: """Lazy way of evaluating g if f's computation needs it.""" return g_value(params_g)(y) @@ -661,7 +661,7 @@ def to_dual_potentials( self.state_g.params, f_value ) - def g_value_finetuned(y: jnp.ndarray) -> jnp.ndarray: + def g_value_finetuned(y: jax.Array) -> jax.Array: x_hat = jax.grad(g_value_prediction)(y) grad_g_y = jax.lax.stop_gradient( self.conjugate_solver.solve(f_value, y, x_init=x_hat).grad @@ -686,7 +686,7 @@ def _clip_weights_icnn(params): return core.freeze(params) @staticmethod - def _penalize_weights_icnn(params: Dict[str, jnp.ndarray]) -> float: + def _penalize_weights_icnn(params: Dict[str, jax.Array]) -> float: penalty = 0.0 for k, param in params.items(): if k.startswith("w_z"): @@ -696,9 +696,9 @@ def _penalize_weights_icnn(params: Dict[str, jnp.ndarray]) -> float: @staticmethod def _update_logs( logs: Dict[str, List[Union[float, str]]], - loss_f: jnp.ndarray, - loss_g: jnp.ndarray, - w_dist: jnp.ndarray, + loss_f: jax.Array, + loss_g: jax.Array, + w_dist: jax.Array, ) -> None: logs["loss_f"].append(float(loss_f)) logs["loss_g"].append(float(loss_g)) diff --git a/src/ott/neural/solvers/otfm.py b/src/ott/neural/solvers/otfm.py index ec0be23da..3b5aa3319 100644 --- a/src/ott/neural/solvers/otfm.py +++ b/src/ott/neural/solvers/otfm.py @@ -38,6 +38,35 @@ class OTFlowMatching(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): + """Flow matching as introduced in :cite:`TODO, with extension to OT-FM (). + + Args: + neural_vector_field: Neural vector field parameterized by a neural network. + input_dim: Dimension of the input data. + cond_dim: Dimension of the conditioning variable. 
+ iterations: Number of iterations. + valid_freq: Frequency of validation. + ot_solver: OT solver to match samples from the source and the target distribution as proposed in :cite:`TODO`. If `None`, no matching will be performed as proposed in :cite:`TODO`. + flow: Flow between source and target distribution. + time_sampler: Sampler for the time. + optimizer: Optimizer for `neural_vector_field`. + checkpoint_manager: Checkpoint manager. + epsilon: Entropy regularization term for the `ot_solver`. + cost_fn: Cost function for the OT problem solved by the `ot_solver`. + tau_a: If :math:`<1`, defines how much unbalanced the problem is + on the first marginal. + tau_b: If :math:`< 1`, defines how much unbalanced the problem is + on the second marginal. + mlp_eta: Neural network to learn the left rescaling function as suggested in :cite:`TODO`. If `None`, the left rescaling factor is not learnt. + mlp_xi: Neural network to learn the right rescaling function as suggested in :cite:`TODO`. If `None`, the right rescaling factor is not learnt. + unbalanced_kwargs: Keyword arguments for the unbalancedness solver. + callback_fn: Callback function. + rng: Random number generator. 
+ + Returns: + None + + """ def __init__( self, @@ -55,10 +84,10 @@ def __init__( cost_fn: Type[costs.CostFn] = costs.SqEuclidean(), tau_a: float = 1.0, tau_b: float = 1.0, - mlp_eta: Callable[[jnp.ndarray], float] = None, - mlp_xi: Callable[[jnp.ndarray], float] = None, + mlp_eta: Callable[[jax.Array], float] = None, + mlp_xi: Callable[[jax.Array], float] = None, unbalanced_kwargs: Dict[str, Any] = {}, - callback_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray, jnp.ndarray], + callback_fn: Optional[Callable[[jax.Array, jax.Array, jax.Array], Any]] = None, rng: random.PRNGKeyArray = random.PRNGKey(0), ) -> None: @@ -93,6 +122,7 @@ def __init__( self.setup() def setup(self) -> None: + """Setup :class:`OTFlowMatching`.""" self.state_neural_vector_field = self.neural_vector_field.create_train_state( self.rng, self.optimizer, self.input_dim ) @@ -115,13 +145,13 @@ def _get_step_fn(self) -> Callable: def step_fn( key: random.PRNGKeyArray, state_neural_vector_field: train_state.TrainState, - batch: Dict[str, jnp.ndarray], + batch: Dict[str, jax.Array], ) -> Tuple[Any, Any]: def loss_fn( params: jax.Array, t: jax.Array, noise: jax.Array, - batch: Dict[str, jnp.ndarray], keys_model: random.PRNGKeyArray - ) -> jnp.ndarray: + batch: Dict[str, jax.Array], keys_model: random.PRNGKeyArray + ) -> jax.Array: x_t = self.flow.compute_xt(noise, t, batch["source"], batch["target"]) apply_fn = functools.partial( @@ -147,7 +177,16 @@ def loss_fn( return step_fn def __call__(self, train_loader, valid_loader) -> None: - batch: Mapping[str, jnp.ndarray] = {} + """Train :class:`OTFlowMatching`. + + Args; + train_loader: Dataloader for the training data. + valid_loader: Dataloader for the validation data. 
+ + Returns: + None + """ + batch: Mapping[str, jax.Array] = {} for iter in range(self.iterations): rng_resample, rng_step_fn, self.rng = random.split(self.rng, 3) batch["source"], batch["target"], batch["condition"] = next(train_loader) @@ -184,9 +223,26 @@ def transport( forward: bool = True, diffeqsolve_kwargs: Dict[str, Any] = types.MappingProxyType({}) ) -> diffrax.Solution: + """Transport data with the learnt map. + + This method solves the neural ODE parameterized by the :attr:`~ott.neural.solvers.OTFlowMatching.neural_vector_field` from + :attr:`~ott.neural.flows.BaseTimeSampler.low` to :attr:`~ott.neural.flows.BaseTimeSampler.high` if `forward` is `True`, + else the other way round. + + Args: + data: Initial condition of the ODE. + condition: Condition of the input data. + forward: If `True` integrates forward, otherwise backwards. + diffeqsovle_kwargs: Keyword arguments for the ODE solver. + + Returns: + The push-forward or pull-back distribution defined by the learnt transport plan. + + """ diffeqsolve_kwargs = dict(diffeqsolve_kwargs) - t0, t1 = (0.0, 1.0) if forward else (1.0, 0.0) + t0, t1 = (self.time_sampler.low, self.time_sampler.high + ) if forward else (self.time_sampler.high, self.time_sampler.low) def solve_ode(input: jax.Array, cond: jax.Array): return diffrax.diffeqsolve( @@ -217,18 +273,40 @@ def _valid_step(self, valid_loader, iter) -> None: @property def learn_rescaling(self) -> bool: + """Whether to learn at least one rescaling factor of the marginal distributions.""" return self.mlp_eta is not None or self.mlp_xi is not None def save(self, path: str) -> None: + """Save the model. + + Args: + path: Where to save the model to. + """ raise NotImplementedError def load(self, path: str) -> "OTFlowMatching": + """Load a model. + + Args: + path: Where to load the model from. + + Returns: + An instance of :class:`ott.neural.solvers.OTFlowMatching`. 
+ """ raise NotImplementedError def training_logs(self) -> Dict[str, Any]: + """Logs of the training.""" raise NotImplementedError - def sample_noise( #TODO: make more general - self, key: random.PRNGKey, batch_size: int - ) -> jnp.ndarray: #TODO: make more general + def sample_noise(self, key: random.PRNGKey, batch_size: int) -> jax.Array: + """Sample noise from a standard-normal distribution. + + Args: + key: Random key for seeding. + batch_size: Number of samples to draw. + + Returns: + Samples from the standard normal distribution. + """ return random.normal(key, shape=(batch_size, self.input_dim)) diff --git a/src/ott/problems/linear/barycenter_problem.py b/src/ott/problems/linear/barycenter_problem.py index ca5333a8e..c94cc578d 100644 --- a/src/ott/problems/linear/barycenter_problem.py +++ b/src/ott/problems/linear/barycenter_problem.py @@ -50,9 +50,9 @@ class FreeBarycenterProblem: def __init__( self, - y: jnp.ndarray, - b: Optional[jnp.ndarray] = None, - weights: Optional[jnp.ndarray] = None, + y: jax.Array, + b: Optional[jax.Array] = None, + weights: Optional[jax.Array] = None, cost_fn: Optional[costs.CostFn] = None, epsilon: Optional[float] = None, **kwargs: Any, @@ -76,7 +76,7 @@ def __init__( assert self._b is None or self._y.shape[0] == self._b.shape[0] @property - def segmented_y_b(self) -> Tuple[jnp.ndarray, jnp.ndarray]: + def segmented_y_b(self) -> Tuple[jax.Array, jax.Array]: """Tuple of arrays containing the segmented measures and weights. - Segmented measures of shape ``[num_measures, max_measure_size, ndim]``. 
@@ -94,14 +94,14 @@ def segmented_y_b(self) -> Tuple[jnp.ndarray, jnp.ndarray]: return y, b @property - def flattened_y(self) -> jnp.ndarray: + def flattened_y(self) -> jax.Array: """Array of shape ``[num_measures * (N_1 + N_2 + ...), ndim]``.""" if self._is_segmented: return self._y.reshape((-1, self._y.shape[-1])) return self._y @property - def flattened_b(self) -> Optional[jnp.ndarray]: + def flattened_b(self) -> Optional[jax.Array]: """Array of shape ``[num_measures * (N_1 + N_2 + ...),]``.""" return None if self._b is None else self._b.ravel() @@ -121,7 +121,7 @@ def ndim(self) -> int: return self._y.shape[-1] @property - def weights(self) -> jnp.ndarray: + def weights(self) -> jax.Array: """Barycenter weights of shape ``[num_measures,]`` that sum to 1.""" if self._weights is None: return jnp.ones((self.num_measures,)) / self.num_measures @@ -165,8 +165,8 @@ class FixedBarycenterProblem: def __init__( self, geom: geometry.Geometry, - a: jnp.ndarray, - weights: Optional[jnp.ndarray] = None, + a: jax.Array, + weights: Optional[jax.Array] = None, ): self.geom = geom self.a = a @@ -178,7 +178,7 @@ def num_measures(self) -> int: return self.a.shape[0] @property - def weights(self) -> jnp.ndarray: + def weights(self) -> jax.Array: """Barycenter weights of shape ``[num_measures,]`` that sum to :math`1`.""" if self._weights is None: return jnp.ones((self.num_measures,)) / self.num_measures diff --git a/src/ott/problems/linear/linear_problem.py b/src/ott/problems/linear/linear_problem.py index 7c206aa63..3e09c0e59 100644 --- a/src/ott/problems/linear/linear_problem.py +++ b/src/ott/problems/linear/linear_problem.py @@ -21,9 +21,8 @@ __all__ = ["LinearProblem"] # TODO(michalk8): move to typing.py when refactoring the types -MarginalFunc = Callable[[jnp.ndarray, jnp.ndarray], jnp.ndarray] -TransportAppFunc = Callable[[jnp.ndarray, jnp.ndarray, jnp.ndarray, int], - jnp.ndarray] +MarginalFunc = Callable[[jax.Array, jax.Array], jax.Array] +TransportAppFunc = 
Callable[[jax.Array, jax.Array, jax.Array, int], jax.Array] @jax.tree_util.register_pytree_node_class @@ -50,8 +49,8 @@ class LinearProblem: def __init__( self, geom: geometry.Geometry, - a: Optional[jnp.ndarray] = None, - b: Optional[jnp.ndarray] = None, + a: Optional[jax.Array] = None, + b: Optional[jax.Array] = None, tau_a: float = 1.0, tau_b: float = 1.0 ): @@ -62,13 +61,13 @@ def __init__( self.tau_b = tau_b @property - def a(self) -> jnp.ndarray: + def a(self) -> jax.Array: """First marginal.""" num_a = self.geom.shape[0] return jnp.ones((num_a,)) / num_a if self._a is None else self._a @property - def b(self) -> jnp.ndarray: + def b(self) -> jax.Array: """Second marginal.""" num_b = self.geom.shape[1] return jnp.ones((num_b,)) / num_b if self._b is None else self._b diff --git a/src/ott/problems/linear/potentials.py b/src/ott/problems/linear/potentials.py index 7ab226072..718aa22a1 100644 --- a/src/ott/problems/linear/potentials.py +++ b/src/ott/problems/linear/potentials.py @@ -37,7 +37,7 @@ mpl = plt = None __all__ = ["DualPotentials", "EntropicPotentials"] -Potential_t = Callable[[jnp.ndarray], float] +Potential_t = Callable[[jax.Array], float] @jtu.register_pytree_node_class @@ -72,7 +72,7 @@ def __init__( self.cost_fn = cost_fn self._corr = corr - def transport(self, vec: jnp.ndarray, forward: bool = True) -> jnp.ndarray: + def transport(self, vec: jax.Array, forward: bool = True) -> jax.Array: r"""Transport ``vec`` according to Brenier formula :cite:`brenier:91`. Uses Theorem 1.17 from :cite:`santambrogio:15` to compute an OT map when @@ -105,7 +105,7 @@ def transport(self, vec: jnp.ndarray, forward: bool = True) -> jnp.ndarray: return vec - self._grad_h_inv(self._grad_f(vec)) return vec - self._grad_h_inv(self._grad_g(vec)) - def distance(self, src: jnp.ndarray, tgt: jnp.ndarray) -> float: + def distance(self, src: jax.Array, tgt: jax.Array) -> float: r"""Evaluate Wasserstein distance between samples using dual potentials. 
This uses direct estimation of potentials against measures when dual @@ -146,17 +146,17 @@ def g(self) -> Potential_t: return self._g @property - def _grad_f(self) -> Callable[[jnp.ndarray], jnp.ndarray]: + def _grad_f(self) -> Callable[[jax.Array], jax.Array]: """Vectorized gradient of the potential function :attr:`f`.""" return jax.vmap(jax.grad(self.f, argnums=0)) @property - def _grad_g(self) -> Callable[[jnp.ndarray], jnp.ndarray]: + def _grad_g(self) -> Callable[[jax.Array], jax.Array]: """Vectorized gradient of the potential function :attr:`g`.""" return jax.vmap(jax.grad(self.g, argnums=0)) @property - def _grad_h_inv(self) -> Callable[[jnp.ndarray], jnp.ndarray]: + def _grad_h_inv(self) -> Callable[[jax.Array], jax.Array]: from ott.geometry import costs assert isinstance(self.cost_fn, costs.TICost), ( @@ -181,9 +181,9 @@ def tree_unflatten( # noqa: D102 def plot_ot_map( self, - source: jnp.ndarray, - target: jnp.ndarray, - samples: Optional[jnp.ndarray] = None, + source: jax.Array, + target: jax.Array, + samples: Optional[jax.Array] = None, forward: bool = True, ax: Optional["plt.Axes"] = None, legend_kwargs: Optional[Dict[str, Any]] = None, @@ -348,11 +348,11 @@ class EntropicPotentials(DualPotentials): def __init__( self, - f_xy: jnp.ndarray, - g_xy: jnp.ndarray, + f_xy: jax.Array, + g_xy: jax.Array, prob: linear_problem.LinearProblem, - f_xx: Optional[jnp.ndarray] = None, - g_yy: Optional[jnp.ndarray] = None, + f_xx: Optional[jax.Array] = None, + g_yy: Optional[jax.Array] = None, ): # we pass directly the arrays and override the properties # since only the properties need to be callable @@ -373,11 +373,11 @@ def _potential_fn(self, *, kind: Literal["f", "g"]) -> Potential_t: from ott.geometry import pointcloud def callback( - x: jnp.ndarray, + x: jax.Array, *, - potential: jnp.ndarray, - y: jnp.ndarray, - weights: jnp.ndarray, + potential: jax.Array, + y: jax.Array, + weights: jax.Array, epsilon: float, ) -> float: x = jnp.atleast_2d(x) diff --git 
a/src/ott/problems/quadratic/gw_barycenter.py b/src/ott/problems/quadratic/gw_barycenter.py index dfe562d98..7170f1064 100644 --- a/src/ott/problems/quadratic/gw_barycenter.py +++ b/src/ott/problems/quadratic/gw_barycenter.py @@ -60,11 +60,11 @@ class GWBarycenterProblem(barycenter_problem.FreeBarycenterProblem): def __init__( self, - y: Optional[jnp.ndarray] = None, - b: Optional[jnp.ndarray] = None, - weights: Optional[jnp.ndarray] = None, - costs: Optional[jnp.ndarray] = None, - y_fused: Optional[jnp.ndarray] = None, + y: Optional[jax.Array] = None, + b: Optional[jax.Array] = None, + weights: Optional[jax.Array] = None, + costs: Optional[jax.Array] = None, + y_fused: Optional[jax.Array] = None, fused_penalty: float = 1.0, gw_loss: Literal["sqeucl", "kl"] = "sqeucl", scale_cost: Union[int, float, Literal["mean", "max_cost"]] = 1.0, @@ -98,9 +98,7 @@ def __init__( # TODO(michalk8): in the future, consider checking the other 2 cases # using `segmented_y` and `segmented_y_fused`? - def update_barycenter( - self, transports: jnp.ndarray, a: jnp.ndarray - ) -> jnp.ndarray: + def update_barycenter(self, transports: jax.Array, a: jax.Array) -> jax.Array: """Update the barycenter cost matrix. Uses the eq. 14 and 15 of :cite:`peyre:16`. @@ -116,11 +114,11 @@ def update_barycenter( @functools.partial(jax.vmap, in_axes=[0, 0, 0, None]) def project( - y: jnp.ndarray, - b: jnp.ndarray, - transport: jnp.ndarray, + y: jax.Array, + b: jax.Array, + transport: jax.Array, fn: Optional[quadratic_costs.Loss], - ) -> jnp.ndarray: + ) -> jax.Array: geom = self._create_y_geometry(y, mask=b > 0.) 
fn, lin = (None, True) if fn is None else (fn.func, fn.is_linear) @@ -146,8 +144,8 @@ def project( return jnp.exp(barycenter) return barycenter - def update_features(self, transports: jnp.ndarray, - a: jnp.ndarray) -> Optional[jnp.ndarray]: + def update_features(self, transports: jax.Array, + a: jax.Array) -> Optional[jax.Array]: """Update the barycenter features in the fused case :cite:`vayer:19`. Uses :cite:`cuturi:14` eq. 8, and is implemented only @@ -181,8 +179,8 @@ def update_features(self, transports: jnp.ndarray, def _create_bary_geometry( self, - cost_matrix: jnp.ndarray, - mask: Optional[jnp.ndarray] = None + cost_matrix: jax.Array, + mask: Optional[jax.Array] = None ) -> geometry.Geometry: return geometry.Geometry( cost_matrix=cost_matrix, @@ -194,8 +192,8 @@ def _create_bary_geometry( def _create_y_geometry( self, - y: jnp.ndarray, - mask: Optional[jnp.ndarray] = None + y: jax.Array, + mask: Optional[jax.Array] = None ) -> geometry.Geometry: if self._y_as_costs: assert y.shape[0] == y.shape[1], y.shape @@ -217,10 +215,10 @@ def _create_y_geometry( def _create_fused_geometry( self, - x: jnp.ndarray, - y: jnp.ndarray, - src_mask: Optional[jnp.ndarray] = None, - tgt_mask: Optional[jnp.ndarray] = None + x: jax.Array, + y: jax.Array, + src_mask: Optional[jax.Array] = None, + tgt_mask: Optional[jax.Array] = None ) -> pointcloud.PointCloud: return pointcloud.PointCloud( x, @@ -235,9 +233,9 @@ def _create_fused_geometry( def _create_problem( self, state: "GWBarycenterState", # noqa: F821 - y: jnp.ndarray, - b: jnp.ndarray, - f: Optional[jnp.ndarray] = None + y: jax.Array, + b: jax.Array, + f: Optional[jax.Array] = None ) -> quadratic_problem.QuadraticProblem: # TODO(michalk8): in future, mask in the problem for convenience? bary_mask = state.a > 0. 
@@ -269,7 +267,7 @@ def is_fused(self) -> bool: return self._y_fused is not None @property - def segmented_y_fused(self) -> Optional[jnp.ndarray]: + def segmented_y_fused(self) -> Optional[jax.Array]: """Feature array of shape used in the fused case.""" if not self.is_fused or self._y_fused.ndim == 3: return self._y_fused diff --git a/src/ott/problems/quadratic/quadratic_costs.py b/src/ott/problems/quadratic/quadratic_costs.py index 70f2bf5ad..060c3c537 100644 --- a/src/ott/problems/quadratic/quadratic_costs.py +++ b/src/ott/problems/quadratic/quadratic_costs.py @@ -13,6 +13,7 @@ # limitations under the License. from typing import Callable, NamedTuple +import jax import jax.numpy as jnp import jax.scipy as jsp @@ -20,7 +21,7 @@ class Loss(NamedTuple): # noqa: D101 - func: Callable[[jnp.ndarray], jnp.ndarray] + func: Callable[[jax.Array], jax.Array] is_linear: bool diff --git a/src/ott/problems/quadratic/quadratic_problem.py b/src/ott/problems/quadratic/quadratic_problem.py index 5deb4558c..cf5b804a2 100644 --- a/src/ott/problems/quadratic/quadratic_problem.py +++ b/src/ott/problems/quadratic/quadratic_problem.py @@ -91,8 +91,8 @@ def __init__( geom_xy: Optional[geometry.Geometry] = None, fused_penalty: float = 1.0, scale_cost: Optional[Union[bool, float, str]] = False, - a: Optional[jnp.ndarray] = None, - b: Optional[jnp.ndarray] = None, + a: Optional[jax.Array] = None, + b: Optional[jax.Array] = None, loss: Union[Literal["sqeucl", "kl"], quadratic_costs.GWLoss] = "sqeucl", tau_a: float = 1.0, tau_b: float = 1.0, @@ -125,8 +125,8 @@ def __init__( def marginal_dependent_cost( self, - marginal_1: jnp.ndarray, - marginal_2: jnp.ndarray, + marginal_1: jax.Array, + marginal_2: jax.Array, ) -> low_rank.LRCGeometry: r"""Initialize cost term that depends on the marginals of the transport. 
@@ -169,9 +169,9 @@ def marginal_dependent_cost( def cost_unbalanced_correction( self, - transport_matrix: jnp.ndarray, - marginal_1: jnp.ndarray, - marginal_2: jnp.ndarray, + transport_matrix: jax.Array, + marginal_1: jax.Array, + marginal_2: jax.Array, epsilon: epsilon_scheduler.Epsilon, ) -> float: r"""Calculate cost term from the quadratic divergence when unbalanced. @@ -193,10 +193,10 @@ def cost_unbalanced_correction( :math:`+ epsilon * \sum(KL(P|ab'))` Args: - transport_matrix: jnp.ndarray[num_a, num_b], transport matrix. - marginal_1: jnp.ndarray[num_a,], marginal of the transport matrix + transport_matrix: jax.Array[num_a, num_b], transport matrix. + marginal_1: jax.Array[num_a,], marginal of the transport matrix for samples from :attr:`geom_xx`. - marginal_2: jnp.ndarray[num_b,], marginal of the transport matrix + marginal_2: jax.Array[num_b,], marginal of the transport matrix for samples from :attr:`geom_yy`. epsilon: entropy regularizer. @@ -353,7 +353,7 @@ def update_lr_linearization( ) @property - def _fused_cost_matrix(self) -> Union[float, jnp.ndarray]: + def _fused_cost_matrix(self) -> Union[float, jax.Array]: if not self.is_fused: return 0.0 geom_xy = self.geom_xy @@ -442,13 +442,13 @@ def geom_xy(self) -> Optional[geometry.Geometry]: return self._geom_xy @property - def a(self) -> jnp.ndarray: + def a(self) -> jax.Array: """First marginal.""" num_a = self.geom_xx.shape[0] return jnp.ones((num_a,)) / num_a if self._a is None else self._a @property - def b(self) -> jnp.ndarray: + def b(self) -> jax.Array: """Second marginal.""" num_b = self.geom_yy.shape[0] return jnp.ones((num_b,)) / num_b if self._b is None else self._b @@ -510,7 +510,7 @@ def update_epsilon_unbalanced( # noqa: D103 def apply_cost( # noqa: D103 - geom: geometry.Geometry, arr: jnp.ndarray, *, axis: int, + geom: geometry.Geometry, arr: jax.Array, *, axis: int, fn: quadratic_costs.Loss -) -> jnp.ndarray: +) -> jax.Array: return geom.apply_cost(arr, axis=axis, fn=fn.func, 
is_linear=fn.is_linear) diff --git a/src/ott/solvers/linear/_solve.py b/src/ott/solvers/linear/_solve.py index 2bca6a825..fad5a4e7d 100644 --- a/src/ott/solvers/linear/_solve.py +++ b/src/ott/solvers/linear/_solve.py @@ -13,7 +13,7 @@ # limitations under the License. from typing import Any, Optional, Union -import jax.numpy as jnp +import jax from ott.geometry import geometry from ott.problems.linear import linear_problem @@ -24,8 +24,8 @@ def solve( geom: geometry.Geometry, - a: Optional[jnp.ndarray] = None, - b: Optional[jnp.ndarray] = None, + a: Optional[jax.Array] = None, + b: Optional[jax.Array] = None, tau_a: float = 1.0, tau_b: float = 1.0, rank: int = -1, diff --git a/src/ott/solvers/linear/acceleration.py b/src/ott/solvers/linear/acceleration.py index 4529e7f78..7ce602194 100644 --- a/src/ott/solvers/linear/acceleration.py +++ b/src/ott/solvers/linear/acceleration.py @@ -34,7 +34,7 @@ class AndersonAcceleration: refresh_every: int = 1 # Recompute interpolation periodically. ridge_identity: float = 1e-2 # Ridge used in the linear system. - def extrapolation(self, xs: jnp.ndarray, fxs: jnp.ndarray) -> jnp.ndarray: + def extrapolation(self, xs: jax.Array, fxs: jax.Array) -> jax.Array: """Compute Anderson extrapolation from past observations.""" # Remove -inf values to instantiate quadratic problem. All others # remain since they might be caused by a valid issue. 
@@ -161,10 +161,10 @@ def lehmann(self, state: "sinkhorn.SinkhornState") -> float: def __call__( # noqa: D102 self, weight: float, - value: jnp.ndarray, - new_value: jnp.ndarray, + value: jax.Array, + new_value: jax.Array, lse_mode: bool = True - ) -> jnp.ndarray: + ) -> jax.Array: if lse_mode: value = jnp.where(jnp.isfinite(value), value, 0.0) return (1.0 - weight) * value + weight * new_value diff --git a/src/ott/solvers/linear/continuous_barycenter.py b/src/ott/solvers/linear/continuous_barycenter.py index 2d89a74ea..0094c3a3c 100644 --- a/src/ott/solvers/linear/continuous_barycenter.py +++ b/src/ott/solvers/linear/continuous_barycenter.py @@ -41,11 +41,11 @@ class FreeBarycenterState(NamedTuple): a: barycenter weights. """ - costs: Optional[jnp.ndarray] = None - linear_convergence: Optional[jnp.ndarray] = None - errors: Optional[jnp.ndarray] = None - x: Optional[jnp.ndarray] = None - a: Optional[jnp.ndarray] = None + costs: Optional[jax.Array] = None + linear_convergence: Optional[jax.Array] = None + errors: Optional[jax.Array] = None + x: Optional[jax.Array] = None + a: Optional[jax.Array] = None def set(self, **kwargs: Any) -> "FreeBarycenterState": """Return a copy of self, possibly with overwrites.""" @@ -70,7 +70,7 @@ def update( @functools.partial(jax.vmap, in_axes=[None, None, 0, 0]) def solve_linear_ot( - a: Optional[jnp.ndarray], x: jnp.ndarray, b: jnp.ndarray, y: jnp.ndarray + a: Optional[jax.Array], x: jax.Array, b: jax.Array, y: jax.Array ): out = linear_ot_solver( linear_problem.LinearProblem( @@ -129,7 +129,7 @@ def __call__( # noqa: D102 self, bar_prob: barycenter_problem.FreeBarycenterProblem, bar_size: int = 100, - x_init: Optional[jnp.ndarray] = None, + x_init: Optional[jax.Array] = None, rng: Optional[jax.Array] = None, ) -> FreeBarycenterState: # TODO(michalk8): no reason for iterations to be outside this class @@ -140,7 +140,7 @@ def init_state( self, bar_prob: barycenter_problem.FreeBarycenterProblem, bar_size: int, - x_init: 
Optional[jnp.ndarray] = None, + x_init: Optional[jax.Array] = None, rng: Optional[jax.Array] = None, ) -> FreeBarycenterState: """Initialize the state of the Wasserstein barycenter iterations. @@ -195,7 +195,7 @@ def output_from_state( # noqa: D102 def iterations( solver: FreeWassersteinBarycenter, bar_size: int, - bar_prob: barycenter_problem.FreeBarycenterProblem, x_init: jnp.ndarray, + bar_prob: barycenter_problem.FreeBarycenterProblem, x_init: jax.Array, rng: jax.Array ) -> FreeBarycenterState: """Jittable Wasserstein barycenter outer loop.""" diff --git a/src/ott/solvers/linear/discrete_barycenter.py b/src/ott/solvers/linear/discrete_barycenter.py index dcfdc1470..85adaa795 100644 --- a/src/ott/solvers/linear/discrete_barycenter.py +++ b/src/ott/solvers/linear/discrete_barycenter.py @@ -26,10 +26,10 @@ class SinkhornBarycenterOutput(NamedTuple): # noqa: D101 - f: jnp.ndarray - g: jnp.ndarray - histogram: jnp.ndarray - errors: jnp.ndarray + f: jax.Array + g: jax.Array + histogram: jax.Array + errors: jax.Array @jax.tree_util.register_pytree_node_class @@ -79,7 +79,7 @@ def __init__( def __call__( self, fixed_bp: barycenter_problem.FixedBarycenterProblem, - dual_initialization: Optional[jnp.ndarray] = None, + dual_initialization: Optional[jax.Array] = None, ) -> SinkhornBarycenterOutput: """Solve barycenter problem, possibly using clever initialization. 
@@ -128,10 +128,10 @@ def tree_unflatten(cls, aux_data, children): # noqa: D102 @functools.partial(jax.jit, static_argnums=(5, 6, 7, 8, 9, 10, 11, 12)) def _discrete_barycenter( - geom: geometry.Geometry, a: jnp.ndarray, weights: jnp.ndarray, - dual_initialization: jnp.ndarray, threshold: float, - norm_error: Sequence[int], inner_iterations: int, min_iterations: int, - max_iterations: int, lse_mode: bool, debiased: bool, num_a: int, num_b: int + geom: geometry.Geometry, a: jax.Array, weights: jax.Array, + dual_initialization: jax.Array, threshold: float, norm_error: Sequence[int], + inner_iterations: int, min_iterations: int, max_iterations: int, + lse_mode: bool, debiased: bool, num_a: int, num_b: int ) -> SinkhornBarycenterOutput: """Jit'able function to compute discrete barycenters.""" if lse_mode: diff --git a/src/ott/solvers/linear/implicit_differentiation.py b/src/ott/solvers/linear/implicit_differentiation.py index fbf98ce81..c5e7cb0f3 100644 --- a/src/ott/solvers/linear/implicit_differentiation.py +++ b/src/ott/solvers/linear/implicit_differentiation.py @@ -23,9 +23,8 @@ if TYPE_CHECKING: from ott.problems.linear import linear_problem -LinOp_t = Callable[[jnp.ndarray], jnp.ndarray] -Solver_t = Callable[[LinOp_t, jnp.ndarray, Optional[LinOp_t], bool], - jnp.ndarray] +LinOp_t = Callable[[jax.Array], jax.Array] +Solver_t = Callable[[LinOp_t, jax.Array, Optional[LinOp_t], bool], jax.Array] __all__ = ["ImplicitDiff", "solve_jax_cg"] @@ -70,16 +69,16 @@ class ImplicitDiff: solver: Optional[Solver_t] = None solver_kwargs: Optional[Dict[str, Any]] = None symmetric: bool = False - precondition_fun: Optional[Callable[[jnp.ndarray], jnp.ndarray]] = None + precondition_fun: Optional[Callable[[jax.Array], jax.Array]] = None def solve( self, - gr: Tuple[jnp.ndarray, jnp.ndarray], + gr: Tuple[jax.Array, jax.Array], ot_prob: "linear_problem.LinearProblem", - f: jnp.ndarray, - g: jnp.ndarray, + f: jax.Array, + g: jax.Array, lse_mode: bool, - ) -> jnp.ndarray: + ) -> 
jax.Array: r"""Apply minus inverse of [hessian ``reg_ot_cost`` w.r.t. ``f``, ``g``]. This function is used to carry out implicit differentiation of ``sinkhorn`` @@ -224,7 +223,7 @@ def solve( return jnp.concatenate((-vjp_gr_f, -vjp_gr_g)) def first_order_conditions( - self, prob, f: jnp.ndarray, g: jnp.ndarray, lse_mode: bool + self, prob, f: jax.Array, g: jax.Array, lse_mode: bool ): r"""Compute vector of first order conditions for the reg-OT problem. @@ -238,12 +237,12 @@ def first_order_conditions( Args: prob: definition of the linear optimal transport problem. - f: jnp.ndarray, first potential - g: jnp.ndarray, second potential + f: jax.Array, first potential + g: jax.Array, second potential lse_mode: bool Returns: - a jnp.ndarray of size (size of ``n + m``) quantifying deviation to + a jax.Array of size (size of ``n + m``) quantifying deviation to optimality for variables ``f`` and ``g``. """ geom = prob.geom @@ -266,8 +265,8 @@ def first_order_conditions( return jnp.concatenate((result_a, result_b)) def gradient( - self, prob: "linear_problem.LinearProblem", f: jnp.ndarray, - g: jnp.ndarray, lse_mode: bool, gr: Tuple[jnp.ndarray, jnp.ndarray] + self, prob: "linear_problem.LinearProblem", f: jax.Array, g: jax.Array, + lse_mode: bool, gr: Tuple[jax.Array, jax.Array] ) -> "linear_problem.LinearProblem": """Apply VJP to recover gradient in reverse mode differentiation.""" # Applies first part of vjp to gr: inverse part of implicit function theorem @@ -287,13 +286,13 @@ def replace(self, **kwargs: Any) -> "ImplicitDiff": # noqa: D102 def solve_jax_cg( lin: LinOp_t, - b: jnp.ndarray, + b: jax.Array, lin_t: Optional[LinOp_t] = None, symmetric: bool = False, ridge_identity: float = 0.0, ridge_kernel: float = 0.0, **kwargs: Any -) -> jnp.ndarray: +) -> jax.Array: """Wrapper around JAX native linear solvers. 
Args: diff --git a/src/ott/solvers/linear/lineax_implicit.py b/src/ott/solvers/linear/lineax_implicit.py index 79b9e7c95..ac3978462 100644 --- a/src/ott/solvers/linear/lineax_implicit.py +++ b/src/ott/solvers/linear/lineax_implicit.py @@ -46,14 +46,14 @@ def transpose(self): def solve_lineax( lin: Callable, - b: jnp.ndarray, + b: jax.Array, lin_t: Optional[Callable] = None, symmetric: bool = False, nonsym_solver: Optional[lx.AbstractLinearSolver] = None, ridge_identity: float = 0.0, ridge_kernel: float = 0.0, **kwargs: Any -) -> jnp.ndarray: +) -> jax.Array: """Wrapper around lineax solvers. Args: diff --git a/src/ott/solvers/linear/lr_utils.py b/src/ott/solvers/linear/lr_utils.py index 8ade265c9..2eb4c32ed 100644 --- a/src/ott/solvers/linear/lr_utils.py +++ b/src/ott/solvers/linear/lr_utils.py @@ -24,27 +24,27 @@ class State(NamedTuple): # noqa: D101 - v1: jnp.ndarray - v2: jnp.ndarray - u1: jnp.ndarray - u2: jnp.ndarray - g: jnp.ndarray + v1: jax.Array + v2: jax.Array + u1: jax.Array + u2: jax.Array + g: jax.Array err: float class Constants(NamedTuple): # noqa: D101 - a: jnp.ndarray - b: jnp.ndarray + a: jax.Array + b: jax.Array rho_a: float rho_b: float - supp_a: Optional[jnp.ndarray] = None - supp_b: Optional[jnp.ndarray] = None + supp_a: Optional[jax.Array] = None + supp_b: Optional[jax.Array] = None def unbalanced_dykstra_lse( - c_q: jnp.ndarray, - c_r: jnp.ndarray, - c_g: jnp.ndarray, + c_q: jax.Array, + c_r: jax.Array, + c_g: jax.Array, gamma: float, ot_prob: linear_problem.LinearProblem, translation_invariant: bool = True, @@ -52,7 +52,7 @@ def unbalanced_dykstra_lse( min_iter: int = 0, inner_iter: int = 10, max_iter: int = 10000 -) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]: +) -> Tuple[jax.Array, jax.Array, jax.Array]: """Dykstra's algorithm for the unbalanced :class:`~ott.solvers.linear.sinkhorn_lr.LRSinkhorn` in LSE mode. 
@@ -74,10 +74,10 @@ def unbalanced_dykstra_lse( """ # noqa: D205 def _softm( - v: jnp.ndarray, - c: jnp.ndarray, + v: jax.Array, + c: jax.Array, axis: int, - ) -> jnp.ndarray: + ) -> jax.Array: v = jnp.expand_dims(v, axis=1 - axis) return jsp.special.logsumexp(v + c, axis=axis) @@ -181,9 +181,9 @@ def body_fn( def unbalanced_dykstra_kernel( - k_q: jnp.ndarray, - k_r: jnp.ndarray, - k_g: jnp.ndarray, + k_q: jax.Array, + k_r: jax.Array, + k_g: jax.Array, gamma: float, ot_prob: linear_problem.LinearProblem, translation_invariant: bool = True, @@ -191,7 +191,7 @@ def unbalanced_dykstra_kernel( min_iter: int = 0, inner_iter: int = 10, max_iter: int = 10000 -) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]: +) -> Tuple[jax.Array, jax.Array, jax.Array]: """Dykstra's algorithm for the unbalanced :class:`~ott.solvers.linear.sinkhorn_lr.LRSinkhorn` in kernel mode. @@ -317,7 +317,7 @@ def body_fn( def compute_lambdas( - const: Constants, state: State, gamma: float, g: jnp.ndarray, *, + const: Constants, state: State, gamma: float, g: jax.Array, *, lse_mode: bool ) -> Tuple[float, float]: """TODO.""" diff --git a/src/ott/solvers/linear/sinkhorn.py b/src/ott/solvers/linear/sinkhorn.py index 7ab17e870..44afe1833 100644 --- a/src/ott/solvers/linear/sinkhorn.py +++ b/src/ott/solvers/linear/sinkhorn.py @@ -52,11 +52,11 @@ class SinkhornState(NamedTuple): """Holds the state variables used to solve OT with Sinkhorn.""" - errors: Optional[jnp.ndarray] = None - fu: Optional[jnp.ndarray] = None - gv: Optional[jnp.ndarray] = None - old_fus: Optional[jnp.ndarray] = None - old_mapped_fus: Optional[jnp.ndarray] = None + errors: Optional[jax.Array] = None + fu: Optional[jax.Array] = None + gv: Optional[jax.Array] = None + old_fus: Optional[jax.Array] = None + old_mapped_fus: Optional[jax.Array] = None def set(self, **kwargs: Any) -> "SinkhornState": """Return a copy of self, with potential overwrites.""" @@ -70,7 +70,7 @@ def solution_error( lse_mode: bool, parallel_dual_updates: bool, 
recenter: bool, - ) -> jnp.ndarray: + ) -> jax.Array: """State dependent function to return error.""" fu, gv = self.fu, self.gv if recenter and lse_mode: @@ -92,10 +92,10 @@ def compute_kl_reg_cost( # noqa: D102 def recenter( self, - f: jnp.ndarray, - g: jnp.ndarray, + f: jax.Array, + g: jax.Array, ot_prob: linear_problem.LinearProblem, - ) -> Tuple[jnp.ndarray, jnp.ndarray]: + ) -> Tuple[jax.Array, jax.Array]: """Re-center dual potentials. If the ``ot_prob`` is balanced, the ``f`` potential is zero-centered. @@ -132,14 +132,14 @@ def recenter( def solution_error( - f_u: jnp.ndarray, - g_v: jnp.ndarray, + f_u: jax.Array, + g_v: jax.Array, ot_prob: linear_problem.LinearProblem, *, norm_error: Sequence[int], lse_mode: bool, parallel_dual_updates: bool, -) -> jnp.ndarray: +) -> jax.Array: """Given two potential/scaling solutions, computes deviation to optimality. When the ``ot_prob`` problem is balanced and the usual Sinkhorn updates are @@ -153,8 +153,8 @@ def solution_error( additional quantities to qualify optimality must be taken into account. Args: - f_u: jnp.ndarray, potential or scaling - g_v: jnp.ndarray, potential or scaling + f_u: jax.Array, potential or scaling + g_v: jax.Array, potential or scaling ot_prob: linear OT problem norm_error: int, p-norm used to compute error. lse_mode: True if log-sum-exp operations, False if kernel vector products. @@ -196,9 +196,9 @@ def solution_error( def marginal_error( - f_u: jnp.ndarray, - g_v: jnp.ndarray, - target: jnp.ndarray, + f_u: jax.Array, + g_v: jax.Array, + target: jax.Array, geom: geometry.Geometry, axis: int = 0, norm_error: Sequence[int] = (1,), @@ -229,7 +229,7 @@ def marginal_error( def compute_kl_reg_cost( - f: jnp.ndarray, g: jnp.ndarray, ot_prob: linear_problem.LinearProblem, + f: jax.Array, g: jax.Array, ot_prob: linear_problem.LinearProblem, lse_mode: bool ) -> float: r"""Compute objective of Sinkhorn for OT problem given dual solutions. 
@@ -243,8 +243,8 @@ def compute_kl_reg_cost( values, ``jnp.where`` is used to cancel these contributions. Args: - f: jnp.ndarray, potential - g: jnp.ndarray, potential + f: jax.Array, potential + g: jax.Array, potential ot_prob: linear optimal transport problem. lse_mode: bool, whether to compute total mass in lse or kernel mode. @@ -320,12 +320,12 @@ class SinkhornOutput(NamedTuple): computations of errors. """ - f: Optional[jnp.ndarray] = None - g: Optional[jnp.ndarray] = None - errors: Optional[jnp.ndarray] = None + f: Optional[jax.Array] = None + g: Optional[jax.Array] = None + errors: Optional[jax.Array] = None reg_ot_cost: Optional[float] = None ot_prob: Optional[linear_problem.LinearProblem] = None - threshold: Optional[jnp.ndarray] = None + threshold: Optional[jax.Array] = None converged: Optional[bool] = None inner_iterations: Optional[int] = None @@ -342,7 +342,7 @@ def set_cost( # noqa: D102 return self.set(reg_ot_cost=compute_kl_reg_cost(f, g, ot_prob, lse_mode)) @property - def dual_cost(self) -> jnp.ndarray: + def dual_cost(self) -> jax.Array: """Return dual transport cost, without considering regularizer.""" a, b = self.ot_prob.a, self.ot_prob.b dual_cost = jnp.sum(jnp.where(a > 0.0, a * self.f, 0)) @@ -399,9 +399,7 @@ def kl_reg_cost(self) -> float: """ return self.reg_ot_cost - def transport_cost_at_geom( - self, other_geom: geometry.Geometry - ) -> jnp.ndarray: + def transport_cost_at_geom(self, other_geom: geometry.Geometry) -> jax.Array: r"""Return bare transport cost of current solution at any geometry. 
In order to compute cost, we check first if the geometry can be converted @@ -428,11 +426,11 @@ def geom(self) -> geometry.Geometry: # noqa: D102 return self.ot_prob.geom @property - def a(self) -> jnp.ndarray: # noqa: D102 + def a(self) -> jax.Array: # noqa: D102 return self.ot_prob.a @property - def b(self) -> jnp.ndarray: # noqa: D102 + def b(self) -> jax.Array: # noqa: D102 return self.ot_prob.b @property @@ -441,13 +439,13 @@ def n_iters(self) -> int: # noqa: D102 return jnp.sum(self.errors != -1) * self.inner_iterations @property - def scalings(self) -> Tuple[jnp.ndarray, jnp.ndarray]: # noqa: D102 + def scalings(self) -> Tuple[jax.Array, jax.Array]: # noqa: D102 u = self.ot_prob.geom.scaling_from_potential(self.f) v = self.ot_prob.geom.scaling_from_potential(self.g) return u, v @property - def matrix(self) -> jnp.ndarray: + def matrix(self) -> jax.Array: """Transport matrix if it can be instantiated.""" try: return self.ot_prob.geom.transport_from_potentials(self.f, self.g) @@ -459,13 +457,13 @@ def transport_mass(self) -> float: """Sum of transport matrix.""" return self.marginal(0).sum() - def apply(self, inputs: jnp.ndarray, axis: int = 0) -> jnp.ndarray: + def apply(self, inputs: jax.Array, axis: int = 0) -> jax.Array: """Apply the transport to a ndarray; axis=1 for its transpose.""" return self.ot_prob.geom.apply_transport_from_potentials( self.f, self.g, inputs, axis=axis ) - def marginal(self, axis: int) -> jnp.ndarray: # noqa: D102 + def marginal(self, axis: int) -> jax.Array: # noqa: D102 return self.ot_prob.geom.marginal_from_potentials(self.f, self.g, axis=axis) def cost_at_geom(self, other_geom: geometry.Geometry) -> float: @@ -832,7 +830,7 @@ def __init__( def __call__( self, ot_prob: linear_problem.LinearProblem, - init: Tuple[Optional[jnp.ndarray], Optional[jnp.ndarray]] = (None, None), + init: Tuple[Optional[jax.Array], Optional[jax.Array]] = (None, None), rng: Optional[jax.Array] = None, ) -> SinkhornOutput: """Run Sinkhorn algorithm. 
@@ -868,9 +866,7 @@ def xi(tau_i: float, tau_j: float) -> float: k_ij = k(tau_i, tau_j) return k_ij / (1. - k_ij) - def smin( - potential: jnp.ndarray, marginal: jnp.ndarray, tau: float - ) -> float: + def smin(potential: jax.Array, marginal: jax.Array, tau: float) -> float: rho = uf.rho(ot_prob.epsilon, tau) return -rho * mu.logsumexp(-potential / rho, b=marginal) @@ -1015,8 +1011,8 @@ def outer_iterations(self) -> int: return np.ceil(self.max_iterations / self.inner_iterations).astype(int) def init_state( - self, ot_prob: linear_problem.LinearProblem, init: Tuple[jnp.ndarray, - jnp.ndarray] + self, ot_prob: linear_problem.LinearProblem, init: Tuple[jax.Array, + jax.Array] ) -> SinkhornState: """Return the initial state of the loop.""" fu, gv = init @@ -1124,7 +1120,7 @@ def tree_unflatten(cls, aux_data, children): # noqa: D102 def run( ot_prob: linear_problem.LinearProblem, solver: Sinkhorn, - init: Tuple[jnp.ndarray, ...] + init: Tuple[jax.Array, ...] ) -> SinkhornOutput: """Run loop of the solver, outputting a state upgraded to an output.""" iter_fun = _iterations_implicit if solver.implicit_diff else iterations @@ -1137,7 +1133,7 @@ def run( def iterations( ot_prob: linear_problem.LinearProblem, solver: Sinkhorn, - init: Tuple[jnp.ndarray, ...] + init: Tuple[jax.Array, ...] ) -> SinkhornOutput: """Jittable Sinkhorn loop. args contain initialization variables.""" @@ -1174,8 +1170,8 @@ def body_fn( def _iterations_taped( ot_prob: linear_problem.LinearProblem, solver: Sinkhorn, - init: Tuple[jnp.ndarray, ...] -) -> Tuple[SinkhornOutput, Tuple[jnp.ndarray, jnp.ndarray, + init: Tuple[jax.Array, ...] +) -> Tuple[SinkhornOutput, Tuple[jax.Array, jax.Array, linear_problem.LinearProblem, Sinkhorn]]: """Run forward pass of the Sinkhorn algorithm storing side information.""" state = iterations(ot_prob, solver, init) @@ -1194,7 +1190,7 @@ def _iterations_implicit_bwd(res, gr): considered. 
Returns: - a tuple of gradients: PyTree for geom, one jnp.ndarray for each of a and b. + a tuple of gradients: PyTree for geom, one jax.Array for each of a and b. """ f, g, ot_prob, solver = res gr = gr[:2] diff --git a/src/ott/solvers/linear/sinkhorn_lr.py b/src/ott/solvers/linear/sinkhorn_lr.py index db948cf8b..b6732f76f 100644 --- a/src/ott/solvers/linear/sinkhorn_lr.py +++ b/src/ott/solvers/linear/sinkhorn_lr.py @@ -43,12 +43,12 @@ class LRSinkhornState(NamedTuple): """State of the Low Rank Sinkhorn algorithm.""" - q: jnp.ndarray - r: jnp.ndarray - g: jnp.ndarray + q: jax.Array + r: jax.Array + g: jax.Array gamma: float - costs: jnp.ndarray - errors: jnp.ndarray + costs: jax.Array + errors: jax.Array crossed_threshold: bool def compute_error( # noqa: D102 @@ -79,7 +79,7 @@ def reg_ot_cost( # noqa: D102 def solution_error( # noqa: D102 self, ot_prob: linear_problem.LinearProblem, norm_error: Tuple[int, ...] - ) -> jnp.ndarray: + ) -> jax.Array: return solution_error(self.q, self.r, ot_prob, norm_error) def set(self, **kwargs: Any) -> "LRSinkhornState": @@ -88,9 +88,9 @@ def set(self, **kwargs: Any) -> "LRSinkhornState": def compute_reg_ot_cost( - q: jnp.ndarray, - r: jnp.ndarray, - g: jnp.ndarray, + q: jax.Array, + r: jax.Array, + g: jax.Array, ot_prob: linear_problem.LinearProblem, epsilon: float, use_danskin: bool = False @@ -110,7 +110,7 @@ def compute_reg_ot_cost( regularized OT cost, the (primal) transport cost of the low-rank solution. """ - def ent(x: jnp.ndarray) -> float: + def ent(x: jax.Array) -> float: # generalized entropy return jnp.sum(jsp.special.entr(x) + x) @@ -131,9 +131,9 @@ def ent(x: jnp.ndarray) -> float: def solution_error( - q: jnp.ndarray, r: jnp.ndarray, ot_prob: linear_problem.LinearProblem, + q: jax.Array, r: jax.Array, ot_prob: linear_problem.LinearProblem, norm_error: Tuple[int, ...] -) -> jnp.ndarray: +) -> jax.Array: """Compute solution error. Since only balanced case is available for LR, this is marginal deviation. 
@@ -166,13 +166,13 @@ def solution_error( class LRSinkhornOutput(NamedTuple): """Transport interface for a low-rank Sinkhorn solution.""" - q: jnp.ndarray - r: jnp.ndarray - g: jnp.ndarray - costs: jnp.ndarray + q: jax.Array + r: jax.Array + g: jax.Array + costs: jax.Array # TODO(michalk8): must be called `errors`, because of `store_inner_errors` # in future, enforce via class hierarchy - errors: jnp.ndarray + errors: jax.Array ot_prob: linear_problem.LinearProblem epsilon: float inner_iterations: int @@ -211,11 +211,11 @@ def geom(self) -> geometry.Geometry: # noqa: D102 return self.ot_prob.geom @property - def a(self) -> jnp.ndarray: # noqa: D102 + def a(self) -> jax.Array: # noqa: D102 return self.ot_prob.a @property - def b(self) -> jnp.ndarray: # noqa: D102 + def b(self) -> jax.Array: # noqa: D102 return self.ot_prob.b @property @@ -229,17 +229,17 @@ def converged(self) -> bool: # noqa: D102 ) @property - def matrix(self) -> jnp.ndarray: + def matrix(self) -> jax.Array: """Transport matrix if it can be instantiated.""" return (self.q * self._inv_g) @ self.r.T - def apply(self, inputs: jnp.ndarray, axis: int = 0) -> jnp.ndarray: + def apply(self, inputs: jax.Array, axis: int = 0) -> jax.Array: """Apply the transport to a array; axis=1 for its transpose.""" q, r = (self.q, self.r) if axis == 1 else (self.r, self.q) # for `axis=0`: (batch, m), (m, r), (r,), (r, n) return ((inputs @ r) * self._inv_g) @ q.T - def marginal(self, axis: int) -> jnp.ndarray: # noqa: D102 + def marginal(self, axis: int) -> jax.Array: # noqa: D102 length = self.q.shape[0] if axis == 0 else self.r.shape[0] return self.apply(jnp.ones(length,), axis=axis) @@ -262,7 +262,7 @@ def transport_mass(self) -> float: return self.marginal(0).sum() @property - def _inv_g(self) -> jnp.ndarray: + def _inv_g(self) -> jax.Array: return 1. 
/ self.g @@ -341,8 +341,8 @@ def __init__( def __call__( self, ot_prob: linear_problem.LinearProblem, - init: Tuple[Optional[jnp.ndarray], Optional[jnp.ndarray], - Optional[jnp.ndarray]] = (None, None, None), + init: Tuple[Optional[jax.Array], Optional[jax.Array], + Optional[jax.Array]] = (None, None, None), rng: Optional[jax.Array] = None, **kwargs: Any, ) -> LRSinkhornOutput: @@ -371,7 +371,7 @@ def _get_costs( self, ot_prob: linear_problem.LinearProblem, state: LRSinkhornState, - ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray, float]: + ) -> Tuple[jax.Array, jax.Array, jax.Array, float]: log_q, log_r, log_g = ( mu.safe_log(state.q), mu.safe_log(state.r), mu.safe_log(state.g) ) @@ -407,9 +407,9 @@ def _get_costs( # TODO(michalk8): move to `lr_utils` when refactoring this def dykstra_update_lse( self, - c_q: jnp.ndarray, - c_r: jnp.ndarray, - h: jnp.ndarray, + c_q: jax.Array, + c_r: jax.Array, + h: jax.Array, gamma: float, ot_prob: linear_problem.LinearProblem, min_entry_value: float = 1e-6, @@ -417,7 +417,7 @@ def dykstra_update_lse( min_iter: int = 0, inner_iter: int = 10, max_iter: int = 10000 - ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]: + ) -> Tuple[jax.Array, jax.Array, jax.Array]: """Run Dykstra's algorithm.""" # shortcuts for problem's definition. r = self.rank @@ -435,24 +435,24 @@ def dykstra_update_lse( constants = c_q, c_r, loga, logb def cond_fn( - iteration: int, constants: Tuple[jnp.ndarray, ...], - state_inner: Tuple[jnp.ndarray, ...] + iteration: int, constants: Tuple[jax.Array, ...], + state_inner: Tuple[jax.Array, ...] 
) -> bool: del iteration, constants *_, err = state_inner return err > tolerance def _softm( - f: jnp.ndarray, g: jnp.ndarray, c: jnp.ndarray, axis: int - ) -> jnp.ndarray: + f: jax.Array, g: jax.Array, c: jax.Array, axis: int + ) -> jax.Array: return jsp.special.logsumexp( gamma * (f[:, None] + g[None, :] - c), axis=axis ) def body_fn( - iteration: int, constants: Tuple[jnp.ndarray, ...], - state_inner: Tuple[jnp.ndarray, ...], compute_error: bool - ) -> Tuple[jnp.ndarray, ...]: + iteration: int, constants: Tuple[jax.Array, ...], + state_inner: Tuple[jax.Array, ...], compute_error: bool + ) -> Tuple[jax.Array, ...]: # TODO(michalk8): in the future, use `NamedTuple` f1, f2, g1_old, g2_old, h_old, w_gi, w_gp, w_q, w_r, err = state_inner c_q, c_r, loga, logb = constants @@ -501,15 +501,15 @@ def body_fn( return f1, f2, g1_old, g2_old, h_old, w_gi, w_gp, w_q, w_r, err def recompute_couplings( - f1: jnp.ndarray, - g1: jnp.ndarray, - c_q: jnp.ndarray, - f2: jnp.ndarray, - g2: jnp.ndarray, - c_r: jnp.ndarray, - h: jnp.ndarray, + f1: jax.Array, + g1: jax.Array, + c_q: jax.Array, + f2: jax.Array, + g2: jax.Array, + c_r: jax.Array, + h: jax.Array, gamma: float, - ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]: + ) -> Tuple[jax.Array, jax.Array, jax.Array]: q = jnp.exp(gamma * (f1[:, None] + g1[None, :] - c_q)) r = jnp.exp(gamma * (f2[:, None] + g2[None, :] - c_r)) g = jnp.exp(gamma * h) @@ -524,9 +524,9 @@ def recompute_couplings( def dykstra_update_kernel( self, - k_q: jnp.ndarray, - k_r: jnp.ndarray, - k_g: jnp.ndarray, + k_q: jax.Array, + k_r: jax.Array, + k_g: jax.Array, gamma: float, ot_prob: linear_problem.LinearProblem, min_entry_value: float = 1e-6, @@ -534,7 +534,7 @@ def dykstra_update_kernel( min_iter: int = 0, inner_iter: int = 10, max_iter: int = 10000 - ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]: + ) -> Tuple[jax.Array, jax.Array, jax.Array]: """Run Dykstra's algorithm.""" # shortcuts for problem's definition. 
rank = self.rank @@ -553,17 +553,17 @@ def dykstra_update_kernel( constants = k_q, k_r, k_g, a, b def cond_fn( - iteration: int, constants: Tuple[jnp.ndarray, ...], - state_inner: Tuple[jnp.ndarray, ...] + iteration: int, constants: Tuple[jax.Array, ...], + state_inner: Tuple[jax.Array, ...] ) -> bool: del iteration, constants *_, err = state_inner return err > tolerance def body_fn( - iteration: int, constants: Tuple[jnp.ndarray, ...], - state_inner: Tuple[jnp.ndarray, ...], compute_error: bool - ) -> Tuple[jnp.ndarray, ...]: + iteration: int, constants: Tuple[jax.Array, ...], + state_inner: Tuple[jax.Array, ...], compute_error: bool + ) -> Tuple[jax.Array, ...]: # TODO(michalk8): in the future, use `NamedTuple` u1, u2, v1_old, v2_old, g_old, q_gi, q_gp, q_q, q_r, err = state_inner k_q, k_r, k_g, a, b = constants @@ -600,14 +600,14 @@ def body_fn( return u1, u2, v1_old, v2_old, g_old, q_gi, q_gp, q_q, q_r, err def recompute_couplings( - u1: jnp.ndarray, - v1: jnp.ndarray, - k_q: jnp.ndarray, - u2: jnp.ndarray, - v2: jnp.ndarray, - k_r: jnp.ndarray, - g: jnp.ndarray, - ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]: + u1: jax.Array, + v1: jax.Array, + k_q: jax.Array, + u2: jax.Array, + v2: jax.Array, + k_r: jax.Array, + g: jax.Array, + ) -> Tuple[jax.Array, jax.Array, jax.Array]: q = u1.reshape((-1, 1)) * k_q * v1.reshape((1, -1)) r = u2.reshape((-1, 1)) * k_r * v2.reshape((1, -1)) return q, r, g @@ -736,7 +736,7 @@ def create_initializer( def init_state( self, ot_prob: linear_problem.LinearProblem, - init: Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray] + init: Tuple[jax.Array, jax.Array, jax.Array] ) -> LRSinkhornState: """Return the initial state of the loop.""" q, r, g = init @@ -811,8 +811,7 @@ def _diverged(self, state: LRSinkhornState, iteration: int) -> bool: def run( ot_prob: linear_problem.LinearProblem, solver: LRSinkhorn, - init: Tuple[Optional[jnp.ndarray], Optional[jnp.ndarray], - Optional[jnp.ndarray]], + init: Tuple[Optional[jax.Array], 
Optional[jax.Array], Optional[jax.Array]], ) -> LRSinkhornOutput: """Run loop of the solver, outputting a state upgraded to an output.""" out = sinkhorn.iterations(ot_prob, solver, init) diff --git a/src/ott/solvers/linear/univariate.py b/src/ott/solvers/linear/univariate.py index 2b6392227..1f2a47b6f 100644 --- a/src/ott/solvers/linear/univariate.py +++ b/src/ott/solvers/linear/univariate.py @@ -53,7 +53,7 @@ class UnivariateSolver: def __init__( self, - sort_fn: Optional[Callable[[jnp.ndarray], jnp.ndarray]] = None, + sort_fn: Optional[Callable[[jax.Array], jax.Array]] = None, cost_fn: Optional[costs.CostFn] = None, method: Literal["subsample", "quantile", "wasserstein", "equal"] = "subsample", @@ -66,10 +66,10 @@ def __init__( def __call__( self, - x: jnp.ndarray, - y: jnp.ndarray, - a: Optional[jnp.ndarray] = None, - b: Optional[jnp.ndarray] = None + x: jax.Array, + y: jax.Array, + a: Optional[jax.Array] = None, + b: Optional[jax.Array] = None ) -> float: """Computes the Univariate OT Distance between `x` and `y`. @@ -113,8 +113,8 @@ def __call__( return self.cost_fn.pairwise(xx, yy) * (n / xx.shape[0]) def _cdf_distance( - self, x: jnp.ndarray, y: jnp.ndarray, a: Optional[jnp.ndarray], - b: Optional[jnp.ndarray] + self, x: jax.Array, y: jax.Array, a: Optional[jax.Array], + b: Optional[jax.Array] ): # Implementation based on `scipy` implementation for # :func: diff --git a/src/ott/solvers/quadratic/_solve.py b/src/ott/solvers/quadratic/_solve.py index 9cdefec93..986680637 100644 --- a/src/ott/solvers/quadratic/_solve.py +++ b/src/ott/solvers/quadratic/_solve.py @@ -13,7 +13,7 @@ # limitations under the License. 
from typing import Any, Literal, Optional, Union -import jax.numpy as jnp +import jax from ott.geometry import geometry from ott.problems.quadratic import quadratic_costs, quadratic_problem @@ -28,8 +28,8 @@ def solve( geom_yy: geometry.Geometry, geom_xy: Optional[geometry.Geometry] = None, fused_penalty: float = 1.0, - a: Optional[jnp.ndarray] = None, - b: Optional[jnp.ndarray] = None, + a: Optional[jax.Array] = None, + b: Optional[jax.Array] = None, tau_a: float = 1.0, tau_b: float = 1.0, loss: Union[Literal["sqeucl", "kl"], quadratic_costs.GWLoss] = "sqeucl", diff --git a/src/ott/solvers/quadratic/gromov_wasserstein.py b/src/ott/solvers/quadratic/gromov_wasserstein.py index a7890e1c9..554cdaaed 100644 --- a/src/ott/solvers/quadratic/gromov_wasserstein.py +++ b/src/ott/solvers/quadratic/gromov_wasserstein.py @@ -63,10 +63,10 @@ class GWOutput(NamedTuple): old_transport_mass: Holds total mass of transport at previous iteration. """ - costs: Optional[jnp.ndarray] = None - linear_convergence: Optional[jnp.ndarray] = None + costs: Optional[jax.Array] = None + linear_convergence: Optional[jax.Array] = None converged: bool = False - errors: Optional[jnp.ndarray] = None + errors: Optional[jax.Array] = None linear_state: Optional[LinearOutput] = None geom: Optional[geometry.Geometry] = None # Intermediate values. @@ -77,11 +77,11 @@ def set(self, **kwargs: Any) -> "GWOutput": return self._replace(**kwargs) @property - def matrix(self) -> jnp.ndarray: + def matrix(self) -> jax.Array: """Transport matrix.""" return self._rescale_factor * self.linear_state.matrix - def apply(self, inputs: jnp.ndarray, axis: int = 0) -> jnp.ndarray: + def apply(self, inputs: jax.Array, axis: int = 0) -> jax.Array: """Apply the transport to an array; axis=1 for its transpose.""" return self._rescale_factor * self.linear_state.apply(inputs, axis=axis) @@ -124,13 +124,13 @@ class GWState(NamedTuple): at each iteration. 
""" - costs: jnp.ndarray - linear_convergence: jnp.ndarray + costs: jax.Array + linear_convergence: jax.Array linear_state: LinearOutput linear_pb: linear_problem.LinearProblem old_transport_mass: float rngs: Optional[jax.Array] = None - errors: Optional[jnp.ndarray] = None + errors: Optional[jax.Array] = None def set(self, **kwargs: Any) -> "GWState": """Return a copy of self, possibly with overwrites.""" diff --git a/src/ott/solvers/quadratic/gromov_wasserstein_lr.py b/src/ott/solvers/quadratic/gromov_wasserstein_lr.py index 214853f4c..62a5592bc 100644 --- a/src/ott/solvers/quadratic/gromov_wasserstein_lr.py +++ b/src/ott/solvers/quadratic/gromov_wasserstein_lr.py @@ -46,12 +46,12 @@ class LRGWState(NamedTuple): """State of the low-rank GW algorithm.""" - q: jnp.ndarray - r: jnp.ndarray - g: jnp.ndarray + q: jax.Array + r: jax.Array + g: jax.Array gamma: float - costs: jnp.ndarray - errors: jnp.ndarray + costs: jax.Array + errors: jax.Array crossed_threshold: bool def compute_error( # noqa: D102 @@ -85,9 +85,9 @@ def set(self, **kwargs: Any) -> "LRGWState": def compute_reg_gw_cost( - q: jnp.ndarray, - r: jnp.ndarray, - g: jnp.ndarray, + q: jax.Array, + r: jax.Array, + g: jax.Array, ot_prob: quadratic_problem.QuadraticProblem, epsilon: float, use_danskin: bool = False @@ -107,7 +107,7 @@ def compute_reg_gw_cost( regularized OT cost, the (primal) transport cost of the low-rank solution. 
""" - def ent(x: jnp.ndarray) -> float: + def ent(x: jax.Array) -> float: # generalized entropy return jnp.sum(jsp.special.entr(x) + x) @@ -139,13 +139,13 @@ def ent(x: jnp.ndarray) -> float: class LRGWOutput(NamedTuple): """Transport interface for a low-rank GW solution.""" - q: jnp.ndarray - r: jnp.ndarray - g: jnp.ndarray - costs: jnp.ndarray + q: jax.Array + r: jax.Array + g: jax.Array + costs: jax.Array # TODO(michalk8): must be called `errors`, because of `store_inner_errors` # in future, enforce via class hierarchy - errors: jnp.ndarray + errors: jax.Array ot_prob: quadratic_problem.QuadraticProblem epsilon: float inner_iterations: int @@ -184,11 +184,11 @@ def geom(self) -> geometry.Geometry: # noqa: D102 return _linearized_geometry(self.ot_prob, q=self.q, r=self.r, g=self.g) @property - def a(self) -> jnp.ndarray: # noqa: D102 + def a(self) -> jax.Array: # noqa: D102 return self.ot_prob.a @property - def b(self) -> jnp.ndarray: # noqa: D102 + def b(self) -> jax.Array: # noqa: D102 return self.ot_prob.b @property @@ -202,17 +202,17 @@ def converged(self) -> bool: # noqa: D102 ) @property - def matrix(self) -> jnp.ndarray: + def matrix(self) -> jax.Array: """Transport matrix if it can be instantiated.""" return (self.q * self._inv_g) @ self.r.T - def apply(self, inputs: jnp.ndarray, axis: int = 0) -> jnp.ndarray: + def apply(self, inputs: jax.Array, axis: int = 0) -> jax.Array: """Apply the transport to a array; axis=1 for its transpose.""" q, r = (self.q, self.r) if axis == 1 else (self.r, self.q) # for `axis=0`: (batch, m), (m, r), (r,), (r, n) return ((inputs @ r) * self._inv_g) @ q.T - def marginal(self, axis: int) -> jnp.ndarray: # noqa: D102 + def marginal(self, axis: int) -> jax.Array: # noqa: D102 length = self.q.shape[0] if axis == 0 else self.r.shape[0] return self.apply(jnp.ones(length,), axis=axis) @@ -250,7 +250,7 @@ def transport_mass(self) -> float: return self.marginal(0).sum() @property - def _inv_g(self) -> jnp.ndarray: + def _inv_g(self) 
-> jax.Array: return 1.0 / self.g @@ -334,8 +334,8 @@ def __init__( def __call__( self, ot_prob: quadratic_problem.QuadraticProblem, - init: Tuple[Optional[jnp.ndarray], Optional[jnp.ndarray], - Optional[jnp.ndarray]] = (None, None, None), + init: Tuple[Optional[jax.Array], Optional[jax.Array], + Optional[jax.Array]] = (None, None, None), rng: Optional[jax.Array] = None, **kwargs: Any, ) -> LRGWOutput: @@ -370,7 +370,7 @@ def _get_costs( self, ot_prob: quadratic_problem.QuadraticProblem, state: LRGWState, - ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray, float]: + ) -> Tuple[jax.Array, jax.Array, jax.Array, float]: q, r, g = state.q, state.r, state.g log_q, log_r, log_g = mu.safe_log(q), mu.safe_log(r), mu.safe_log(g) inv_g = 1.0 / g[None, :] @@ -427,9 +427,9 @@ def _get_costs( # TODO(michalk8): move to `lr_utils` when refactoring this the future def dykstra_update_lse( self, - c_q: jnp.ndarray, - c_r: jnp.ndarray, - h: jnp.ndarray, + c_q: jax.Array, + c_r: jax.Array, + h: jax.Array, gamma: float, ot_prob: quadratic_problem.QuadraticProblem, min_entry_value: float = 1e-6, @@ -437,7 +437,7 @@ def dykstra_update_lse( min_iter: int = 0, inner_iter: int = 10, max_iter: int = 10000 - ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]: + ) -> Tuple[jax.Array, jax.Array, jax.Array]: """Run Dykstra's algorithm.""" # shortcuts for problem's definition. r = self.rank @@ -455,24 +455,24 @@ def dykstra_update_lse( constants = c_q, c_r, loga, logb def cond_fn( - iteration: int, constants: Tuple[jnp.ndarray, ...], - state_inner: Tuple[jnp.ndarray, ...] + iteration: int, constants: Tuple[jax.Array, ...], + state_inner: Tuple[jax.Array, ...] 
) -> bool: del iteration, constants *_, err = state_inner return err > tolerance def _softm( - f: jnp.ndarray, g: jnp.ndarray, c: jnp.ndarray, axis: int - ) -> jnp.ndarray: + f: jax.Array, g: jax.Array, c: jax.Array, axis: int + ) -> jax.Array: return jsp.special.logsumexp( gamma * (f[:, None] + g[None, :] - c), axis=axis ) def body_fn( - iteration: int, constants: Tuple[jnp.ndarray, ...], - state_inner: Tuple[jnp.ndarray, ...], compute_error: bool - ) -> Tuple[jnp.ndarray, ...]: + iteration: int, constants: Tuple[jax.Array, ...], + state_inner: Tuple[jax.Array, ...], compute_error: bool + ) -> Tuple[jax.Array, ...]: # TODO(michalk8): in the future, use `NamedTuple` f1, f2, g1_old, g2_old, h_old, w_gi, w_gp, w_q, w_r, err = state_inner c_q, c_r, loga, logb = constants @@ -522,15 +522,15 @@ def body_fn( return f1, f2, g1_old, g2_old, h_old, w_gi, w_gp, w_q, w_r, err def recompute_couplings( - f1: jnp.ndarray, - g1: jnp.ndarray, - c_q: jnp.ndarray, - f2: jnp.ndarray, - g2: jnp.ndarray, - c_r: jnp.ndarray, - h: jnp.ndarray, + f1: jax.Array, + g1: jax.Array, + c_q: jax.Array, + f2: jax.Array, + g2: jax.Array, + c_r: jax.Array, + h: jax.Array, gamma: float, - ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]: + ) -> Tuple[jax.Array, jax.Array, jax.Array]: q = jnp.exp(gamma * (f1[:, None] + g1[None, :] - c_q)) r = jnp.exp(gamma * (f2[:, None] + g2[None, :] - c_r)) g = jnp.exp(gamma * h) @@ -545,9 +545,9 @@ def recompute_couplings( def dykstra_update_kernel( self, - k_q: jnp.ndarray, - k_r: jnp.ndarray, - k_g: jnp.ndarray, + k_q: jax.Array, + k_r: jax.Array, + k_g: jax.Array, gamma: float, ot_prob: quadratic_problem.QuadraticProblem, min_entry_value: float = 1e-6, @@ -555,7 +555,7 @@ def dykstra_update_kernel( min_iter: int = 0, inner_iter: int = 10, max_iter: int = 10000 - ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]: + ) -> Tuple[jax.Array, jax.Array, jax.Array]: """Run Dykstra's algorithm.""" # shortcuts for problem's definition. 
del gamma @@ -575,17 +575,17 @@ def dykstra_update_kernel( constants = k_q, k_r, k_g, a, b def cond_fn( - iteration: int, constants: Tuple[jnp.ndarray, ...], - state_inner: Tuple[jnp.ndarray, ...] + iteration: int, constants: Tuple[jax.Array, ...], + state_inner: Tuple[jax.Array, ...] ) -> bool: del iteration, constants *_, err = state_inner return err > tolerance def body_fn( - iteration: int, constants: Tuple[jnp.ndarray, ...], - state_inner: Tuple[jnp.ndarray, ...], compute_error: bool - ) -> Tuple[jnp.ndarray, ...]: + iteration: int, constants: Tuple[jax.Array, ...], + state_inner: Tuple[jax.Array, ...], compute_error: bool + ) -> Tuple[jax.Array, ...]: # TODO(michalk8): in the future, use `NamedTuple` u1, u2, v1_old, v2_old, g_old, q_gi, q_gp, q_q, q_r, err = state_inner k_q, k_r, k_g, a, b = constants @@ -623,14 +623,14 @@ def body_fn( return u1, u2, v1_old, v2_old, g_old, q_gi, q_gp, q_q, q_r, err def recompute_couplings( - u1: jnp.ndarray, - v1: jnp.ndarray, - k_q: jnp.ndarray, - u2: jnp.ndarray, - v2: jnp.ndarray, - k_r: jnp.ndarray, - g: jnp.ndarray, - ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]: + u1: jax.Array, + v1: jax.Array, + k_q: jax.Array, + u2: jax.Array, + v2: jax.Array, + k_r: jax.Array, + g: jax.Array, + ) -> Tuple[jax.Array, jax.Array, jax.Array]: q = u1.reshape((-1, 1)) * k_q * v1.reshape((1, -1)) r = u2.reshape((-1, 1)) * k_r * v2.reshape((1, -1)) return q, r, g @@ -762,7 +762,7 @@ def create_initializer( def init_state( self, ot_prob: quadratic_problem.QuadraticProblem, - init: Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray] + init: Tuple[jax.Array, jax.Array, jax.Array] ) -> LRGWState: """Return the initial state of the loop.""" q, r, g = init @@ -837,8 +837,7 @@ def _diverged(self, state: LRGWState, iteration: int) -> bool: def run( ot_prob: quadratic_problem.QuadraticProblem, solver: LRGromovWasserstein, - init: Tuple[Optional[jnp.ndarray], Optional[jnp.ndarray], - Optional[jnp.ndarray]], + init: Tuple[Optional[jax.Array], 
Optional[jax.Array], Optional[jax.Array]], ) -> LRGWOutput: """Run loop of the solver, outputting a state upgraded to an output.""" out = sinkhorn.iterations(ot_prob, solver, init) @@ -849,9 +848,9 @@ def run( def dykstra_solution_error( - q: jnp.ndarray, r: jnp.ndarray, ot_prob: quadratic_problem.QuadraticProblem, + q: jax.Array, r: jax.Array, ot_prob: quadratic_problem.QuadraticProblem, norm_error: Tuple[int, ...] -) -> jnp.ndarray: +) -> jax.Array: """Compute solution error. Since only balanced case is available for LR, this is marginal deviation. @@ -884,9 +883,9 @@ def dykstra_solution_error( def _linearized_geometry( prob: quadratic_problem.QuadraticProblem, *, - q: jnp.ndarray, - r: jnp.ndarray, - g: jnp.ndarray, + q: jax.Array, + r: jax.Array, + g: jax.Array, ) -> low_rank.LRCGeometry: inv_sqrt_g = 1.0 / jnp.sqrt(g[None, :]) diff --git a/src/ott/solvers/quadratic/gw_barycenter.py b/src/ott/solvers/quadratic/gw_barycenter.py index f0d350b08..0f753793e 100644 --- a/src/ott/solvers/quadratic/gw_barycenter.py +++ b/src/ott/solvers/quadratic/gw_barycenter.py @@ -45,13 +45,13 @@ class GWBarycenterState(NamedTuple): gw_convergence: Array of shape ``[max_iter,]`` containing the convergence of all GW problems at each iteration. 
""" - cost: Optional[jnp.ndarray] = None - x: Optional[jnp.ndarray] = None - a: Optional[jnp.ndarray] = None - errors: Optional[jnp.ndarray] = None - costs: Optional[jnp.ndarray] = None - costs_bary: Optional[jnp.ndarray] = None - gw_convergence: Optional[jnp.ndarray] = None + cost: Optional[jax.Array] = None + x: Optional[jax.Array] = None + a: Optional[jax.Array] = None + errors: Optional[jax.Array] = None + costs: Optional[jax.Array] = None + costs_bary: Optional[jax.Array] = None + gw_convergence: Optional[jax.Array] = None def set(self, **kwargs: Any) -> "GWBarycenterState": """Return a copy of self, possibly with overwrites.""" @@ -133,9 +133,8 @@ def init_state( self, problem: gw_barycenter.GWBarycenterProblem, bar_size: int, - bar_init: Optional[Union[jnp.ndarray, Tuple[jnp.ndarray, - jnp.ndarray]]] = None, - a: Optional[jnp.ndarray] = None, + bar_init: Optional[Union[jax.Array, Tuple[jax.Array, jax.Array]]] = None, + a: Optional[jax.Array] = None, rng: Optional[jax.Array] = None, ) -> GWBarycenterState: """Initialize the (fused) Gromov-Wasserstein barycenter state. 
@@ -210,13 +209,13 @@ def update_state( iteration: int, problem: gw_barycenter.GWBarycenterProblem, store_errors: bool = True, - ) -> Tuple[float, bool, jnp.ndarray, Optional[jnp.ndarray]]: + ) -> Tuple[float, bool, jax.Array, Optional[jax.Array]]: """Solve the (fused) Gromov-Wasserstein barycenter problem.""" def solve_gw( - state: GWBarycenterState, b: jnp.ndarray, y: jnp.ndarray, - f: Optional[jnp.ndarray] - ) -> Tuple[float, bool, jnp.ndarray, Optional[jnp.ndarray]]: + state: GWBarycenterState, b: jax.Array, y: jax.Array, + f: Optional[jax.Array] + ) -> Tuple[float, bool, jax.Array, Optional[jax.Array]]: quad_problem = problem._create_problem(state, y=y, b=b, f=f) out = self._quad_solver(quad_problem) return ( @@ -282,9 +281,8 @@ def tree_unflatten( # noqa: D102 @partial(jax.vmap, in_axes=[None, 0, None, 0, None]) def init_transports( - solver, rng: jax.Array, a: jnp.ndarray, b: jnp.ndarray, - epsilon: Optional[float] -) -> jnp.ndarray: + solver, rng: jax.Array, a: jax.Array, b: jax.Array, epsilon: Optional[float] +) -> jax.Array: """Initialize random 2D point cloud and solve the linear OT problem. Args: diff --git a/src/ott/tools/gaussian_mixture/fit_gmm.py b/src/ott/tools/gaussian_mixture/fit_gmm.py index 4c62bded7..45d8e0935 100644 --- a/src/ott/tools/gaussian_mixture/fit_gmm.py +++ b/src/ott/tools/gaussian_mixture/fit_gmm.py @@ -62,8 +62,8 @@ def get_assignment_probs( - gmm: gaussian_mixture.GaussianMixture, points: jnp.ndarray -) -> jnp.ndarray: + gmm: gaussian_mixture.GaussianMixture, points: jax.Array +) -> jax.Array: r"""Get component assignment probabilities used in the E step of EM. 
Here we compute the component assignment probabilities p(Z|X, \Theta^{(t)}) @@ -81,9 +81,9 @@ def get_assignment_probs( def get_q( gmm: gaussian_mixture.GaussianMixture, - assignment_probs: jnp.ndarray, - points: jnp.ndarray, - point_weights: Optional[jnp.ndarray] = None, + assignment_probs: jax.Array, + points: jax.Array, + point_weights: Optional[jax.Array] = None, ) -> float: r"""Get Q(\Theta|\Theta^{(t)}). @@ -109,8 +109,8 @@ def get_q( def log_prob_loss( gmm: gaussian_mixture.GaussianMixture, - points: jnp.ndarray, - point_weights: Optional[jnp.ndarray] = None, + points: jax.Array, + point_weights: Optional[jax.Array] = None, ) -> float: """Loss function: weighted mean of (-log prob of observations). @@ -130,8 +130,8 @@ def log_prob_loss( def fit_model_em( gmm: gaussian_mixture.GaussianMixture, - points: jnp.ndarray, - point_weights: Optional[jnp.ndarray], + points: jax.Array, + point_weights: Optional[jax.Array], steps: int, jit: bool = True, verbose: bool = False, @@ -184,10 +184,10 @@ def fit_model_em( # See https://en.wikipedia.org/wiki/K-means%2B%2B for details -def _get_dist_sq(points: jnp.ndarray, loc: jnp.ndarray) -> jnp.ndarray: +def _get_dist_sq(points: jax.Array, loc: jax.Array) -> jax.Array: """Get the squared distance from each point to each loc.""" - def _dist_sq_one_loc(points: jnp.ndarray, loc: jnp.ndarray) -> jnp.ndarray: + def _dist_sq_one_loc(points: jax.Array, loc: jax.Array) -> jax.Array: return jnp.sum((points - loc[None]) ** 2., axis=-1) dist_sq_fn = jax.vmap(_dist_sq_one_loc, in_axes=(None, 0), out_axes=1) @@ -195,8 +195,8 @@ def _dist_sq_one_loc(points: jnp.ndarray, loc: jnp.ndarray) -> jnp.ndarray: def _get_locs( - rng: jax.Array, points: jnp.ndarray, n_components: int -) -> jnp.ndarray: + rng: jax.Array, points: jax.Array, n_components: int +) -> jax.Array: """Get the initial component means. 
Args: @@ -230,8 +230,8 @@ def _get_locs( def from_kmeans_plusplus( rng: jax.Array, - points: jnp.ndarray, - point_weights: Optional[jnp.ndarray], + points: jax.Array, + point_weights: Optional[jax.Array], n_components: int, ) -> gaussian_mixture.GaussianMixture: """Initialize a GMM via a single pass of K-means++. @@ -266,8 +266,8 @@ def from_kmeans_plusplus( def initialize( rng: jax.Array, - points: jnp.ndarray, - point_weights: Optional[jnp.ndarray], + points: jax.Array, + point_weights: Optional[jax.Array], n_components: int, n_attempts: int = 50, verbose: bool = False diff --git a/src/ott/tools/gaussian_mixture/fit_gmm_pair.py b/src/ott/tools/gaussian_mixture/fit_gmm_pair.py index 7ecde263c..35222caf9 100644 --- a/src/ott/tools/gaussian_mixture/fit_gmm_pair.py +++ b/src/ott/tools/gaussian_mixture/fit_gmm_pair.py @@ -98,9 +98,9 @@ class Observations(NamedTuple): """Weighted observations and their E-step assignment probabilities.""" - points: jnp.ndarray - point_weights: jnp.ndarray - assignment_probs: jnp.ndarray + points: jax.Array + point_weights: jax.Array + assignment_probs: jax.Array # Model fit @@ -108,7 +108,7 @@ class Observations(NamedTuple): def get_q( gmm: gaussian_mixture.GaussianMixture, obs: Observations -) -> jnp.ndarray: +) -> jax.Array: r"""Get Q(\Theta|\Theta^{(t)}). Here Q is the log likelihood for our observations based on the current @@ -159,7 +159,7 @@ def _objective_fn( pair: gaussian_mixture_pair.GaussianMixturePair, obs0: Observations, obs1: Observations, - ) -> jnp.ndarray: + ) -> jax.Array: """Compute the objective function for a pair of GMMs. 
Args: @@ -204,11 +204,11 @@ def print_losses( def do_e_step( # noqa: D103 - e_step_fn: Callable[[gaussian_mixture.GaussianMixture, jnp.ndarray], - jnp.ndarray], + e_step_fn: Callable[[gaussian_mixture.GaussianMixture, jax.Array], + jax.Array], gmm: gaussian_mixture.GaussianMixture, - points: jnp.ndarray, - point_weights: jnp.ndarray, + points: jax.Array, + point_weights: jax.Array, ) -> Observations: assignment_probs = e_step_fn(gmm, points) return Observations( @@ -307,10 +307,10 @@ def get_fit_model_em_fn( def _fit_model_em( pair: gaussian_mixture_pair.GaussianMixturePair, - points0: jnp.ndarray, - points1: jnp.ndarray, - point_weights0: Optional[jnp.ndarray], - point_weights1: Optional[jnp.ndarray], + points0: jax.Array, + points1: jax.Array, + point_weights0: Optional[jax.Array], + point_weights1: Optional[jax.Array], em_steps: int, m_steps: int = 50, verbose: bool = False, diff --git a/src/ott/tools/gaussian_mixture/gaussian.py b/src/ott/tools/gaussian_mixture/gaussian.py index 6e0a8ccb7..b8c8e227b 100644 --- a/src/ott/tools/gaussian_mixture/gaussian.py +++ b/src/ott/tools/gaussian_mixture/gaussian.py @@ -28,15 +28,15 @@ class Gaussian: """Normal distribution.""" - def __init__(self, loc: jnp.ndarray, scale: scale_tril.ScaleTriL): + def __init__(self, loc: jax.Array, scale: scale_tril.ScaleTriL): self._loc = loc self._scale = scale @classmethod def from_samples( cls, - points: jnp.ndarray, - weights: Optional[jnp.ndarray] = None + points: jax.Array, + weights: Optional[jax.Array] = None ) -> "Gaussian": """Construct a Gaussian from weighted samples. @@ -67,7 +67,7 @@ def from_random( n_dimensions: int, stdev_mean: float = 0.1, stdev_cov: float = 0.1, - ridge: Union[float, jnp.ndarray] = 0, + ridge: Union[float, jax.Array] = 0, dtype: Optional[jnp.dtype] = None ) -> "Gaussian": """Construct a random Gaussian. 
@@ -94,13 +94,13 @@ def from_random( return cls(loc=loc, scale=scale) @classmethod - def from_mean_and_cov(cls, mean: jnp.ndarray, cov: jnp.ndarray) -> "Gaussian": + def from_mean_and_cov(cls, mean: jax.Array, cov: jax.Array) -> "Gaussian": """Construct a Gaussian from a mean and covariance.""" scale = scale_tril.ScaleTriL.from_covariance(cov) return cls(loc=mean, scale=scale) @property - def loc(self) -> jnp.ndarray: + def loc(self) -> jax.Array: """Mean of the Gaussian.""" return self._loc @@ -114,22 +114,22 @@ def n_dimensions(self) -> int: """Dimensionality of the Gaussian.""" return self.loc.shape[-1] - def covariance(self) -> jnp.ndarray: + def covariance(self) -> jax.Array: """Covariance of the Gaussian.""" return self.scale.covariance() - def to_z(self, x: jnp.ndarray) -> jnp.ndarray: + def to_z(self, x: jax.Array) -> jax.Array: r"""Transform :math:`x` to :math:`z = \frac{x - loc}{scale}`.""" return self.scale.centered_to_z(x_centered=x - self.loc) - def from_z(self, z: jnp.ndarray) -> jnp.ndarray: + def from_z(self, z: jax.Array) -> jax.Array: r"""Transform :math:`z` to :math:`x = loc + scale \cdot z`.""" return self.scale.z_to_centered(z=z) + self.loc def log_prob( self, - x: jnp.ndarray, # (?, d) - ) -> jnp.ndarray: # (?, d) + x: jax.Array, # (?, d) + ) -> jax.Array: # (?, d) """Log probability for a Gaussian with a diagonal covariance.""" d = x.shape[-1] z = self.to_z(x) @@ -138,7 +138,7 @@ def log_prob( -0.5 * (d * LOG2PI + log_det[None] + jnp.sum(z ** 2., axis=-1)) ) # (?, k) - def sample(self, rng: jax.Array, size: int) -> jnp.ndarray: + def sample(self, rng: jax.Array, size: int) -> jax.Array: """Generate samples from the distribution.""" std_samples_t = jax.random.normal(key=rng, shape=(self.n_dimensions, size)) return self.loc[None] + ( @@ -149,7 +149,7 @@ def sample(self, rng: jax.Array, size: int) -> jnp.ndarray: ) ) - def w2_dist(self, other: "Gaussian") -> jnp.ndarray: + def w2_dist(self, other: "Gaussian") -> jax.Array: r"""Wasserstein 
distance :math:`W_2^2` to another Gaussian. .. math:: @@ -167,7 +167,7 @@ def w2_dist(self, other: "Gaussian") -> jnp.ndarray: delta_sigma = self.scale.w2_dist(other.scale) return delta_mean + delta_sigma - def f_potential(self, dest: "Gaussian", points: jnp.ndarray) -> jnp.ndarray: + def f_potential(self, dest: "Gaussian", points: jax.Array) -> jax.Array: """Optimal potential for W2 distance between Gaussians. Evaluated on points. Args: @@ -191,7 +191,7 @@ def batch_inner_product(x, y): points.dot(dest.loc) ) - def transport(self, dest: "Gaussian", points: jnp.ndarray) -> jnp.ndarray: + def transport(self, dest: "Gaussian", points: jax.Array) -> jax.Array: """Transport points according to map between two Gaussian measures. Args: diff --git a/src/ott/tools/gaussian_mixture/gaussian_mixture.py b/src/ott/tools/gaussian_mixture/gaussian_mixture.py index 313689939..a9cb2b326 100644 --- a/src/ott/tools/gaussian_mixture/gaussian_mixture.py +++ b/src/ott/tools/gaussian_mixture/gaussian_mixture.py @@ -27,9 +27,8 @@ def get_summary_stats_from_points_and_assignment_probs( - points: jnp.ndarray, point_weights: jnp.ndarray, - assignment_probs: jnp.ndarray -) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]: + points: jax.Array, point_weights: jax.Array, assignment_probs: jax.Array +) -> Tuple[jax.Array, jax.Array, jax.Array]: """Get component summary stats from points and component probabilities. 
Args: @@ -68,7 +67,7 @@ class GaussianMixture: """Gaussian Mixture model.""" def __init__( - self, loc: jnp.ndarray, scale_params: jnp.ndarray, + self, loc: jax.Array, scale_params: jax.Array, component_weight_ob: probabilities.Probabilities ): self._loc = loc @@ -113,7 +112,7 @@ def from_random( @classmethod def from_mean_cov_component_weights( - cls, mean: jnp.ndarray, cov: jnp.ndarray, component_weights: jnp.ndarray + cls, mean: jax.Array, cov: jax.Array, component_weights: jax.Array ): """Construct a GMM from means, covariances, and component weights.""" scale_params = [] @@ -128,9 +127,9 @@ def from_mean_cov_component_weights( @classmethod def from_points_and_assignment_probs( cls, - points: jnp.ndarray, - point_weights: jnp.ndarray, - assignment_probs: jnp.ndarray, + points: jax.Array, + point_weights: jax.Array, + assignment_probs: jax.Array, ) -> "GaussianMixture": """Estimate a GMM from points and a set of component probabilities.""" mean, cov, wts = get_summary_stats_from_points_and_assignment_probs( @@ -158,17 +157,17 @@ def n_components(self): return self._loc.shape[-2] @property - def loc(self) -> jnp.ndarray: + def loc(self) -> jax.Array: """Location parameters of the GMM.""" return self._loc @property - def scale_params(self) -> jnp.ndarray: + def scale_params(self) -> jax.Array: """Scale parameters of the GMM.""" return self._scale_params @property - def cholesky(self) -> jnp.ndarray: + def cholesky(self) -> jax.Array: """Cholesky decomposition of the GMM covariance matrices.""" size = self.n_dimensions @@ -178,7 +177,7 @@ def _get_cholesky(scale_params): return jax.vmap(_get_cholesky, in_axes=0, out_axes=0)(self.scale_params) @property - def covariance(self) -> jnp.ndarray: + def covariance(self) -> jax.Array: """Covariance matrices of the GMM.""" size = self.n_dimensions @@ -193,16 +192,16 @@ def component_weight_ob(self) -> probabilities.Probabilities: return self._component_weight_ob @property - def component_weights(self) -> jnp.ndarray: + def 
component_weights(self) -> jax.Array: """Component weights probabilities.""" return self._component_weight_ob.probs() - def log_component_weights(self) -> jnp.ndarray: + def log_component_weights(self) -> jax.Array: """Log component weights probabilities.""" return self._component_weight_ob.log_probs() def _get_normal( - self, loc: jnp.ndarray, scale_params: jnp.ndarray + self, loc: jax.Array, scale_params: jax.Array ) -> gaussian.Gaussian: size = loc.shape[-1] return gaussian.Gaussian( @@ -219,7 +218,7 @@ def components(self) -> List[gaussian.Gaussian]: """List of all GMM components.""" return [self.get_component(i) for i in range(self.n_components)] - def sample(self, rng: jax.Array, size: int) -> jnp.ndarray: + def sample(self, rng: jax.Array, size: int) -> jax.Array: """Generate samples from the distribution.""" subrng0, subrng1 = jax.random.split(rng) component = self.component_weight_ob.sample(rng=subrng0, size=size) @@ -244,7 +243,7 @@ def _transform_single_value(single_component, single_x): axis=0 ) - def conditional_log_prob(self, x: jnp.ndarray) -> jnp.ndarray: + def conditional_log_prob(self, x: jax.Array) -> jax.Array: """Compute the component-conditional log probability of x. Args: @@ -256,7 +255,7 @@ def conditional_log_prob(self, x: jnp.ndarray) -> jnp.ndarray: """ def _log_prob_single_component( - loc: jnp.ndarray, scale_params: jnp.ndarray, x: jnp.ndarray + loc: jax.Array, scale_params: jax.Array, x: jax.Array ): norm = self._get_normal(loc=loc, scale_params=scale_params) return norm.log_prob(x) @@ -266,7 +265,7 @@ def _log_prob_single_component( ) return conditional_log_prob_fn(self._loc, self._scale_params, x) - def log_prob(self, x: jnp.ndarray) -> jnp.ndarray: + def log_prob(self, x: jax.Array) -> jax.Array: """Compute the log probability of the observations x. 
Args: @@ -282,7 +281,7 @@ def log_prob(self, x: jnp.ndarray) -> jnp.ndarray: log_prob_conditional + log_component_weight[None, :], axis=-1 ) - def get_log_component_posterior(self, x: jnp.ndarray) -> jnp.ndarray: + def get_log_component_posterior(self, x: jax.Array) -> jax.Array: """Compute the posterior probability that x came from each component. Args: diff --git a/src/ott/tools/gaussian_mixture/gaussian_mixture_pair.py b/src/ott/tools/gaussian_mixture/gaussian_mixture_pair.py index b24506fcc..21d4dbaf1 100644 --- a/src/ott/tools/gaussian_mixture/gaussian_mixture_pair.py +++ b/src/ott/tools/gaussian_mixture/gaussian_mixture_pair.py @@ -128,12 +128,12 @@ def get_bures_geometry(self) -> pointcloud.PointCloud: epsilon=self.epsilon ) - def get_cost_matrix(self) -> jnp.ndarray: + def get_cost_matrix(self) -> jax.Array: """Get matrix of :math:`W_2^2` costs between all pairs of components.""" return self.get_bures_geometry().cost_matrix def get_sinkhorn( - self, cost_matrix: jnp.ndarray, **kwargs: Any + self, cost_matrix: jax.Array, **kwargs: Any ) -> sinkhorn.SinkhornOutput: """Get the output of Sinkhorn's method for a given cost matrix.""" # We use a Geometry here rather than the PointCloud created in @@ -152,7 +152,7 @@ def get_sinkhorn( def get_normalized_sinkhorn_coupling( self, sinkhorn_output: sinkhorn.SinkhornOutput, - ) -> jnp.ndarray: + ) -> jax.Array: """Get the normalized coupling matrix for the specified Sinkhorn output. 
Args: diff --git a/src/ott/tools/gaussian_mixture/linalg.py b/src/ott/tools/gaussian_mixture/linalg.py index 8e71369f3..2a5114d69 100644 --- a/src/ott/tools/gaussian_mixture/linalg.py +++ b/src/ott/tools/gaussian_mixture/linalg.py @@ -18,9 +18,9 @@ def get_mean_and_var( - points: jnp.ndarray, # (n, d) - weights: jnp.ndarray, # (n,) -) -> Tuple[jnp.ndarray, jnp.ndarray]: + points: jax.Array, # (n, d) + weights: jax.Array, # (n,) +) -> Tuple[jax.Array, jax.Array]: """Get the mean and variance of a weighted set of points.""" weights_sum = jnp.sum(weights, axis=-1) # (1,) mean = ( @@ -37,9 +37,9 @@ def get_mean_and_var( def get_mean_and_cov( - points: jnp.ndarray, # (n, d) - weights: jnp.ndarray, # (n,) -) -> Tuple[jnp.ndarray, jnp.ndarray]: + points: jax.Array, # (n, d) + weights: jax.Array, # (n,) +) -> Tuple[jax.Array, jax.Array]: """Get the mean and covariance of a weighted set of points.""" weights_sum = jnp.sum(weights, axis=-1, keepdims=True) # (1,) mean = ( @@ -59,7 +59,7 @@ def get_mean_and_cov( return mean, cov -def flat_to_tril(x: jnp.ndarray, size: int) -> jnp.ndarray: +def flat_to_tril(x: jax.Array, size: int) -> jax.Array: """Map flat values to lower triangular matrices. Args: @@ -76,7 +76,7 @@ def flat_to_tril(x: jnp.ndarray, size: int) -> jnp.ndarray: return m.at[..., tril[0], tril[1]].set(x) -def tril_to_flat(m: jnp.ndarray) -> jnp.ndarray: +def tril_to_flat(m: jax.Array) -> jax.Array: """Flatten lower triangular matrices. 
Args: @@ -91,8 +91,8 @@ def tril_to_flat(m: jnp.ndarray) -> jnp.ndarray: def apply_to_diag( - m: jnp.ndarray, fn: Callable[[jnp.ndarray], jnp.ndarray] -) -> jnp.ndarray: + m: jax.Array, fn: Callable[[jax.Array], jax.Array] +) -> jax.Array: """Apply a function to the diagonal of a matrix.""" size = m.shape[-1] diag = jnp.diagonal(m, axis1=-2, axis2=-1) @@ -101,9 +101,9 @@ def apply_to_diag( def matrix_powers( - m: jnp.ndarray, + m: jax.Array, powers: Iterable[float], -) -> List[jnp.ndarray]: +) -> List[jax.Array]: """Raise a real, symmetric matrix to multiple powers.""" eigs, q = jnp.linalg.eigh(m) qt = jnp.swapaxes(q, axis1=-2, axis2=-1) @@ -113,9 +113,7 @@ def matrix_powers( return ret -def invmatvectril( - m: jnp.ndarray, x: jnp.ndarray, lower: bool = True -) -> jnp.ndarray: +def invmatvectril(m: jax.Array, x: jax.Array, lower: bool = True) -> jax.Array: """Multiply x by the inverse of a triangular matrix. Args: @@ -133,7 +131,7 @@ def invmatvectril( def get_random_orthogonal( rng: jax.Array, dim: int, dtype: Optional[jnp.dtype] = None -) -> jnp.ndarray: +) -> jax.Array: """Get a random orthogonal matrix with the specified dimension.""" m = jax.random.normal(key=rng, shape=[dim, dim], dtype=dtype) q, _ = jnp.linalg.qr(m) diff --git a/src/ott/tools/gaussian_mixture/probabilities.py b/src/ott/tools/gaussian_mixture/probabilities.py index 6df3bb023..c3bb253a5 100644 --- a/src/ott/tools/gaussian_mixture/probabilities.py +++ b/src/ott/tools/gaussian_mixture/probabilities.py @@ -27,7 +27,7 @@ class Probabilities: to a length n simplex by appending a 0 and taking a softmax. 
""" - _params: jnp.ndarray + _params: jax.Array def __init__(self, params): self._params = params @@ -47,7 +47,7 @@ def from_random( ) @classmethod - def from_probs(cls, probs: jnp.ndarray) -> "Probabilities": + def from_probs(cls, probs: jax.Array) -> "Probabilities": """Construct Probabilities from a vector of probabilities.""" log_probs = jnp.log(probs) log_probs_normalized, norm = log_probs[:-1], log_probs[-1] @@ -62,21 +62,21 @@ def params(self): # noqa: D102 def dtype(self): # noqa: D102 return self._params.dtype - def unnormalized_log_probs(self) -> jnp.ndarray: + def unnormalized_log_probs(self) -> jax.Array: """Get the unnormalized log probabilities.""" return jnp.concatenate([self._params, jnp.zeros((1,), dtype=self.dtype)], axis=-1) - def log_probs(self) -> jnp.ndarray: + def log_probs(self) -> jax.Array: """Get the log probabilities.""" return jax.nn.log_softmax(self.unnormalized_log_probs()) - def probs(self) -> jnp.ndarray: + def probs(self) -> jax.Array: """Get the probabilities.""" return jax.nn.softmax(self.unnormalized_log_probs()) - def sample(self, rng: jax.Array, size: int) -> jnp.ndarray: + def sample(self, rng: jax.Array, size: int) -> jax.Array: """Sample from the distribution.""" return jax.random.categorical( key=rng, logits=self.unnormalized_log_probs(), shape=(size,) diff --git a/src/ott/tools/gaussian_mixture/scale_tril.py b/src/ott/tools/gaussian_mixture/scale_tril.py index b286cc74e..ee708d5ac 100644 --- a/src/ott/tools/gaussian_mixture/scale_tril.py +++ b/src/ott/tools/gaussian_mixture/scale_tril.py @@ -27,16 +27,16 @@ class ScaleTriL: """Pytree for a lower triangular Cholesky-factored covariance matrix.""" - def __init__(self, params: jnp.ndarray, size: int): + def __init__(self, params: jax.Array, size: int): self._params = params self._size = size @classmethod def from_points_and_weights( cls, - points: jnp.ndarray, - weights: jnp.ndarray, - ) -> Tuple[jnp.ndarray, "ScaleTriL"]: + points: jax.Array, + weights: jax.Array, + ) -> 
Tuple[jax.Array, "ScaleTriL"]: """Get a mean and a ScaleTriL from a set of points and weights.""" mean, cov = linalg.get_mean_and_cov(points=points, weights=weights) return mean, cls.from_covariance(cov) @@ -80,7 +80,7 @@ def from_random( return cls(params=flat, size=n_dimensions) @classmethod - def from_cholesky(cls, cholesky: jnp.ndarray) -> "ScaleTriL": + def from_cholesky(cls, cholesky: jax.Array) -> "ScaleTriL": """Construct ScaleTriL from a Cholesky factor of a covariance matrix.""" m = linalg.apply_to_diag(cholesky, jnp.log) flat = linalg.tril_to_flat(m) @@ -89,14 +89,14 @@ def from_cholesky(cls, cholesky: jnp.ndarray) -> "ScaleTriL": @classmethod def from_covariance( cls, - covariance: jnp.ndarray, + covariance: jax.Array, ) -> "ScaleTriL": """Construct ScaleTriL from a covariance matrix.""" cholesky = jnp.linalg.cholesky(covariance) return cls.from_cholesky(cholesky) @property - def params(self) -> jnp.ndarray: + def params(self) -> jax.Array: """Internal representation.""" return self._params @@ -110,34 +110,34 @@ def dtype(self): """Data type of the covariance matrix.""" return self._params.dtype - def cholesky(self) -> jnp.ndarray: + def cholesky(self) -> jax.Array: """Get a lower triangular Cholesky factor for the covariance matrix.""" m = linalg.flat_to_tril(self._params, size=self._size) return linalg.apply_to_diag(m, jnp.exp) - def covariance(self) -> jnp.ndarray: + def covariance(self) -> jax.Array: """Get the covariance matrix.""" cholesky = self.cholesky() return cholesky @ cholesky.T - def covariance_sqrt(self) -> jnp.ndarray: + def covariance_sqrt(self) -> jax.Array: """Get the square root of the covariance matrix.""" return linalg.matrix_powers(self.covariance(), (0.5,))[0] - def log_det_covariance(self) -> jnp.ndarray: + def log_det_covariance(self) -> jax.Array: """Get the log of the determinant of the covariance matrix.""" diag = jnp.diagonal(self.cholesky(), axis1=-2, axis2=-1) return 2. 
* jnp.sum(jnp.log(diag), axis=-1) - def centered_to_z(self, x_centered: jnp.ndarray) -> jnp.ndarray: + def centered_to_z(self, x_centered: jax.Array) -> jax.Array: """Map centered points to standardized centered points (i.e. cov(z) = I).""" return linalg.invmatvectril(m=self.cholesky(), x=x_centered, lower=True) - def z_to_centered(self, z: jnp.ndarray) -> jnp.ndarray: + def z_to_centered(self, z: jax.Array) -> jax.Array: """Scale standardized points to points with the specified covariance.""" return (self.cholesky() @ z.T).T - def w2_dist(self, other: "ScaleTriL") -> jnp.ndarray: + def w2_dist(self, other: "ScaleTriL") -> jax.Array: r"""Wasserstein distance W_2^2 to another Gaussian with same mean. Args: @@ -148,7 +148,7 @@ def w2_dist(self, other: "ScaleTriL") -> jnp.ndarray: """ dimension = self.size - def _flatten_cov(cov: jnp.ndarray) -> jnp.ndarray: + def _flatten_cov(cov: jax.Array) -> jax.Array: cov = cov.reshape(cov.shape[:-2] + (dimension * dimension,)) return jnp.concatenate([jnp.zeros(dimension), cov], axis=-1) @@ -159,7 +159,7 @@ def _flatten_cov(cov: jnp.ndarray) -> jnp.ndarray: ..., ] - def gaussian_map(self, dest_scale: "ScaleTriL") -> jnp.ndarray: + def gaussian_map(self, dest_scale: "ScaleTriL") -> jax.Array: """Scaling matrix used in transport between 0-mean Gaussians. Sigma_mu^{-1/2} @ @@ -179,9 +179,7 @@ def gaussian_map(self, dest_scale: "ScaleTriL") -> jnp.ndarray: ) return jnp.matmul(sqrt0_inv, jnp.matmul(m, sqrt0_inv)) - def transport( - self, dest_scale: "ScaleTriL", points: jnp.ndarray - ) -> jnp.ndarray: + def transport(self, dest_scale: "ScaleTriL", points: jax.Array) -> jax.Array: """Apply Monge map, computed between two 0-mean Gaussians, to points. 
Args: diff --git a/src/ott/tools/k_means.py b/src/ott/tools/k_means.py index 986b919d0..c8fc8189d 100644 --- a/src/ott/tools/k_means.py +++ b/src/ott/tools/k_means.py @@ -25,29 +25,29 @@ __all__ = ["k_means", "KMeansOutput"] Init_t = Union[Literal["k-means++", "random"], - Callable[[pointcloud.PointCloud, int, jnp.ndarray], jnp.ndarray]] + Callable[[pointcloud.PointCloud, int, jax.Array], jax.Array]] class KPPState(NamedTuple): # noqa: D101 rng: jax.Array - centroids: jnp.ndarray - centroid_dists: jnp.ndarray + centroids: jax.Array + centroid_dists: jax.Array class KMeansState(NamedTuple): # noqa: D101 - centroids: jnp.ndarray - prev_assignment: jnp.ndarray - assignment: jnp.ndarray - errors: jnp.ndarray + centroids: jax.Array + prev_assignment: jax.Array + assignment: jax.Array + errors: jax.Array center_shift: float class KMeansConst(NamedTuple): # noqa: D101 geom: pointcloud.PointCloud - x_weights: jnp.ndarray + x_weights: jax.Array @property - def x(self) -> jnp.ndarray: + def x(self) -> jax.Array: """Array of shape ``[n, ndim]`` containing the unweighted point cloud.""" return self.geom.x @@ -57,7 +57,7 @@ def weighted_x(self): return self.x_weights[:, :-1] @property - def weights(self) -> jnp.ndarray: + def weights(self) -> jax.Array: """Array of shape ``[n, 1]`` containing weights for each point.""" return self.x_weights[:, -1:] @@ -75,12 +75,12 @@ class KMeansOutput(NamedTuple): inner_errors: Array of shape ``[max_iterations,]`` containing the ``error`` at every iteration. 
""" - centroids: jnp.ndarray - assignment: jnp.ndarray + centroids: jax.Array + assignment: jax.Array converged: bool iteration: int error: float - inner_errors: Optional[jnp.ndarray] + inner_errors: Optional[jax.Array] @classmethod def _from_state( @@ -110,7 +110,7 @@ def _from_state( def _random_init( geom: pointcloud.PointCloud, k: int, rng: jax.Array -) -> jnp.ndarray: +) -> jax.Array: ixs = jnp.arange(geom.shape[0]) ixs = jax.random.choice(rng, ixs, shape=(k,), replace=False) return geom.subset(ixs, None).x @@ -121,7 +121,7 @@ def _k_means_plus_plus( k: int, rng: jax.Array, n_local_trials: Optional[int] = None, -) -> jnp.ndarray: +) -> jax.Array: def init_fn(geom: pointcloud.PointCloud, rng: jax.Array) -> KPPState: rng, next_rng = jax.random.split(rng, 2) @@ -131,7 +131,7 @@ def init_fn(geom: pointcloud.PointCloud, rng: jax.Array) -> KPPState: return KPPState(rng=next_rng, centroids=centroids, centroid_dists=dists) def body_fn( - iteration: int, const: Tuple[pointcloud.PointCloud, jnp.ndarray], + iteration: int, const: Tuple[pointcloud.PointCloud, jax.Array], state: KPPState, compute_error: bool ) -> KPPState: del compute_error @@ -177,10 +177,10 @@ def body_fn( @functools.partial(jax.vmap, in_axes=[None, 0, 0, 0], out_axes=0) def _reallocate_centroids( const: KMeansConst, - ix: jnp.ndarray, - centroid: jnp.ndarray, - weight: jnp.ndarray, -) -> Tuple[jnp.ndarray, jnp.ndarray]: + ix: jax.Array, + centroid: jax.Array, + weight: jax.Array, +) -> Tuple[jax.Array, jax.Array]: is_empty = weight <= 0. 
new_centroid = (1 - is_empty) * centroid + is_empty * const.x[ix] # (ndim,) centroid_to_remove = is_empty * const.weighted_x[ix] # (ndim,) @@ -190,8 +190,8 @@ def _reallocate_centroids( def _update_assignment( const: KMeansConst, - centroids: jnp.ndarray, -) -> Tuple[jnp.ndarray, jnp.ndarray]: + centroids: jax.Array, +) -> Tuple[jax.Array, jax.Array]: (x, _, *args), aux_data = const.geom.tree_flatten() cost_matrix = type( const.geom @@ -203,9 +203,9 @@ def _update_assignment( def _update_centroids( - const: KMeansConst, k: int, assignment: jnp.ndarray, - dist_to_centers: jnp.ndarray -) -> jnp.ndarray: + const: KMeansConst, k: int, assignment: jax.Array, + dist_to_centers: jax.Array +) -> jax.Array: # TODO(michalk8): # cannot put `k` into `const`, see https://github.com/ott-jax/ott/issues/129 x_weights = jax.ops.segment_sum(const.x_weights, assignment, num_segments=k) @@ -227,7 +227,7 @@ def _k_means( rng: jax.Array, geom: pointcloud.PointCloud, k: int, - weights: Optional[jnp.ndarray] = None, + weights: Optional[jax.Array] = None, init: Init_t = "k-means++", n_local_trials: Optional[int] = None, tol: float = 1e-4, @@ -342,9 +342,9 @@ def finalize_fn(const: KMeansConst, state: KMeansState) -> KMeansState: def k_means( - geom: Union[jnp.ndarray, pointcloud.PointCloud], + geom: Union[jax.Array, pointcloud.PointCloud], k: int, - weights: Optional[jnp.ndarray] = None, + weights: Optional[jax.Array] = None, init: Init_t = "k-means++", n_init: int = 10, n_local_trials: Optional[int] = None, @@ -386,7 +386,7 @@ def k_means( """ assert geom.shape[ 0] >= k, f"Cannot cluster `{geom.shape[0]}` points into `{k}` clusters." 
- if isinstance(geom, jnp.ndarray): + if isinstance(geom, jax.Array): geom = pointcloud.PointCloud(geom) if isinstance(geom.cost_fn, costs.Cosine): geom = geom._cosine_to_sqeucl() diff --git a/src/ott/tools/plot.py b/src/ott/tools/plot.py index bd1f42e91..d83868fd5 100644 --- a/src/ott/tools/plot.py +++ b/src/ott/tools/plot.py @@ -13,6 +13,7 @@ # limitations under the License. from typing import List, Optional, Sequence, Tuple, Union +import jax import jax.numpy as jnp import numpy as np import scipy @@ -32,8 +33,7 @@ gromov_wasserstein.GWOutput] -def bidimensional(x: jnp.ndarray, - y: jnp.ndarray) -> Tuple[jnp.ndarray, jnp.ndarray]: +def bidimensional(x: jax.Array, y: jax.Array) -> Tuple[jax.Array, jax.Array]: """Apply PCA to reduce to bi-dimensional data.""" if x.shape[1] < 3: return x, y @@ -121,7 +121,7 @@ def _scatter(self, ot: Transport): scales_y = b * self._scale * b.shape[0] return x, y, scales_x, scales_y - def _mapping(self, x: jnp.ndarray, y: jnp.ndarray, matrix: jnp.ndarray): + def _mapping(self, x: jax.Array, y: jax.Array, matrix: jax.Array): """Compute the lines representing the mapping between the 2 point clouds.""" # Only plot the lines with a cost above the threshold. 
u, v = jnp.where(matrix > self._threshold) diff --git a/src/ott/tools/segment_sinkhorn.py b/src/ott/tools/segment_sinkhorn.py index 223f2a30f..ca5e5c228 100644 --- a/src/ott/tools/segment_sinkhorn.py +++ b/src/ott/tools/segment_sinkhorn.py @@ -14,7 +14,7 @@ from types import MappingProxyType from typing import Any, Mapping, Optional, Tuple -import jax.numpy as jnp +import jax from ott.geometry import costs, pointcloud, segment from ott.problems.linear import linear_problem @@ -22,21 +22,21 @@ def segment_sinkhorn( - x: jnp.ndarray, - y: jnp.ndarray, + x: jax.Array, + y: jax.Array, num_segments: Optional[int] = None, max_measure_size: Optional[int] = None, cost_fn: Optional[costs.CostFn] = None, - segment_ids_x: Optional[jnp.ndarray] = None, - segment_ids_y: Optional[jnp.ndarray] = None, + segment_ids_x: Optional[jax.Array] = None, + segment_ids_y: Optional[jax.Array] = None, indices_are_sorted: bool = False, num_per_segment_x: Optional[Tuple[int, ...]] = None, num_per_segment_y: Optional[Tuple[int, ...]] = None, - weights_x: Optional[jnp.ndarray] = None, - weights_y: Optional[jnp.ndarray] = None, + weights_x: Optional[jax.Array] = None, + weights_y: Optional[jax.Array] = None, sinkhorn_kwargs: Mapping[str, Any] = MappingProxyType({}), **kwargs: Any -) -> jnp.ndarray: +) -> jax.Array: """Compute regularized OT cost between subsets of vectors in `x` and `y`. Helper function designed to compute Sinkhorn regularized OT cost between @@ -104,10 +104,10 @@ def segment_sinkhorn( padding_vector = cost_fn._padder(dim=dim) def eval_fn( - padded_x: jnp.ndarray, - padded_y: jnp.ndarray, - padded_weight_x: jnp.ndarray, - padded_weight_y: jnp.ndarray, + padded_x: jax.Array, + padded_y: jax.Array, + padded_weight_x: jax.Array, + padded_weight_y: jax.Array, ) -> float: mask_x = padded_weight_x > 0. mask_y = padded_weight_y > 0. 
diff --git a/src/ott/tools/sinkhorn_divergence.py b/src/ott/tools/sinkhorn_divergence.py index 51de97613..2ff1cbc4e 100644 --- a/src/ott/tools/sinkhorn_divergence.py +++ b/src/ott/tools/sinkhorn_divergence.py @@ -14,6 +14,7 @@ from types import MappingProxyType from typing import Any, Mapping, Optional, Tuple, Type +import jax import jax.numpy as jnp from ott import utils @@ -27,7 +28,7 @@ "SinkhornDivergenceOutput" ] -Potentials_t = Tuple[jnp.ndarray, jnp.ndarray] +Potentials_t = Tuple[jax.Array, jax.Array] @utils.register_pytree_node @@ -35,11 +36,10 @@ class SinkhornDivergenceOutput: # noqa: D101 divergence: float potentials: Tuple[Potentials_t, Potentials_t, Potentials_t] geoms: Tuple[geometry.Geometry, geometry.Geometry, geometry.Geometry] - errors: Tuple[Optional[jnp.ndarray], Optional[jnp.ndarray], - Optional[jnp.ndarray]] + errors: Tuple[Optional[jax.Array], Optional[jax.Array], Optional[jax.Array]] converged: Tuple[bool, bool, bool] - a: jnp.ndarray - b: jnp.ndarray + a: jax.Array + b: jax.Array n_iters: Tuple[int, int, int] def to_dual_potentials(self) -> "potentials.EntropicPotentials": @@ -73,8 +73,8 @@ def tree_unflatten_foo(cls, aux_data, children): # noqa: D102 def sinkhorn_divergence( geom: Type[geometry.Geometry], *args: Any, - a: Optional[jnp.ndarray] = None, - b: Optional[jnp.ndarray] = None, + a: Optional[jax.Array] = None, + b: Optional[jax.Array] = None, sinkhorn_kwargs: Mapping[str, Any] = MappingProxyType({}), static_b: bool = False, share_epsilon: bool = True, @@ -138,8 +138,8 @@ def _sinkhorn_divergence( geometry_xy: geometry.Geometry, geometry_xx: geometry.Geometry, geometry_yy: Optional[geometry.Geometry], - a: jnp.ndarray, - b: jnp.ndarray, + a: jax.Array, + b: jax.Array, symmetric_sinkhorn: bool, **kwargs: Any, ) -> SinkhornDivergenceOutput: @@ -155,9 +155,9 @@ def _sinkhorn_divergence( between elements of the view X. geometry_yy: a Cost object able to apply kernels with a certain epsilon, between elements of the view Y. 
- a: jnp.ndarray[n]: the weight of each input point. The sum of + a: jax.Array[n]: the weight of each input point. The sum of all elements of ``b`` must match that of ``a`` to converge. - b: jnp.ndarray[m]: the weight of each target point. The sum of + b: jax.Array[m]: the weight of each target point. The sum of all elements of ``b`` must match that of ``a`` to converge. symmetric_sinkhorn: Use Sinkhorn updates in Eq. 25 of :cite:`feydy:19` for symmetric terms comparing x/x and y/y. @@ -219,24 +219,24 @@ def _sinkhorn_divergence( def segment_sinkhorn_divergence( - x: jnp.ndarray, - y: jnp.ndarray, + x: jax.Array, + y: jax.Array, num_segments: Optional[int] = None, max_measure_size: Optional[int] = None, cost_fn: Optional[costs.CostFn] = None, - segment_ids_x: Optional[jnp.ndarray] = None, - segment_ids_y: Optional[jnp.ndarray] = None, + segment_ids_x: Optional[jax.Array] = None, + segment_ids_y: Optional[jax.Array] = None, indices_are_sorted: bool = False, num_per_segment_x: Optional[Tuple[int, ...]] = None, num_per_segment_y: Optional[Tuple[int, ...]] = None, - weights_x: Optional[jnp.ndarray] = None, - weights_y: Optional[jnp.ndarray] = None, + weights_x: Optional[jax.Array] = None, + weights_y: Optional[jax.Array] = None, sinkhorn_kwargs: Mapping[str, Any] = MappingProxyType({}), static_b: bool = False, share_epsilon: bool = True, symmetric_sinkhorn: bool = False, **kwargs: Any -) -> jnp.ndarray: +) -> jax.Array: """Compute Sinkhorn divergence between subsets of vectors in `x` and `y`. Helper function designed to compute Sinkhorn divergences between several point @@ -313,10 +313,10 @@ def segment_sinkhorn_divergence( padding_vector = cost_fn._padder(dim=dim) def eval_fn( - padded_x: jnp.ndarray, - padded_y: jnp.ndarray, - padded_weight_x: jnp.ndarray, - padded_weight_y: jnp.ndarray, + padded_x: jax.Array, + padded_y: jax.Array, + padded_weight_x: jax.Array, + padded_weight_y: jax.Array, ) -> float: mask_x = padded_weight_x > 0. mask_y = padded_weight_y > 0. 
diff --git a/src/ott/tools/soft_sort.py b/src/ott/tools/soft_sort.py index 646b3eb0c..b5b33e183 100644 --- a/src/ott/tools/soft_sort.py +++ b/src/ott/tools/soft_sort.py @@ -30,14 +30,14 @@ "quantize", "topk_mask", "multivariate_cdf_quantile_maps" ] -Func_t = Callable[[jnp.ndarray], jnp.ndarray] +Func_t = Callable[[jax.Array], jax.Array] def transport_for_sort( - inputs: jnp.ndarray, - weights: Optional[jnp.ndarray] = None, - target_weights: Optional[jnp.ndarray] = None, - squashing_fun: Optional[Callable[[jnp.ndarray], jnp.ndarray]] = None, + inputs: jax.Array, + weights: Optional[jax.Array] = None, + target_weights: Optional[jax.Array] = None, + squashing_fun: Optional[Callable[[jax.Array], jax.Array]] = None, epsilon: float = 1e-2, **kwargs: Any, ) -> sinkhorn.SinkhornOutput: @@ -83,7 +83,7 @@ def transport_for_sort( return solver(prob) -def apply_on_axis(op, inputs, axis, *args, **kwargs: Any) -> jnp.ndarray: +def apply_on_axis(op, inputs, axis, *args, **kwargs: Any) -> jax.Array: """Apply a differentiable operator on a given axis of the input. Args: @@ -120,8 +120,8 @@ def apply_on_axis(op, inputs, axis, *args, **kwargs: Any) -> jnp.ndarray: def _sort( - inputs: jnp.ndarray, topk: int, num_targets: Optional[int], **kwargs: Any -) -> jnp.ndarray: + inputs: jax.Array, topk: int, num_targets: Optional[int], **kwargs: Any +) -> jax.Array: """Apply the soft sort operator on a one dimensional array.""" num_points = inputs.shape[0] a = jnp.ones((num_points,)) / num_points @@ -145,12 +145,12 @@ def _sort( def sort( - inputs: jnp.ndarray, + inputs: jax.Array, axis: int = -1, topk: int = -1, num_targets: Optional[int] = None, **kwargs: Any, -) -> jnp.ndarray: +) -> jax.Array: r"""Apply the soft sort operator on a given axis of the input. 
For instance: @@ -203,8 +203,8 @@ def sort( def _ranks( - inputs: jnp.ndarray, num_targets, target_weights, **kwargs: Any -) -> jnp.ndarray: + inputs: jax.Array, num_targets, target_weights, **kwargs: Any +) -> jax.Array: """Apply the soft ranks operator on a one dimensional array.""" num_points = inputs.shape[0] if target_weights is None: @@ -220,12 +220,12 @@ def _ranks( def ranks( - inputs: jnp.ndarray, + inputs: jax.Array, axis: int = -1, num_targets: Optional[int] = None, - target_weights: Optional[jnp.ndarray] = None, + target_weights: Optional[jax.Array] = None, **kwargs: Any, -) -> jnp.ndarray: +) -> jax.Array: r"""Apply the soft rank operator on input tensor. For instance: @@ -278,11 +278,11 @@ def ranks( def topk_mask( - inputs: jnp.ndarray, + inputs: jax.Array, axis: int = -1, k: int = 1, **kwargs: Any, -) -> jnp.ndarray: +) -> jax.Array: r"""Soft :math:`\text{top-}k` selection mask. For instance: @@ -337,12 +337,12 @@ def topk_mask( def quantile( - inputs: jnp.ndarray, - q: Optional[Union[float, jnp.ndarray]], + inputs: jax.Array, + q: Optional[Union[float, jax.Array]], axis: Union[int, Tuple[int, ...]] = -1, - weight: Optional[Union[float, jnp.ndarray]] = None, + weight: Optional[Union[float, jax.Array]] = None, **kwargs: Any, -) -> jnp.ndarray: +) -> jax.Array: r"""Apply the soft quantiles operator on the input tensor. 
For instance: @@ -395,8 +395,8 @@ def quantile( """ def _quantile( - inputs: jnp.ndarray, q: float, weight: float, **kwargs - ) -> jnp.ndarray: + inputs: jax.Array, q: float, weight: float, **kwargs + ) -> jax.Array: num_points = inputs.shape[0] q = jnp.array([0.2, 0.5, 0.8]) if q is None else jnp.atleast_1d(q) num_quantiles = q.shape[0] @@ -456,15 +456,15 @@ def _quantile( def multivariate_cdf_quantile_maps( - inputs: jnp.ndarray, + inputs: jax.Array, target_sampler: Optional[Callable[[jax.Array, Tuple[int, int]], - jnp.ndarray]] = None, + jax.Array]] = None, rng: Optional[jax.Array] = None, num_target_samples: Optional[int] = None, cost_fn: Optional[costs.CostFn] = None, epsilon: Optional[float] = None, - input_weights: Optional[jnp.ndarray] = None, - target_weights: Optional[jnp.ndarray] = None, + input_weights: Optional[jax.Array] = None, + target_weights: Optional[jax.Array] = None, **kwargs: Any ) -> Tuple[Func_t, Func_t]: r"""Returns multivariate CDF and quantile maps, given input samples. @@ -534,8 +534,8 @@ def multivariate_cdf_quantile_maps( def _quantile_normalization( - inputs: jnp.ndarray, targets: jnp.ndarray, weights: float, **kwargs: Any -) -> jnp.ndarray: + inputs: jax.Array, targets: jax.Array, weights: float, **kwargs: Any +) -> jax.Array: """Apply soft quantile normalization on a one dimensional array.""" num_points = inputs.shape[0] a = jnp.ones((num_points,)) / num_points @@ -544,12 +544,12 @@ def _quantile_normalization( def quantile_normalization( - inputs: jnp.ndarray, - targets: jnp.ndarray, - weights: Optional[jnp.ndarray] = None, + inputs: jax.Array, + targets: jax.Array, + weights: Optional[jax.Array] = None, axis: int = -1, **kwargs: Any, -) -> jnp.ndarray: +) -> jax.Array: r"""Re-normalize inputs so that its quantiles match those of targets/weights. 
Quantile normalization rearranges the values in inputs to values that match @@ -600,11 +600,11 @@ def quantile_normalization( def sort_with( - inputs: jnp.ndarray, - criterion: jnp.ndarray, + inputs: jax.Array, + criterion: jax.Array, topk: int = -1, **kwargs: Any, -) -> jnp.ndarray: +) -> jax.Array: r"""Sort a multidimensional array according to a real valued criterion. Given ``batch`` vectors of dimension `dim`, to which, for each, a real value @@ -655,7 +655,7 @@ def sort_with( return sort_fn(inputs) -def _quantize(inputs: jnp.ndarray, num_q: int, **kwargs: Any) -> jnp.ndarray: +def _quantize(inputs: jax.Array, num_q: int, **kwargs: Any) -> jax.Array: """Apply the soft quantization operator on a one dimensional array.""" num_points = inputs.shape[0] a = jnp.ones((num_points,)) / num_points @@ -665,11 +665,11 @@ def _quantize(inputs: jnp.ndarray, num_q: int, **kwargs: Any) -> jnp.ndarray: def quantize( - inputs: jnp.ndarray, + inputs: jax.Array, num_levels: int = 10, axis: int = -1, **kwargs: Any, -) -> jnp.ndarray: +) -> jax.Array: r"""Soft quantizes an input according using ``num_levels`` values along axis. The quantization operator consists in concentrating several values around diff --git a/src/ott/types.py b/src/ott/types.py index 7a4c88716..5c4609ec2 100644 --- a/src/ott/types.py +++ b/src/ott/types.py @@ -13,7 +13,7 @@ # limitations under the License. from typing import Protocol -import jax.numpy as jnp +import jax __all__ = ["Transport"] @@ -28,11 +28,11 @@ class can however be used in type hints to support duck typing. """ @property - def matrix(self) -> jnp.ndarray: + def matrix(self) -> jax.Array: ... - def apply(self, inputs: jnp.ndarray, axis: int) -> jnp.ndarray: + def apply(self, inputs: jax.Array, axis: int) -> jax.Array: ... - def marginal(self, axis: int = 0) -> jnp.ndarray: + def marginal(self, axis: int = 0) -> jax.Array: ... 
diff --git a/tests/conftest.py b/tests/conftest.py index bc4570343..a8118845c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -17,7 +17,6 @@ import jax import jax.experimental -import jax.numpy as jnp import pytest from _pytest.python import Metafunc @@ -69,7 +68,7 @@ def pytest_generate_tests(metafunc: Metafunc) -> None: @pytest.fixture(scope="session") -def rng() -> jnp.ndarray: +def rng() -> jax.Array: return jax.random.PRNGKey(0) diff --git a/tests/geometry/costs_test.py b/tests/geometry/costs_test.py index 57a4d8874..47446a4fd 100644 --- a/tests/geometry/costs_test.py +++ b/tests/geometry/costs_test.py @@ -27,7 +27,7 @@ ts_metrics = None -def _proj(matrix: jnp.ndarray) -> jnp.ndarray: +def _proj(matrix: jax.Array) -> jax.Array: u, _, v_h = jnp.linalg.svd(matrix, full_matrices=False) return u.dot(v_h) diff --git a/tests/geometry/graph_test.py b/tests/geometry/graph_test.py index c242b192f..cda2900a8 100644 --- a/tests/geometry/graph_test.py +++ b/tests/geometry/graph_test.py @@ -35,7 +35,7 @@ def random_graph( *, return_laplacian: bool = False, directed: bool = False, -) -> jnp.ndarray: +) -> jax.Array: G = random_graphs.fast_gnp_random_graph(n, p, seed=seed, directed=directed) if not directed: assert nx.is_connected(G), "Generated graph is not connected." 
@@ -51,7 +51,7 @@ def random_graph( return jnp.asarray(G.toarray()) -def gt_geometry(G: jnp.ndarray, *, epsilon: float = 1e-2) -> geometry.Geometry: +def gt_geometry(G: jax.Array, *, epsilon: float = 1e-2) -> geometry.Geometry: if not isinstance(G, nx.Graph): G = nx.from_numpy_array(np.asarray(G)) @@ -160,7 +160,7 @@ def test_crank_nicolson_more_stable(self, t: Optional[float], n_steps: int): @pytest.mark.parametrize(("jit", "normalize"), [(False, True), (True, False)]) def test_directed_graph(self, jit: bool, normalize: bool): - def create_graph(G: jnp.ndarray) -> graph.Graph: + def create_graph(G: jax.Array) -> graph.Graph: return graph.Graph.from_graph(G, directed=True, normalize=normalize) G = random_graph(16, p=0.25, directed=True) @@ -181,7 +181,7 @@ def create_graph(G: jnp.ndarray) -> graph.Graph: @pytest.mark.parametrize("normalize", [False, True]) def test_normalize_laplacian(self, directed: bool, normalize: bool): - def laplacian(G: jnp.ndarray) -> jnp.ndarray: + def laplacian(G: jax.Array) -> jax.Array: if directed: G = G + G.T @@ -250,8 +250,8 @@ def test_dense_graph_differentiability( ): def callback( - data: jnp.ndarray, rows: jnp.ndarray, cols: jnp.ndarray, - shape: Tuple[int, int] + data: jax.Array, rows: jax.Array, cols: jax.Array, shape: Tuple[int, + int] ) -> float: G = sparse.BCOO((data, jnp.c_[rows, cols]), shape=shape).todense() diff --git a/tests/geometry/low_rank_test.py b/tests/geometry/low_rank_test.py index 87dd98db2..6b3c36edd 100644 --- a/tests/geometry/low_rank_test.py +++ b/tests/geometry/low_rank_test.py @@ -160,7 +160,7 @@ def test_add_lr_geoms_scale_factor( @pytest.mark.parametrize("axis", [0, 1]) @pytest.mark.parametrize("fn", [lambda x: x + 10, lambda x: x * 2]) def test_apply_affine_function_efficient( - self, rng: jax.Array, fn: Callable[[jnp.ndarray], jnp.ndarray], axis: int + self, rng: jax.Array, fn: Callable[[jax.Array], jax.Array], axis: int ): n, m, d = 21, 13, 3 rngs = jax.random.split(rng, 3) diff --git 
a/tests/geometry/scaling_cost_test.py b/tests/geometry/scaling_cost_test.py index 94ce97cf4..b60805d34 100644 --- a/tests/geometry/scaling_cost_test.py +++ b/tests/geometry/scaling_cost_test.py @@ -53,7 +53,7 @@ def test_scale_cost_pointcloud( """Test various scale cost options for pointcloud.""" def apply_sinkhorn( - x: jnp.ndarray, y: jnp.ndarray, a: jnp.ndarray, b: jnp.ndarray, + x: jax.Array, y: jax.Array, a: jax.Array, b: jax.Array, scale_cost: Union[str, float] ): geom = pointcloud.PointCloud( @@ -120,8 +120,8 @@ def test_scale_cost_geometry(self, scale: Union[str, float]): """Test various scale cost options for geometry.""" def apply_sinkhorn( - cost: jnp.ndarray, a: jnp.ndarray, b: jnp.ndarray, - scale_cost: Union[str, float] + cost: jax.Array, a: jax.Array, b: jax.Array, scale_cost: Union[str, + float] ): geom = geometry.Geometry(cost, epsilon=self.eps, scale_cost=scale_cost) prob = linear_problem.LinearProblem(geom, a, b) diff --git a/tests/initializers/linear/sinkhorn_init_test.py b/tests/initializers/linear/sinkhorn_init_test.py index 6acf77f11..73c0ddaaa 100644 --- a/tests/initializers/linear/sinkhorn_init_test.py +++ b/tests/initializers/linear/sinkhorn_init_test.py @@ -80,12 +80,12 @@ def create_ot_problem( def run_sinkhorn( - x: jnp.ndarray, - y: jnp.ndarray, + x: jax.Array, + y: jax.Array, *, initializer: linear_init.SinkhornInitializer, - a: Optional[jnp.ndarray] = None, - b: Optional[jnp.ndarray] = None, + a: Optional[jax.Array] = None, + b: Optional[jax.Array] = None, epsilon: float = 1e-2, lse_mode: bool = True, ) -> sinkhorn.SinkhornOutput: diff --git a/tests/math/matrix_square_root_test.py b/tests/math/matrix_square_root_test.py index fcd557957..2263ea8b9 100644 --- a/tests/math/matrix_square_root_test.py +++ b/tests/math/matrix_square_root_test.py @@ -37,9 +37,9 @@ def _get_random_spd_matrix(dim: int, rng: jax.Array): def _get_test_fn( - fn: Callable[[jnp.ndarray], jnp.ndarray], dim: int, rng: jax.Array, + fn: Callable[[jax.Array], 
jax.Array], dim: int, rng: jax.Array, **kwargs: Any -) -> Callable[[jnp.ndarray], jnp.ndarray]: +) -> Callable[[jax.Array], jax.Array]: # We want to test gradients of a function fn that maps positive definite # matrices to positive definite matrices by comparing them to finite # difference approximations. We'll do so via a test function that @@ -54,7 +54,7 @@ def _get_test_fn( unit = jax.random.normal(key=subrng3, shape=(dim, dim)) unit /= jnp.sqrt(jnp.sum(unit ** 2.)) - def _test_fn(x: jnp.ndarray, **kwargs: Any) -> jnp.ndarray: + def _test_fn(x: jax.Array, **kwargs: Any) -> jax.Array: # m is the product of 2 symmetric, positive definite matrices # so it will be positive definite but not necessarily symmetric m = jnp.matmul(m0, m1 + x * dx) @@ -63,7 +63,7 @@ def _test_fn(x: jnp.ndarray, **kwargs: Any) -> jnp.ndarray: return _test_fn -def _sqrt_plus_inv_sqrt(x: jnp.ndarray) -> jnp.ndarray: +def _sqrt_plus_inv_sqrt(x: jax.Array) -> jax.Array: sqrtm = matrix_square_root.sqrtm(x) return sqrtm[0] + sqrtm[1] diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index 2dc9f1e43..a5fdc1c2b 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -46,7 +46,7 @@ def __init__( self.conditions = list(dataloaders.keys()) self.p = p - def __next__(self) -> jnp.ndarray: + def __next__(self) -> jax.Array: self.rng, rng = jax.random.split(self.rng, 2) idx = jax.random.choice(rng, len(self.conditions), p=self.p) return next(self.dataloaders[self.conditions[idx]]) diff --git a/tests/neural/map_estimator_test.py b/tests/neural/map_estimator_test.py index 7c506aa38..0454db751 100644 --- a/tests/neural/map_estimator_test.py +++ b/tests/neural/map_estimator_test.py @@ -13,7 +13,7 @@ # limitations under the License. 
from typing import Optional -import jax.numpy as jnp +import jax import pytest from ott import datasets @@ -34,8 +34,8 @@ def test_map_estimator_convergence(self): # define the fitting loss and the regularizer def fitting_loss( - samples: jnp.ndarray, - mapped_samples: jnp.ndarray, + samples: jax.Array, + mapped_samples: jax.Array, ) -> Optional[float]: r"""Sinkhorn divergence fitting loss.""" div = sinkhorn_divergence.sinkhorn_divergence( diff --git a/tests/neural/meta_initializer_test.py b/tests/neural/meta_initializer_test.py index f711366ec..25a88907e 100644 --- a/tests/neural/meta_initializer_test.py +++ b/tests/neural/meta_initializer_test.py @@ -31,7 +31,7 @@ class MetaMLP(nn.Module): num_hidden_layers: int = 3 @nn.compact - def __call__(self, a: jnp.ndarray, b: jnp.ndarray) -> jnp.ndarray: + def __call__(self, a: jax.Array, b: jax.Array) -> jax.Array: dtype = a.dtype z = jnp.concatenate((a, b)) for _ in range(self.num_hidden_layers): @@ -65,12 +65,12 @@ def create_ot_problem( def run_sinkhorn( - x: jnp.ndarray, - y: jnp.ndarray, + x: jax.Array, + y: jax.Array, *, initializer: linear_init.SinkhornInitializer, - a: Optional[jnp.ndarray] = None, - b: Optional[jnp.ndarray] = None, + a: Optional[jax.Array] = None, + b: Optional[jax.Array] = None, epsilon: float = 1e-2, lse_mode: bool = True, ) -> sinkhorn.SinkhornOutput: diff --git a/tests/solvers/linear/continuous_barycenter_test.py b/tests/solvers/linear/continuous_barycenter_test.py index 5512263c7..5c7fabd67 100644 --- a/tests/solvers/linear/continuous_barycenter_test.py +++ b/tests/solvers/linear/continuous_barycenter_test.py @@ -27,7 +27,7 @@ means_and_covs_to_x = jax.vmap(costs.mean_and_cov_to_x, in_axes=[0, 0, None]) -def is_positive_semidefinite(c: jnp.ndarray) -> bool: +def is_positive_semidefinite(c: jax.Array) -> bool: # GPU friendly, eigvals not implemented for non-symmetric matrices w = jnp.linalg.eigvalsh((c + c.T) / 2.0) return jnp.all(w >= 0) @@ -119,8 +119,8 @@ def test_barycenter_jit(self, 
rng: jax.Array, segment_before: bool): @functools.partial(jax.jit, static_argnums=(2, 3)) def barycenter( - y: jnp.ndarray, - b: jnp.ndarray, + y: jax.Array, + b: jax.Array, segment_before: bool, num_per_segment: Tuple[int, ...], ) -> cb.FreeBarycenterState: diff --git a/tests/solvers/linear/sinkhorn_diff_test.py b/tests/solvers/linear/sinkhorn_diff_test.py index d80f94251..a608c0d71 100644 --- a/tests/solvers/linear/sinkhorn_diff_test.py +++ b/tests/solvers/linear/sinkhorn_diff_test.py @@ -49,7 +49,7 @@ def test_implicit_differentiation_versus_autodiff( ): epsilon = 0.05 - def loss_g(a: jnp.ndarray, x: jnp.ndarray, implicit: bool = True) -> float: + def loss_g(a: jax.Array, x: jax.Array, implicit: bool = True) -> float: implicit_diff = implicit_lib.ImplicitDiff() if implicit else None geom = geometry.Geometry( cost_matrix=jnp.sum(x ** 2, axis=1)[:, jnp.newaxis] + @@ -65,9 +65,7 @@ def loss_g(a: jnp.ndarray, x: jnp.ndarray, implicit: bool = True) -> float: ) return solver(prob).reg_ot_cost - def loss_pcg( - a: jnp.ndarray, x: jnp.ndarray, implicit: bool = True - ) -> float: + def loss_pcg(a: jax.Array, x: jax.Array, implicit: bool = True) -> float: implicit_diff = implicit_lib.ImplicitDiff() if implicit else None geom = pointcloud.PointCloud(x, self.y, epsilon=epsilon) prob = linear_problem.LinearProblem( @@ -154,7 +152,7 @@ def test_autograd_sinkhorn( a = a / jnp.sum(a) b = b / jnp.sum(b) - def reg_ot(a: jnp.ndarray, b: jnp.ndarray) -> float: + def reg_ot(a: jax.Array, b: jax.Array) -> float: geom = pointcloud.PointCloud(x, y, epsilon=1e-1) prob = linear_problem.LinearProblem(geom, a=a, b=b) solver = sinkhorn.Sinkhorn(lse_mode=lse_mode) @@ -190,7 +188,7 @@ def test_gradient_sinkhorn_geometry( delta = delta / jnp.sqrt(jnp.vdot(delta, delta)) eps = 1e-3 # perturbation magnitude - def loss_fn(cm: jnp.ndarray): + def loss_fn(cm: jax.Array): a = jnp.ones(cm.shape[0]) / cm.shape[0] b = jnp.ones(cm.shape[1]) / cm.shape[1] geom = geometry.Geometry(cm, epsilon=0.5) @@ 
-264,8 +262,8 @@ def test_gradient_sinkhorn_euclidean( # Adding some near-zero distances to test proper handling with p_norm=1. y = y.at[0].set(x[0, :] + 1e-3) - def loss_fn(x: jnp.ndarray, - y: jnp.ndarray) -> Tuple[float, sinkhorn.SinkhornOutput]: + def loss_fn(x: jax.Array, + y: jax.Array) -> Tuple[float, sinkhorn.SinkhornOutput]: implicit_diff = implicit_lib.ImplicitDiff() if implicit else None geom = pointcloud.PointCloud(x, y, epsilon=epsilon, cost_fn=cost_fn) prob = linear_problem.LinearProblem(geom, a, b) @@ -320,7 +318,7 @@ def loss_fn(x: jnp.ndarray, def test_autoepsilon_differentiability(self, rng: jax.Array): cost = jax.random.uniform(rng, (15, 17)) - def reg_ot_cost(c: jnp.ndarray) -> float: + def reg_ot_cost(c: jax.Array) -> float: geom = geometry.Geometry(c, epsilon=None) # auto epsilon prob = linear_problem.LinearProblem(geom) return sinkhorn.Sinkhorn()(prob).reg_ot_cost @@ -331,7 +329,7 @@ def reg_ot_cost(c: jnp.ndarray) -> float: @pytest.mark.fast() def test_differentiability_with_jit(self, rng: jax.Array): - def reg_ot_cost(c: jnp.ndarray) -> float: + def reg_ot_cost(c: jax.Array) -> float: geom = geometry.Geometry(c, epsilon=1e-2) prob = linear_problem.LinearProblem(geom) return sinkhorn.Sinkhorn()(prob).reg_ot_cost @@ -385,7 +383,7 @@ def test_apply_transport_jacobian( # general rule, even more so when using backprop. epsilon = 0.01 if lse_mode else 0.1 - def apply_ot(a: jnp.ndarray, x: jnp.ndarray, implicit: bool) -> jnp.ndarray: + def apply_ot(a: jax.Array, x: jax.Array, implicit: bool) -> jax.Array: geom = pointcloud.PointCloud(x, y, epsilon=epsilon) prob = linear_problem.LinearProblem(geom, a, b, tau_a=tau_a, tau_b=tau_b) @@ -488,7 +486,7 @@ def test_potential_jacobian_sinkhorn( # with small epsilon when differentiating. 
epsilon = 0.01 if lse_mode else 0.1 - def loss_from_potential(a: jnp.ndarray, x: jnp.ndarray, implicit: bool): + def loss_from_potential(a: jax.Array, x: jax.Array, implicit: bool): geom = pointcloud.PointCloud(x, y, epsilon=epsilon) prob = linear_problem.LinearProblem(geom, a, b, tau_a=tau_a, tau_b=tau_b) @@ -556,7 +554,7 @@ def test_diff_sinkhorn_x_grid_x_perturbation( a = a.ravel() / jnp.sum(a) b = b.ravel() / jnp.sum(b) - def reg_ot(x: List[jnp.ndarray]) -> float: + def reg_ot(x: List[jax.Array]) -> float: geom = grid.Grid(x=x, epsilon=1.0) prob = linear_problem.LinearProblem(geom, a=a, b=b) solver = sinkhorn.Sinkhorn(threshold=1e-1, lse_mode=lse_mode) @@ -605,7 +603,7 @@ def test_diff_sinkhorn_x_grid_weights_perturbation( b = b.ravel() / jnp.sum(b) geom = grid.Grid(x=x, epsilon=1) - def reg_ot(a: jnp.ndarray, b: jnp.ndarray) -> float: + def reg_ot(a: jax.Array, b: jax.Array) -> float: prob = linear_problem.LinearProblem(geom, a, b) solver = sinkhorn.Sinkhorn(threshold=1e-3, lse_mode=lse_mode) return solver(prob).reg_ot_cost @@ -667,9 +665,9 @@ def test_potential_jacobian_sinkhorn_precond( epsilon = 0.05 if lse_mode else 0.1 def loss_from_potential( - a: jnp.ndarray, - x: jnp.ndarray, - precondition_fun: Optional[Callable[[jnp.ndarray], jnp.ndarray]] = None, + a: jax.Array, + x: jax.Array, + precondition_fun: Optional[Callable[[jax.Array], jax.Array]] = None, symmetric: bool = False ) -> float: geom = pointcloud.PointCloud(x, y, epsilon=epsilon) @@ -771,7 +769,7 @@ def test_hessian_sinkhorn( imp_dif = implicit_lib.ImplicitDiff(solver_kwargs=solver_kwargs) - def loss(a: jnp.ndarray, x: jnp.ndarray, implicit: bool = True): + def loss(a: jax.Array, x: jax.Array, implicit: bool = True): geom = pointcloud.PointCloud(x, y, epsilon=epsilon) prob = linear_problem.LinearProblem(geom, a, b, tau_a, tau_b) implicit_diff = imp_dif if implicit else None diff --git a/tests/solvers/linear/sinkhorn_misc_test.py b/tests/solvers/linear/sinkhorn_misc_test.py index 
aeb37918b..e88ff4d0c 100644 --- a/tests/solvers/linear/sinkhorn_misc_test.py +++ b/tests/solvers/linear/sinkhorn_misc_test.py @@ -346,12 +346,10 @@ def assert_output_close( ) -> None: """Assert SinkhornOutputs are close.""" x = tuple( - a for a in x - if (a is not None and (isinstance(a, (jnp.ndarray, int)))) + a for a in x if (a is not None and (isinstance(a, (jax.Array, int)))) ) y = tuple( - a for a in y - if (a is not None and (isinstance(a, (jnp.ndarray, int)))) + a for a in y if (a is not None and (isinstance(a, (jax.Array, int)))) ) return chex.assert_trees_all_close(x, y, atol=1e-6, rtol=0) @@ -364,7 +362,7 @@ def assert_output_close( def test_jit_vs_non_jit_bwd(self, implicit: bool): @jax.value_and_grad - def val_grad(a: jnp.ndarray, x: jnp.ndarray) -> float: + def val_grad(a: jax.Array, x: jax.Array) -> float: implicit_diff = implicit_lib.ImplicitDiff() if implicit else None geom = geometry.Geometry( cost_matrix=( diff --git a/tests/solvers/quadratic/fgw_test.py b/tests/solvers/quadratic/fgw_test.py index 0a2a2fff4..508fedcb2 100644 --- a/tests/solvers/quadratic/fgw_test.py +++ b/tests/solvers/quadratic/fgw_test.py @@ -56,7 +56,7 @@ def test_gradient_marginals_fgw_solver(self, jit: bool): geom_y = pointcloud.PointCloud(self.y) geom_xy = pointcloud.PointCloud(self.x_2, self.y_2) - def reg_gw(a: jnp.ndarray, b: jnp.ndarray, implicit: bool): + def reg_gw(a: jax.Array, b: jax.Array, implicit: bool): prob = quadratic_problem.QuadraticProblem( geom_x, geom_y, geom_xy, fused_penalty=self.fused_penalty, a=a, b=b ) @@ -101,9 +101,9 @@ def test_gradient_fgw_solver_geometry(self, lse_mode: bool, is_cost: bool): """Test gradient w.r.t. 
the geometries.""" def reg_gw( - x: jnp.ndarray, y: jnp.ndarray, - xy: Union[jnp.ndarray, Tuple[jnp.ndarray, jnp.ndarray]], - fused_penalty: float, a: jnp.ndarray, b: jnp.ndarray, implicit: bool + x: jax.Array, y: jax.Array, xy: Union[jax.Array, Tuple[jax.Array, + jax.Array]], + fused_penalty: float, a: jax.Array, b: jax.Array, implicit: bool ): if is_cost: geom_x = geometry.Geometry(cost_matrix=x) @@ -182,8 +182,8 @@ def test_gradient_fgw_solver_penalty(self): lse_mode = True def reg_gw( - cx: jnp.ndarray, cy: jnp.ndarray, cxy: jnp.ndarray, - fused_penalty: float, a: jnp.ndarray, b: jnp.ndarray, implicit: bool + cx: jax.Array, cy: jax.Array, cxy: jax.Array, fused_penalty: float, + a: jax.Array, b: jax.Array, implicit: bool ) -> float: geom_x = geometry.Geometry(cost_matrix=cx) geom_y = geometry.Geometry(cost_matrix=cy) diff --git a/tests/solvers/quadratic/gw_barycenter_test.py b/tests/solvers/quadratic/gw_barycenter_test.py index 6bc843477..d5dadd691 100644 --- a/tests/solvers/quadratic/gw_barycenter_test.py +++ b/tests/solvers/quadratic/gw_barycenter_test.py @@ -42,9 +42,9 @@ def random_pc( @staticmethod def pad_cost_matrices( - costs: Sequence[jnp.ndarray], + costs: Sequence[jax.Array], shape: Optional[Tuple[int, int]] = None - ) -> Tuple[jnp.ndarray, jnp.ndarray]: + ) -> Tuple[jax.Array, jax.Array]: if shape is None: shape = jnp.asarray([arr.shape for arr in costs]).max() shape = (shape, shape) @@ -133,7 +133,7 @@ def test_fgw_barycenter( ): def barycenter( - y: jnp.ndim, y_fused: jnp.ndarray, num_per_segment: Tuple[int, ...] + y: jnp.ndim, y_fused: jax.Array, num_per_segment: Tuple[int, ...] 
) -> gwb_solver.GWBarycenterState: prob = gwb.GWBarycenterProblem( y=y, diff --git a/tests/solvers/quadratic/gw_test.py b/tests/solvers/quadratic/gw_test.py index e7b77cd58..e7d0ff106 100644 --- a/tests/solvers/quadratic/gw_test.py +++ b/tests/solvers/quadratic/gw_test.py @@ -156,8 +156,8 @@ def test_flag_store_errors(self): def test_gradient_marginals_gw(self, jit: bool): """Test gradient w.r.t. probability weights.""" - def reg_gw(a: jnp.ndarray, b: jnp.ndarray, - implicit: bool) -> Tuple[float, Tuple[jnp.ndarray, jnp.ndarray]]: + def reg_gw(a: jax.Array, b: jax.Array, + implicit: bool) -> Tuple[float, Tuple[jax.Array, jax.Array]]: prob = quadratic_problem.QuadraticProblem(geom_x, geom_y, a=a, b=b) implicit_diff = implicit_lib.ImplicitDiff() if implicit else None linear_solver = sinkhorn.Sinkhorn( @@ -245,8 +245,7 @@ def test_gradient_gw_geometry( """Test gradient w.r.t. the geometries.""" def reg_gw( - x: jnp.ndarray, y: jnp.ndarray, a: jnp.ndarray, b: jnp.ndarray, - implicit: bool + x: jax.Array, y: jax.Array, a: jax.Array, b: jax.Array, implicit: bool ) -> float: if is_cost: geom_x = geometry.Geometry(cost_matrix=x) diff --git a/tests/solvers/quadratic/lower_bound_test.py b/tests/solvers/quadratic/lower_bound_test.py index ba90d6362..bf32aad87 100644 --- a/tests/solvers/quadratic/lower_bound_test.py +++ b/tests/solvers/quadratic/lower_bound_test.py @@ -118,11 +118,11 @@ def test_lb_pointcloud( ] ) def test_lb_grad( - self, rng: jax.Array, sort_fn: Callable[[jnp.ndarray], jnp.ndarray], + self, rng: jax.Array, sort_fn: Callable[[jax.Array], jax.Array], method: str ): - def fn(x: jnp.ndarray, y: jnp.ndarray) -> float: + def fn(x: jax.Array, y: jax.Array) -> float: geom_x = pointcloud.PointCloud(x) geom_y = pointcloud.PointCloud(y) prob = quadratic_problem.QuadraticProblem(geom_x, geom_y) diff --git a/tests/tools/k_means_test.py b/tests/tools/k_means_test.py index a36c4b5c1..55cacde02 100644 --- a/tests/tools/k_means_test.py +++ b/tests/tools/k_means_test.py @@ 
-31,7 +31,7 @@ def make_blobs( *args: Any, cost_fn: Optional[Literal["sqeucl", "cosine"]] = None, **kwargs: Any -) -> Tuple[Union[jnp.ndarray, pointcloud.PointCloud], jnp.ndarray, jnp.ndarray]: +) -> Tuple[Union[jax.Array, pointcloud.PointCloud], jax.Array, jax.Array]: X, y, c = datasets.make_blobs(*args, return_centers=True, **kwargs) X, y, c = jnp.asarray(X), jnp.asarray(y), jnp.asarray(c) if cost_fn is None: @@ -47,10 +47,10 @@ def make_blobs( def compute_assignment( - x: jnp.ndarray, - centers: jnp.ndarray, - weights: Optional[jnp.ndarray] = None -) -> Tuple[jnp.ndarray, float]: + x: jax.Array, + centers: jax.Array, + weights: Optional[jax.Array] = None +) -> Tuple[jax.Array, float]: if weights is None: weights = jnp.ones(x.shape[0]) cost_matrix = pointcloud.PointCloud(x, centers).cost_matrix @@ -104,7 +104,7 @@ def test_matches_sklearn(self, rng: jax.Array, k: int): def test_initialization_differentiable(self, rng: jax.Array): - def callback(x: jnp.ndarray) -> float: + def callback(x: jax.Array) -> float: geom = pointcloud.PointCloud(x) centers = k_means._k_means_plus_plus(geom, k=3, rng=rng) _, inertia = compute_assignment(x, centers) @@ -336,7 +336,7 @@ def test_k_means_jitting( self, rng: jax.Array, init: Literal["k-means++", "random"] ): - def callback(x: jnp.ndarray) -> k_means.KMeansOutput: + def callback(x: jax.Array) -> k_means.KMeansOutput: return k_means.k_means( x, k=k, init=init, store_inner_errors=True, rng=rng ) @@ -368,7 +368,7 @@ def test_k_means_differentiability( self, rng: jax.Array, jit: bool, force_scan: bool ): - def inertia(x: jnp.ndarray, w: jnp.ndarray) -> float: + def inertia(x: jax.Array, w: jax.Array) -> float: return k_means.k_means( x, k=k, diff --git a/tests/tools/sinkhorn_divergence_test.py b/tests/tools/sinkhorn_divergence_test.py index d46c220d0..07bcf535e 100644 --- a/tests/tools/sinkhorn_divergence_test.py +++ b/tests/tools/sinkhorn_divergence_test.py @@ -403,7 +403,7 @@ def test_gradient_generic_point_cloud_wrapper(self): x 
= jax.random.uniform(rngs[0], (self._num_points[0], self._dim)) y = jax.random.uniform(rngs[1], (self._num_points[1], self._dim)) - def loss_fn(cloud_a: jnp.ndarray, cloud_b: jnp.ndarray) -> float: + def loss_fn(cloud_a: jax.Array, cloud_b: jax.Array) -> float: div = sinkhorn_divergence.sinkhorn_divergence( pointcloud.PointCloud, cloud_a, diff --git a/tests/tools/soft_sort_test.py b/tests/tools/soft_sort_test.py index 372420a9e..4f3a12c10 100644 --- a/tests/tools/soft_sort_test.py +++ b/tests/tools/soft_sort_test.py @@ -108,7 +108,7 @@ def test_multivariate_cdf_quantiles(self, rng: jax.Array): # Check passing custom sampler, must be still symmetric / centered on {.5}^d # Check passing custom epsilon also works. - def ball_sampler(k: jax.Array, s: Tuple[int, int]) -> jnp.ndarray: + def ball_sampler(k: jax.Array, s: Tuple[int, int]) -> jax.Array: return 0.5 * (jax.random.ball(k, d=s[1], p=4, shape=(s[0],)) + 1.) num_target_samples = 473 @@ -283,7 +283,7 @@ def test_soft_sort_jacobian(self, rng: jax.Array, implicit: bool): z = jax.random.uniform(rngs[0], ((b, n))) random_dir = jax.random.normal(rngs[1], (b,)) / b - def loss_fn(logits: jnp.ndarray) -> float: + def loss_fn(logits: jax.Array) -> float: im_d = None if implicit: # Ridge parameters are only used when using JAX's CG. 
From b075758cd367f4f24b6cd5e3e0dd0ca9b74302ad Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Sun, 26 Nov 2023 16:27:49 +0100 Subject: [PATCH 021/186] [ci skip] change init arguments of GENOT and add docstrings to GENOT --- src/ott/neural/solvers/genot.py | 184 ++++++++++++++------------------ src/ott/neural/solvers/otfm.py | 45 ++++---- tests/neural/genot_test.py | 12 +-- 3 files changed, 110 insertions(+), 131 deletions(-) diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/solvers/genot.py index efdf5af29..808293f22 100644 --- a/src/ott/neural/solvers/genot.py +++ b/src/ott/neural/solvers/genot.py @@ -17,8 +17,6 @@ Any, Callable, Dict, - Literal, - Mapping, Optional, Tuple, Type, @@ -58,6 +56,37 @@ class GENOT(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): + """The GENOT training class as introduced in :cite:`TODO`. + + Args: + neural_vector_field: Neural vector field parameterized by a neural network. + input_dim: Dimension of the data in the source distribution. + output_dim: Dimension of the data in the target distribution. + cond_dim: Dimension of the conditioning variable. + iterations: Number of iterations. + valid_freq: Frequency of validation. + ot_solver: OT solver to match samples from the source and the target distribution. + epsilon: Entropy regularization term of the OT problem solved by `ot_solver`. + cost_fn: Cost function for the OT problem solved by the `ot_solver`. In the linear case, this is always expected to be of type `str`. If the problem is of quadratic type and `cost_fn` is a string, the `cost_fn` is used for all terms, i.e. both quadratic terms and, if applicable, the linear temr. If of type :class:`dict`, the keys are expected to be `x_cost_fn`, `y_cost_fn`, and if applicable, `xy_cost_fn`. + scale_cost: How to scale the cost matrix for the OT problem solved by the `ot_solver`. In the linear case, this is always expected to be not a :class:`dict`. 
If the problem is of quadratic type and `scale_cost` is a string, the `scale_cost` argument is used for all terms, i.e. both quadratic terms and, if applicable, the linear term. If of type :class:`dict`, the keys are expected to be `x_scale_cost`, `y_scale_cost`, and if applicable, `xy_scale_cost`. + optimizer: Optimizer for `neural_vector_field`. + flow: Flow between latent distribution and target distribution. + time_sampler: Sampler for the time. + checkpoint_manager: Checkpoint manager. + k_samples_per_x: Number of samples drawn from the conditional distribution of an input sample, see algorithm TODO. + solver_latent_to_data: Linear OT solver to match the latent distribution with the conditional distribution. Only applicable if `k_samples_per_x` is larger than :math:`1`. #TODO: adapt + kwargs_solver_latent_to_data: Keyword arguments for `solver_latent_to_data`. #TODO: adapt + fused_penalty: Fused penalty of the linear/fused term in the Fused Gromov-Wasserstein problem. + tau_a: If :math:`<1`, defines how much unbalanced the problem is + on the first marginal. + tau_b: If :math:`< 1`, defines how much unbalanced the problem is + on the second marginal. + mlp_eta: Neural network to learn the left rescaling function as suggested in :cite:`TODO`. If `None`, the left rescaling factor is not learnt. + mlp_xi: Neural network to learn the right rescaling function as suggested in :cite:`TODO`. If `None`, the right rescaling factor is not learnt. + unbalanced_kwargs: Keyword arguments for the unbalancedness solver. + callback_fn: Callback function. + rng: Random number generator. 
+ """ def __init__( self, @@ -68,95 +97,27 @@ def __init__( iterations: int, valid_freq: int, ot_solver: Type[was_solver.WassersteinSolver], + epsilon: float, + cost_fn: Union[costs.CostFn, Dict[str, costs.CostFn]], + scale_cost: Union[Any, Dict[str, Any]], #TODO: replace `Any` optimizer: Type[optax.GradientTransformation], - checkpoint_manager: Type[checkpoint.CheckpointManager] = None, flow: Type[BaseFlow] = ConstantNoiseFlow(0.0), time_sampler: Type[BaseTimeSampler] = UniformSampler(), - k_noise_per_x: int = 1, - t_offset: float = 1e-5, - epsilon: float = 1e-2, - cost_fn: Union[costs.CostFn, Literal["graph"]] = costs.SqEuclidean(), + checkpoint_manager: Type[checkpoint.CheckpointManager] = None, + k_samples_per_x: int = 1, solver_latent_to_data: Optional[Type[was_solver.WassersteinSolver] ] = None, kwargs_solver_latent_to_data: Dict[str, Any] = types.MappingProxyType({}), - scale_cost: Union[Any, Mapping[str, Any]] = 1.0, fused_penalty: float = 0.0, tau_a: float = 1.0, tau_b: float = 1.0, mlp_eta: Callable[[jax.Array], float] = None, mlp_xi: Callable[[jax.Array], float] = None, unbalanced_kwargs: Dict[str, Any] = {}, - callback: Optional[Callable[[jax.Array, jax.Array, jax.Array], - Any]] = None, - callback_kwargs: Dict[str, Any] = {}, - callback_iters: int = 10, + callback_fn: Optional[Callable[[jax.Array, jax.Array, jax.Array], + Any]] = None, rng: random.PRNGKeyArray = random.PRNGKey(0), - **kwargs: Any, ) -> None: - """The GENOT training class. 
- - Parameters - ---------- - neural_vector_field - Neural vector field - input_dim - Dimension of the source distribution - output_dim - Dimension of the target distribution - cond_dim - Dimension of the condition - iterations - Number of iterations to train - valid_freq - Number of iterations after which to perform a validation step - ot_solver - Solver to match samples from the source to the target distribution - optimizer - Optimizer for the neural vector field - flow - Flow to use in the target space from noise to data. Should be of type - `ConstantNoiseFlow` to recover the setup in the paper TODO. - k_noise_per_x - Number of samples to draw from the conditional distribution - t_offset - Offset for sampling from the time t - epsilon - Entropy regularization parameter for the discrete solver - cost_fn - Cost function to use for the discrete OT solver - solver_latent_to_data - Linear OT solver to match samples from the noise to the conditional distribution - latent_to_data_epsilon - Entropy regularization term for `solver_latent_to_data` - latent_to_data_scale_cost - How to scale the cost matrix for the `solver_latent_to_data` solver - scale_cost - How to scale the cost matrix in each discrete OT problem - graph_kwargs - Keyword arguments for the graph cost computation in case `cost="graph"` - fused_penalty - Penalisation term for the linear term in a Fused GW setting - split_dim - Dimension to split the data into fused term and purely quadratic term in the FGW setting - mlp_eta - Neural network to learn the left rescaling function - mlp_xi - Neural network to learn the right rescaling function - tau_a - Left unbalancedness parameter - tau_b - Right unbalancedness parameter - callback - Callback function - callback_kwargs - Keyword arguments to the callback function - callback_iters - Number of iterations after which to evaluate callback function - seed - Random seed - kwargs - Keyword arguments passed to `setup`, e.g. 
custom choice of optimizers for learning rescaling functions - """ BaseNeuralSolver.__init__( self, iterations=iterations, valid_freq=valid_freq ) @@ -196,7 +157,7 @@ def __init__( self.input_dim = input_dim self.output_dim = output_dim self.cond_dim = cond_dim - self.k_noise_per_x = k_noise_per_x + self.k_noise_per_x = k_samples_per_x # OT data-data matching parameters self.ot_solver = ot_solver @@ -210,14 +171,8 @@ def __init__( self.kwargs_solver_latent_to_data = kwargs_solver_latent_to_data # callback parameteres - self.callback = callback - self.callback_kwargs = callback_kwargs - self.callback_iters = callback_iters - - #TODO: check how to handle this - self.t_offset = t_offset - - self.setup(**kwargs) + self.callbac_fn = callback_fn + self.setup() def setup(self) -> None: """Set up the model. @@ -395,23 +350,24 @@ def transport( source: jax.Array, condition: Optional[jax.Array], rng: random.PRNGKeyArray = random.PRNGKey(0), - diffeqsolve_kwargs: Dict[str, Any] = types.MappingProxyType({}), forward: bool = True, + diffeqsolve_kwargs: Dict[str, Any] = types.MappingProxyType({}), ) -> Union[jnp.array, diffrax.Solution, Optional[jax.Array]]: - """Transport the distribution. + """Transport data with the learnt plan. - Parameters - ---------- - source - Source distribution to transport - seed - Random seed for sampling from the latent distribution - diffeqsolve_kwargs - Keyword arguments for the ODE solver. + This method pushes-forward the `source` to its conditional distribution by solving the neural ODE parameterized by the :attr:`~ott.neural.solvers.GENOT.neural_vector_field` from + :attr:`~ott.neural.flows.BaseTimeSampler.low` to :attr:`~ott.neural.flows.BaseTimeSampler.high`. + + Args: + data: Initial condition of the ODE. + condition: Condition of the input data. + rng: random seed for sampling from the latent distribution. + forward: If `True` integrates forward, otherwise backwards. + diffeqsolve_kwargs: Keyword arguments for the ODE solver. 
Returns: - ------- - The transported samples, the solution of the neural ODE, and the rescaling factor. + The push-forward or pull-back distribution defined by the learnt transport plan. + """ if not forward: raise NotImplementedError @@ -449,24 +405,46 @@ def solve_ode(input: jax.Array, cond: jax.Array): return jax.vmap(solve_ode)(latent_batch, cond_input) def _valid_step(self, valid_loader, iter) -> None: + """TODO.""" next(valid_loader) - # TODO: add callback and logging - @property def learn_rescaling(self) -> bool: + """Whether to learn at least one rescaling factor of the marginal distributions.""" return self.mlp_eta is not None or self.mlp_xi is not None def save(self, path: str) -> None: + """Save the model. + + Args: + path: Where to save the model to. + """ raise NotImplementedError def load(self, path: str) -> "GENOT": + """Load a model. + + Args: + path: Where to load the model from. + + Returns: + An instance of :class:`ott.neural.solvers.OTFlowMatching`. + """ raise NotImplementedError + @property def training_logs(self) -> Dict[str, Any]: + """Logs of the training.""" raise NotImplementedError - def sample_noise( #TODO: make more general - self, key: random.PRNGKey, batch_size: int - ) -> jax.Array: #TODO: make more general + def sample_noise(self, key: random.PRNGKey, batch_size: int) -> jax.Array: + """Sample noise from a standard-normal distribution. + + Args: + key: Random key for seeding. + batch_size: Number of samples to draw. + + Returns: + Samples from the standard normal distribution. + """ return random.normal(key, shape=(batch_size, self.output_dim)) diff --git a/src/ott/neural/solvers/otfm.py b/src/ott/neural/solvers/otfm.py index 3b5aa3319..8001a00a1 100644 --- a/src/ott/neural/solvers/otfm.py +++ b/src/ott/neural/solvers/otfm.py @@ -41,30 +41,30 @@ class OTFlowMatching(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): """Flow matching as introduced in :cite:`TODO, with extension to OT-FM (). 
Args: - neural_vector_field: Neural vector field parameterized by a neural network. - input_dim: Dimension of the input data. - cond_dim: Dimension of the conditioning variable. - iterations: Number of iterations. - valid_freq: Frequency of validation. - ot_solver: OT solver to match samples from the source and the target distribution as proposed in :cite:`TODO`. If `None`, no matching will be performed as proposed in :cite:`TODO`. - flow: Flow between source and target distribution. - time_sampler: Sampler for the time. - optimizer: Optimizer for `neural_vector_field`. - checkpoint_manager: Checkpoint manager. - epsilon: Entropy regularization term for the `ot_solver`. - cost_fn: Cost function for the OT problem solved by the `ot_solver`. - tau_a: If :math:`<1`, defines how much unbalanced the problem is - on the first marginal. - tau_b: If :math:`< 1`, defines how much unbalanced the problem is - on the second marginal. - mlp_eta: Neural network to learn the left rescaling function as suggested in :cite:`TODO`. If `None`, the left rescaling factor is not learnt. - mlp_xi: Neural network to learn the right rescaling function as suggested in :cite:`TODO`. If `None`, the right rescaling factor is not learnt. - unbalanced_kwargs: Keyword arguments for the unbalancedness solver. - callback_fn: Callback function. - rng: Random number generator. + neural_vector_field: Neural vector field parameterized by a neural network. + input_dim: Dimension of the input data. + cond_dim: Dimension of the conditioning variable. + iterations: Number of iterations. + valid_freq: Frequency of validation. + ot_solver: OT solver to match samples from the source and the target distribution as proposed in :cite:`TODO`. If `None`, no matching will be performed as proposed in :cite:`TODO`. + flow: Flow between source and target distribution. + time_sampler: Sampler for the time. + optimizer: Optimizer for `neural_vector_field`. + checkpoint_manager: Checkpoint manager. 
+ epsilon: Entropy regularization term of the OT problem solved by the `ot_solver`. + cost_fn: Cost function for the OT problem solved by the `ot_solver`. + tau_a: If :math:`<1`, defines how much unbalanced the problem is + on the first marginal. + tau_b: If :math:`< 1`, defines how much unbalanced the problem is + on the second marginal. + mlp_eta: Neural network to learn the left rescaling function as suggested in :cite:`TODO`. If `None`, the left rescaling factor is not learnt. + mlp_xi: Neural network to learn the right rescaling function as suggested in :cite:`TODO`. If `None`, the right rescaling factor is not learnt. + unbalanced_kwargs: Keyword arguments for the unbalancedness solver. + callback_fn: Callback function. + rng: Random number generator. Returns: - None + None """ @@ -295,6 +295,7 @@ def load(self, path: str) -> "OTFlowMatching": """ raise NotImplementedError + @property def training_logs(self) -> Dict[str, Any]: """Logs of the training.""" raise NotImplementedError diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 183af8419..cf09cad33 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -57,7 +57,7 @@ def test_genot_linear_unconditional( ot_solver=ot_solver, time_sampler=time_sampler, optimizer=optimizer, - k_noise_per_x=k_noise_per_x, + k_samples_per_x=k_noise_per_x, ) genot(genot_data_loader_linear, genot_data_loader_linear) @@ -99,7 +99,7 @@ def test_genot_quad_unconditional( ot_solver=ot_solver, time_sampler=time_sampler, optimizer=optimizer, - k_noise_per_x=k_noise_per_x, + k_samples_per_x=k_noise_per_x, ) genot(genot_data_loader_quad, genot_data_loader_quad) @@ -139,7 +139,7 @@ def test_genot_fused_unconditional( time_sampler=time_sampler, optimizer=optimizer, fused_penalty=0.5, - k_noise_per_x=k_noise_per_x, + k_samples_per_x=k_noise_per_x, ) genot(genot_data_loader_fused, genot_data_loader_fused) @@ -178,7 +178,7 @@ def test_genot_linear_conditional( ot_solver=ot_solver, 
time_sampler=time_sampler, optimizer=optimizer, - k_noise_per_x=k_noise_per_x, + k_samples_per_x=k_noise_per_x, ) genot( genot_data_loader_linear_conditional, @@ -223,7 +223,7 @@ def test_genot_quad_conditional( ot_solver=ot_solver, time_sampler=time_sampler, optimizer=optimizer, - k_noise_per_x=k_noise_per_x, + k_samples_per_x=k_noise_per_x, ) genot(genot_data_loader_quad, genot_data_loader_quad) @@ -263,7 +263,7 @@ def test_genot_fused_conditional( time_sampler=time_sampler, optimizer=optimizer, fused_penalty=0.5, - k_noise_per_x=k_noise_per_x, + k_samples_per_x=k_noise_per_x, ) genot(genot_data_loader_fused, genot_data_loader_fused) From 95e8707bbef94a02639f164f3c6f4f0c39acd4ef Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Sun, 26 Nov 2023 16:38:47 +0100 Subject: [PATCH 022/186] [ci skip] split nets into base_models and models --- src/ott/neural/models/__init__.py | 2 +- src/ott/neural/models/base_models.py | 42 ++++++++++++++++++++++++++++ src/ott/neural/models/models.py | 33 ++++++---------------- src/ott/neural/solvers/flows.py | 5 ++++ src/ott/neural/solvers/genot.py | 2 ++ src/ott/neural/solvers/otfm.py | 2 ++ 6 files changed, 60 insertions(+), 26 deletions(-) create mode 100644 src/ott/neural/models/base_models.py diff --git a/src/ott/neural/models/__init__.py b/src/ott/neural/models/__init__.py index ec2ab6f3f..d2a583f34 100644 --- a/src/ott/neural/models/__init__.py +++ b/src/ott/neural/models/__init__.py @@ -11,4 +11,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from . import conjugate_solvers, layers, models +from . 
import base_models, conjugate_solvers, layers, models diff --git a/src/ott/neural/models/base_models.py b/src/ott/neural/models/base_models.py new file mode 100644 index 000000000..daf161abf --- /dev/null +++ b/src/ott/neural/models/base_models.py @@ -0,0 +1,42 @@ +# Copyright OTT-JAX +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import abc +from typing import Optional + +import flax.linen as nn +import jax + +__all__ = ["BaseNeuralVectorField", "BaseRescalingNet"] + + +class BaseNeuralVectorField(nn.Module, abc.ABC): + + @abc.abstractmethod + def __call__( + self, + t: jax.Array, + x: jax.Array, + condition: Optional[jax.Array] = None, + keys_model: Optional[jax.Array] = None + ) -> jax.Array: # noqa: D102): + pass + + +class BaseRescalingNet(nn.Module, abc.ABC): + + @abc.abstractmethod + def __call___( + self, x: jax.Array, condition: Optional[jax.Array] = None + ) -> jax.Array: + pass diff --git a/src/ott/neural/models/models.py b/src/ott/neural/models/models.py index 5ec8fb292..ea191d99d 100644 --- a/src/ott/neural/models/models.py +++ b/src/ott/neural/models/models.py @@ -11,7 +11,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-import abc import functools from typing import Any, Callable, Dict, Optional, Sequence, Tuple @@ -28,10 +27,16 @@ from ott.initializers.linear import initializers as lin_init from ott.math import matrix_square_root from ott.neural.models import layers +from ott.neural.models.base_models import ( + BaseNeuralVectorField, + BaseRescalingNet, +) from ott.neural.solvers import neuraldual from ott.problems.linear import linear_problem -__all__ = ["ICNN", "MLP", "MetaInitializer"] +__all__ = [ + "ICNN", "MLP", "MetaInitializer", "NeuralVectorField", "Rescaling_MLP" +] class ICNN(neuraldual.BaseW2NeuralDual): @@ -418,19 +423,6 @@ def __call__(self, x): return nn.Dense(self.out_dim)(x) -class BaseNeuralVectorField(nn.Module, abc.ABC): - - @abc.abstractmethod - def __call__( - self, - t: jax.Array, - x: jax.Array, - condition: Optional[jax.Array] = None, - keys_model: Optional[jax.Array] = None - ) -> jax.Array: # noqa: D102): - pass - - class NeuralVectorField(BaseNeuralVectorField): output_dim: int condition_dim: int @@ -541,16 +533,7 @@ def create_train_state( ) -class BaseRescalingNet(nn.Module, abc.ABC): - - @abc.abstractmethod - def __call___( - self, x: jax.Array, condition: Optional[jax.Array] = None - ) -> jax.Array: - pass - - -class Rescaling_MLP(nn.Module): +class Rescaling_MLP(BaseRescalingNet): hidden_dim: int cond_dim: int is_potential: bool = False diff --git a/src/ott/neural/solvers/flows.py b/src/ott/neural/solvers/flows.py index 6552048fb..148c7b188 100644 --- a/src/ott/neural/solvers/flows.py +++ b/src/ott/neural/solvers/flows.py @@ -16,6 +16,11 @@ import jax import jax.numpy as jnp +__all__ = [ + "BaseFlow", "StraightFlow", "ConstantNoiseFlow", "BrownianNoiseFlow", + "BaseTimeSampler", "UniformSampler", "OffsetUniformSampler" +] + class BaseFlow(abc.ABC): """Base class for all flows. 
diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/solvers/genot.py index 808293f22..3c96a269d 100644 --- a/src/ott/neural/solvers/genot.py +++ b/src/ott/neural/solvers/genot.py @@ -54,6 +54,8 @@ Match_latent_fn_T = Callable[[jax.random.PRNGKeyArray, jnp.array, jnp.array], Tuple[jnp.array, jnp.array]] +__all__ = ["GENOT"] + class GENOT(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): """The GENOT training class as introduced in :cite:`TODO`. diff --git a/src/ott/neural/solvers/otfm.py b/src/ott/neural/solvers/otfm.py index 8001a00a1..27c4fab64 100644 --- a/src/ott/neural/solvers/otfm.py +++ b/src/ott/neural/solvers/otfm.py @@ -36,6 +36,8 @@ ) from ott.solvers import was_solver +__all__ = ["OTFlowMatching"] + class OTFlowMatching(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): """Flow matching as introduced in :cite:`TODO, with extension to OT-FM (). From 3b1791d586a302a80739d76b907c581196ad2e1a Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Sun, 26 Nov 2023 16:52:52 +0100 Subject: [PATCH 023/186] [ci skip] add references --- docs/references.bib | 41 +++++++++++++++++++++++++++++++++ src/ott/neural/solvers/genot.py | 6 ++--- src/ott/neural/solvers/otfm.py | 4 ++-- 3 files changed, 46 insertions(+), 5 deletions(-) diff --git a/docs/references.bib b/docs/references.bib index 35ba274ba..0df2ad9ce 100644 --- a/docs/references.bib +++ b/docs/references.bib @@ -805,3 +805,44 @@ @misc{klein:23 title = {Learning Costs for Structured Monge Displacements}, year = {2023}, } + +@misc{klein_uscidda:23, + author = {Klein, Dominik and Uscidda, Th{\'e}o and Theis, Fabian and Cuturi, Marco}, + doi = {10.48550/arXiv.2310.09254}, + eprint = {2310.09254}, + eprintclass = {stat.ML}, + eprinttype = {arXiv}, + title = {Generative Entropic Neural Optimal Transport To Map Within and Across Spaces}, + year = {2023}, +} + +@misc{lipman:22, + author = {Lipman, Yaron and Chen, Ricky TQ and Ben-Hamu, Heli and Nickel, Maximilian and Le, Matt}, + doi = 
{10.48550/arXiv.2210.02747}, + eprint = {2210.02747}, + eprintclass = {stat.ML}, + eprinttype = {arXiv}, + title = {Flow matching for generative modeling}, + year = {2022}, +} + + +@misc{tong:23, + author={Tong, Alexander and Malkin, Nikolay and Huguet, Guillaume and Zhang, Yanlei and Rector-Brooks, Jarrid and Fatras, Kilian and Wolf, Guy and Bengio, Yoshua}, + doi = {TODO}, + eprint = {TODO}, + eprintclass = {TODO}, + eprinttype = {TODO}, + title={Improving and generalizing flow-based generative models with minibatch optimal transport}, + year={2023} +} + +@misc{pooladian:23, + author={Pooladian, Aram-Alexandre and Ben-Hamu, Heli and Domingo-Enrich, Carles and Amos, Brandon and Lipman, Yaron and Chen, Ricky}, + doi = {10.48550/arXiv.2304.14772}, + eprint = {2304.14772}, + eprintclass = {stat.ML}, + eprinttype = {arXiv}, + title = {Multisample flow matching: Straightening flows with minibatch couplings}, + year = {2022}, +} diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/solvers/genot.py index 3c96a269d..e3f17c7f3 100644 --- a/src/ott/neural/solvers/genot.py +++ b/src/ott/neural/solvers/genot.py @@ -58,7 +58,7 @@ class GENOT(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): - """The GENOT training class as introduced in :cite:`TODO`. + """The GENOT training class as introduced in :cite:`klein_uscidda:23`. Args: neural_vector_field: Neural vector field parameterized by a neural network. @@ -83,8 +83,8 @@ class GENOT(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): on the first marginal. tau_b: If :math:`< 1`, defines how much unbalanced the problem is on the second marginal. - mlp_eta: Neural network to learn the left rescaling function as suggested in :cite:`TODO`. If `None`, the left rescaling factor is not learnt. - mlp_xi: Neural network to learn the right rescaling function as suggested in :cite:`TODO`. If `None`, the right rescaling factor is not learnt. + mlp_eta: Neural network to learn the left rescaling function.
If `None`, the left rescaling factor is not learnt. + mlp_xi: Neural network to learn the right rescaling function. If `None`, the right rescaling factor is not learnt. unbalanced_kwargs: Keyword arguments for the unbalancedness solver. callback_fn: Callback function. rng: Random number generator. diff --git a/src/ott/neural/solvers/otfm.py b/src/ott/neural/solvers/otfm.py index 27c4fab64..7faf68c71 100644 --- a/src/ott/neural/solvers/otfm.py +++ b/src/ott/neural/solvers/otfm.py @@ -40,7 +40,7 @@ class OTFlowMatching(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): - """Flow matching as introduced in :cite:`TODO, with extension to OT-FM (). + """Flow matching as introduced in :cite:`lipman:22`, with extension to OT-FM (:cite:`tong:23`, :cite:`pooladian:23`). Args: neural_vector_field: Neural vector field parameterized by a neural network. @@ -48,7 +48,7 @@ class OTFlowMatching(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): cond_dim: Dimension of the conditioning variable. iterations: Number of iterations. valid_freq: Frequency of validation. - ot_solver: OT solver to match samples from the source and the target distribution as proposed in :cite:`TODO`. If `None`, no matching will be performed as proposed in :cite:`TODO`. + ot_solver: OT solver to match samples from the source and the target distribution as proposed in :cite:`tong:23`, :cite:`pooladian:23`. If `None`, no matching will be performed as proposed in :cite:`lipman:22`. flow: Flow between source and target distribution. time_sampler: Sampler for the time. optimizer: Optimizer for `neural_vector_field`.
From eca77c055024c0e068553aa7bede2c127137da75 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Sun, 26 Nov 2023 18:51:33 +0100 Subject: [PATCH 024/186] add tests for learning the rescaling factors --- src/ott/neural/solvers/base_solver.py | 39 +++++++++++++++ src/ott/neural/solvers/genot.py | 2 +- src/ott/neural/solvers/otfm.py | 2 +- tests/neural/genot_test.py | 47 +++++++++++++++++ .../{flow_matching_test.py => otfm_test.py} | 50 ++++++++++++++++++- 5 files changed, 136 insertions(+), 4 deletions(-) rename tests/neural/{flow_matching_test.py => otfm_test.py} (74%) diff --git a/src/ott/neural/solvers/base_solver.py b/src/ott/neural/solvers/base_solver.py index 69f510d81..e18f0db96 100644 --- a/src/ott/neural/solvers/base_solver.py +++ b/src/ott/neural/solvers/base_solver.py @@ -44,6 +44,7 @@ def __init__(self, iterations: int, valid_freq: int, **_: Any) -> None: @abstractmethod def setup(self, *args: Any, **kwargs: Any) -> None: + """Setup the model.""" pass @abstractmethod @@ -74,6 +75,7 @@ def training_logs(self) -> Dict[str, Any]: class ResampleMixin: + """Mixin class for mini-batch OT in neural optimal transport solvers.""" def __init__(*args, **kwargs): pass @@ -239,6 +241,7 @@ def match_pairs( class UnbalancednessMixin: + """Mixin class to incorporate unbalancedness into neural OT models.""" def __init__( self, @@ -421,3 +424,39 @@ def step_fn( return new_state_eta, new_state_xi, eta_predictions, xi_predictions, loss_a, loss_b return step_fn + + def evaluate_eta( + self, source: jax.Array, condition: Optional[jax.Array] + ) -> jax.Array: + """Evaluate the left learnt rescaling factor. + + Args: + source: Samples from the source distribution to evaluate rescaling function on. + condition: Condition belonging to the samples in the source distribution. + + Returns: + Learnt left rescaling factors. 
+ """ + if self.state_eta is None: + raise ValueError("The left rescaling factor was not parameterized.") + return self.state_xi.apply_fn({"params": self.state_eta.params}, + x=source, + condition=condition) + + def evaluate_xi( + self, target: jax.Array, condition: Optional[jax.Array] + ) -> jax.Array: + """Evaluate the right learnt rescaling factor. + + Args: + target: Samples from the target distribution to evaluate the rescaling function on. + condition: Condition belonging to the samples in the target distribution. + + Returns: + Learnt right rescaling factors. + """ + if self.state_xi is None: + raise ValueError("The right rescaling factor was not parameterized.") + return self.state_xi.apply_fn({"params": self.state_xi.params}, + x=target, + condition=condition) diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/solvers/genot.py index e3f17c7f3..00b01cb6f 100644 --- a/src/ott/neural/solvers/genot.py +++ b/src/ott/neural/solvers/genot.py @@ -365,7 +365,7 @@ def transport( condition: Condition of the input data. rng: random seed for sampling from the latent distribution. forward: If `True` integrates forward, otherwise backwards. - diffeqsovle_kwargs: Keyword arguments for the ODE solver. + diffeqsolve_kwargs: Keyword arguments for the ODE solver. Returns: The push-forward or pull-back distribution defined by the learnt transport plan. diff --git a/src/ott/neural/solvers/otfm.py b/src/ott/neural/solvers/otfm.py index 7faf68c71..1ca5b0b0e 100644 --- a/src/ott/neural/solvers/otfm.py +++ b/src/ott/neural/solvers/otfm.py @@ -235,7 +235,7 @@ def transport( data: Initial condition of the ODE. condition: Condition of the input data. forward: If `True` integrates forward, otherwise backwards. - diffeqsovle_kwargs: Keyword arguments for the ODE solver. + diffeqsolve_kwargs: Keyword arguments for the ODE solver. Returns: The push-forward or pull-back distribution defined by the learnt transport plan. 
diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index cf09cad33..e13eb0aaf 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -272,3 +272,50 @@ def test_genot_fused_conditional( ) assert isinstance(result_forward, jax.Array) assert jnp.sum(jnp.isnan(result_forward)) == 0 + + @pytest.mark.parametrize("conditional", [False, True]) + def test_genot_linear_learn_rescaling( + self, conditional: bool, genot_data_loader_linear: Iterator, + genot_data_loader_linear_conditional: Iterator + ): + data_loader = genot_data_loader_linear_conditional if conditional else genot_data_loader_linear + + source_lin, source_quad, target_lin, target_quad, condition = next( + genot_data_loader_linear + ) + source_dim = source_lin.shape[1] + target_dim = target_lin.shape[1] + condition_dim = condition.shape[1] if conditional else 0 + + neural_vf = NeuralVectorField( + output_dim=target_dim, + condition_dim=condition_dim, + latent_embed_dim=5, + ) + ot_solver = sinkhorn.Sinkhorn() + time_sampler = UniformSampler() + optimizer = optax.adam(learning_rate=1e-3) + genot = GENOT( + neural_vf, + input_dim=source_dim, + output_dim=target_dim, + cond_dim=condition_dim, + iterations=3, + valid_freq=2, + ot_solver=ot_solver, + time_sampler=time_sampler, + optimizer=optimizer, + ) + genot(data_loader, data_loader) + + source_lin, source_quad, target_lin, target_quad, condition = next( + genot_data_loader_linear + ) + + result_eta = genot.evaluate_eta(source_lin, condition=condition) + assert isinstance(result_eta, jax.Array) + assert jnp.sum(jnp.isnan(result_eta)) == 0 + + result_xi = genot.evaluate_xi(target_lin, condition=condition) + assert isinstance(result_xi, jax.Array) + assert jnp.sum(jnp.isnan(result_xi)) == 0 diff --git a/tests/neural/flow_matching_test.py b/tests/neural/otfm_test.py similarity index 74% rename from tests/neural/flow_matching_test.py rename to tests/neural/otfm_test.py index a1135cf2d..e26920253 100644 --- 
a/tests/neural/flow_matching_test.py +++ b/tests/neural/otfm_test.py @@ -11,14 +11,14 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import Type +from typing import Iterator, Type import jax import jax.numpy as jnp import optax import pytest -from ott.neural.models.models import NeuralVectorField +from ott.neural.models.models import NeuralVectorField, Rescaling_MLP from ott.neural.solvers.flows import ( BaseFlow, BrownianNoiseFlow, @@ -149,3 +149,49 @@ def test_flow_matching_conditional( result_backward = fm.transport(target, condition=condition, forward=False) assert isinstance(result_backward, jax.Array) assert jnp.sum(jnp.isnan(result_backward)) == 0 + + @pytest.mark.parametrize("conditional", [True, False]) + def test_flow_matching_learn_rescaling( + self, conditional: bool, data_loader_gaussian: Iterator, + data_loader_gaussian_conditional: Iterator + ): + data_loader = data_loader_gaussian_conditional if conditional else data_loader_gaussian + neural_vf = NeuralVectorField( + output_dim=2, + condition_dim=0, + latent_embed_dim=5, + ) + ot_solver = sinkhorn.Sinkhorn() + time_sampler = UniformSampler() + flow = ConstantNoiseFlow(1.0) + optimizer = optax.adam(learning_rate=1e-3) + + tau_a = 0.9 + tau_b = 0.2 + mlp_eta = Rescaling_MLP(hidden_dim=4, cond_dim=0) + mlp_xi = Rescaling_MLP(hidden_dim=4, cond_dim=0) + fm = OTFlowMatching( + neural_vf, + input_dim=2, + cond_dim=0, + iterations=3, + valid_freq=2, + ot_solver=ot_solver, + flow=flow, + time_sampler=time_sampler, + optimizer=optimizer, + tau_a=tau_a, + tau_b=tau_b, + mlp_eta=mlp_eta, + mlp_xi=mlp_xi, + ) + fm(data_loader, data_loader) + + source, target, condition = next(data_loader_gaussian) + result_eta = fm.evaluate_eta(source, condition=condition) + assert isinstance(result_eta, jax.Array) + assert jnp.sum(jnp.isnan(result_eta)) == 0 + + result_xi = 
fm.evaluate_xi(target, condition=condition) + assert isinstance(result_xi, jax.Array) + assert jnp.sum(jnp.isnan(result_xi)) == 0 From 62b266655482701b5cb28b8e1520dc3956873041 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Sun, 26 Nov 2023 19:38:58 +0100 Subject: [PATCH 025/186] [ci skip] partially fix rescaling factor learning --- src/ott/neural/models/base_models.py | 2 +- src/ott/neural/models/models.py | 18 +++++++++--------- src/ott/neural/solvers/base_solver.py | 21 +++++++++++++++------ src/ott/neural/solvers/flows.py | 3 +++ src/ott/neural/solvers/genot.py | 2 ++ src/ott/neural/solvers/otfm.py | 4 +++- tests/neural/genot_test.py | 8 +++++++- tests/neural/otfm_test.py | 11 +++++++---- 8 files changed, 47 insertions(+), 22 deletions(-) diff --git a/src/ott/neural/models/base_models.py b/src/ott/neural/models/base_models.py index daf161abf..74a87df93 100644 --- a/src/ott/neural/models/base_models.py +++ b/src/ott/neural/models/base_models.py @@ -36,7 +36,7 @@ def __call__( class BaseRescalingNet(nn.Module, abc.ABC): @abc.abstractmethod - def __call___( + def __call__( self, x: jax.Array, condition: Optional[jax.Array] = None ) -> jax.Array: pass diff --git a/src/ott/neural/models/models.py b/src/ott/neural/models/models.py index ea191d99d..62edee7ef 100644 --- a/src/ott/neural/models/models.py +++ b/src/ott/neural/models/models.py @@ -535,8 +535,8 @@ def create_train_state( class Rescaling_MLP(BaseRescalingNet): hidden_dim: int - cond_dim: int - is_potential: bool = False + condition_dim: int + num_layers_per_block: int = 3 act_fn: Callable[[jax.Array], jax.Array] = nn.selu @nn.compact @@ -544,8 +544,8 @@ def __call__( self, x: jax.Array, condition: Optional[jax.Array] ) -> jax.Array: # noqa: D102 x = Block( - dim=self.latent_embed_dim, - out_dim=self.latent_embed_dim, + dim=self.hidden_dim, + out_dim=self.hidden_dim, num_layers=self.num_layers_per_block, act_fn=self.act_fn )( @@ -553,8 +553,8 @@ def __call__( ) if self.condition_dim > 0: condition = 
Block( - dim=self.condition_embed_dim, - out_dim=self.condition_embed_dim, + dim=self.hidden_dim, + out_dim=self.hidden_dim, num_layers=self.num_layers_per_block, act_fn=self.act_fn )( @@ -565,8 +565,8 @@ def __call__( concatenated = x out = Block( - dim=self.joint_hidden_dim, - out_dim=self.joint_hidden_dim, + dim=self.hidden_dim, + out_dim=self.hidden_dim, num_layers=self.num_layers_per_block, act_fn=self.act_fn, )( @@ -582,7 +582,7 @@ def create_train_state( input_dim: int, ) -> train_state.TrainState: params = self.init( - rng, jnp.ones((1, input_dim)), jnp.ones((1, self.cond_dim)) + rng, jnp.ones((1, input_dim)), jnp.ones((1, self.condition_dim)) )["params"] return train_state.TrainState.create( apply_fn=self.apply, params=params, tx=optimizer diff --git a/src/ott/neural/solvers/base_solver.py b/src/ott/neural/solvers/base_solver.py index e18f0db96..996f2fe0e 100644 --- a/src/ott/neural/solvers/base_solver.py +++ b/src/ott/neural/solvers/base_solver.py @@ -245,6 +245,7 @@ class UnbalancednessMixin: def __init__( self, + rng: jax.Array, source_dim: int, target_dim: int, cond_dim: Optional[int], @@ -261,6 +262,7 @@ def __init__( sinkhorn_kwargs: Mapping[str, Any] = MappingProxyType({}), **_: Any, ) -> None: + self.rng_unbalanced = rng self.source_dim = source_dim self.target_dim = target_dim self.cond_dim = cond_dim @@ -325,14 +327,17 @@ def _resample_unbalanced( return tuple(b[indices] if b is not None else None for b in batch) def _setup(self, source_dim: int, target_dim: int, cond_dim: int): - self.unbalancedness_step_fn = self._get_step_fn() + self.rng_unbalanced, rng_eta, rng_xi = jax.random.split( + self.rng_unbalanced, 3 + ) + self.unbalancedness_step_fn = self._get_rescaling_step_fn() if self.mlp_eta is not None: self.opt_eta = ( self.opt_eta if self.opt_eta is not None else optax.adamw(learning_rate=1e-4, weight_decay=1e-10) ) self.state_eta = self.mlp_eta.create_train_state( - self._key, self.opt_eta, source_dim + cond_dim + rng_eta, self.opt_eta, 
source_dim + cond_dim ) if self.mlp_xi is not None: self.opt_xi = ( @@ -340,19 +345,20 @@ def _setup(self, source_dim: int, target_dim: int, cond_dim: int): optax.adamw(learning_rate=1e-4, weight_decay=1e-10) ) self.state_xi = self.mlp_xi.create_train_state( - self._key, self.opt_xi, target_dim + cond_dim + rng_xi, self.opt_xi, target_dim + cond_dim ) - def _get_step_fn(self) -> Callable: # type:ignore[type-arg] + def _get_rescaling_step_fn(self) -> Callable: # type:ignore[type-arg] def loss_a_fn( params_eta: Optional[jax.Array], apply_fn_eta: Callable[[Dict[str, jax.Array], jax.Array], jax.Array], x: jax.Array, + condition: Optional[jax.Array], a: jax.Array, expectation_reweighting: float, ) -> Tuple[float, jax.Array]: - eta_predictions = apply_fn_eta({"params": params_eta}, x) + eta_predictions = apply_fn_eta({"params": params_eta}, x, condition) return ( optax.l2_loss(eta_predictions[:, 0], a).mean() + optax.l2_loss(jnp.mean(eta_predictions) - expectation_reweighting), @@ -363,10 +369,11 @@ def loss_b_fn( params_xi: Optional[jax.Array], apply_fn_xi: Callable[[Dict[str, jax.Array], jax.Array], jax.Array], x: jax.Array, + condition: Optional[jax.Array], b: jax.Array, expectation_reweighting: float, ) -> Tuple[float, jax.Array]: - xi_predictions = apply_fn_xi({"params": params_xi}, x) + xi_predictions = apply_fn_xi({"params": params_xi}, x, condition) return ( optax.l2_loss(xi_predictions[:, 0], b).mean() + optax.l2_loss(jnp.mean(xi_predictions) - expectation_reweighting), @@ -397,6 +404,7 @@ def step_fn( state_eta.params, state_eta.apply_fn, input_source, + condition, a * len(a), jnp.sum(b), ) @@ -412,6 +420,7 @@ def step_fn( state_xi.params, state_xi.apply_fn, input_target, + condition, b * len(b), jnp.sum(a), ) diff --git a/src/ott/neural/solvers/flows.py b/src/ott/neural/solvers/flows.py index 148c7b188..6450e2c1b 100644 --- a/src/ott/neural/solvers/flows.py +++ b/src/ott/neural/solvers/flows.py @@ -176,6 +176,9 @@ class UniformSampler(BaseTimeSampler): high: 
Upper bound of the uniform distribution. """ + def __init__(self, low: float = 0.0, high: float = 1.0) -> None: + super().__init__(low=low, high=high) + def __call__(self, rng: jax.Array, num_samples: int) -> jax.Array: """Generate `num_samples` samples of the time `math`:t:. diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/solvers/genot.py index 00b01cb6f..55888d878 100644 --- a/src/ott/neural/solvers/genot.py +++ b/src/ott/neural/solvers/genot.py @@ -120,12 +120,14 @@ def __init__( Any]] = None, rng: random.PRNGKeyArray = random.PRNGKey(0), ) -> None: + rng, rng_unbalanced = random.split(rng) BaseNeuralSolver.__init__( self, iterations=iterations, valid_freq=valid_freq ) ResampleMixin.__init__(self) UnbalancednessMixin.__init__( self, + rng=rng_unbalanced, source_dim=input_dim, target_dim=input_dim, cond_dim=cond_dim, diff --git a/src/ott/neural/solvers/otfm.py b/src/ott/neural/solvers/otfm.py index 1ca5b0b0e..74392b2c5 100644 --- a/src/ott/neural/solvers/otfm.py +++ b/src/ott/neural/solvers/otfm.py @@ -93,12 +93,14 @@ def __init__( Any]] = None, rng: random.PRNGKeyArray = random.PRNGKey(0), ) -> None: + rng, rng_unbalanced = random.split(rng) BaseNeuralSolver.__init__( self, iterations=iterations, valid_freq=valid_freq ) ResampleMixin.__init__(self) UnbalancednessMixin.__init__( self, + rng=rng_unbalanced, source_dim=input_dim, target_dim=input_dim, cond_dim=cond_dim, @@ -204,7 +206,7 @@ def __call__(self, train_loader, valid_loader) -> None: ) if self.learn_rescaling: self.state_eta, self.state_xi, eta_predictions, xi_predictions, loss_a, loss_b = self.unbalancedness_step_fn( - batch, tmat.sum(axis=1), tmat.sum(axis=0) + source=batch["source"], target=batch["target"], condition=batch["condition"], a=tmat.sum(axis=1), b=tmat.sum(axis=0), state_eta=self.state_eta, state_xi=self.state_xi, ) if iter % self.valid_freq == 0: self._valid_step(valid_loader, iter) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 
e13eb0aaf..cba3f1ef7 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -18,7 +18,7 @@ import optax import pytest -from ott.neural.models.models import NeuralVectorField +from ott.neural.models.models import NeuralVectorField, Rescaling_MLP from ott.neural.solvers.flows import OffsetUniformSampler, UniformSampler from ott.neural.solvers.genot import GENOT from ott.solvers.linear import sinkhorn @@ -295,6 +295,8 @@ def test_genot_linear_learn_rescaling( ot_solver = sinkhorn.Sinkhorn() time_sampler = UniformSampler() optimizer = optax.adam(learning_rate=1e-3) + mlp_eta = Rescaling_MLP(hidden_dim=4, condition_dim=condition_dim) + mlp_xi = Rescaling_MLP(hidden_dim=4, condition_dim=condition_dim) genot = GENOT( neural_vf, input_dim=source_dim, @@ -305,6 +307,10 @@ def test_genot_linear_learn_rescaling( ot_solver=ot_solver, time_sampler=time_sampler, optimizer=optimizer, + tau_a=tau_a, + tau_b=tau_b, + mlp_eta=mlp_eta, + mlp_xi=mlp_xi, ) genot(data_loader, data_loader) diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index e26920253..9e83251e7 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -156,6 +156,9 @@ def test_flow_matching_learn_rescaling( data_loader_gaussian_conditional: Iterator ): data_loader = data_loader_gaussian_conditional if conditional else data_loader_gaussian + source, target, condition = next(data_loader) + source_dim = source.shape[1] + condition_dim = condition.shape[1] if conditional else 0 neural_vf = NeuralVectorField( output_dim=2, condition_dim=0, @@ -168,12 +171,12 @@ def test_flow_matching_learn_rescaling( tau_a = 0.9 tau_b = 0.2 - mlp_eta = Rescaling_MLP(hidden_dim=4, cond_dim=0) - mlp_xi = Rescaling_MLP(hidden_dim=4, cond_dim=0) + mlp_eta = Rescaling_MLP(hidden_dim=4, condition_dim=condition_dim) + mlp_xi = Rescaling_MLP(hidden_dim=4, condition_dim=condition_dim) fm = OTFlowMatching( neural_vf, - input_dim=2, - cond_dim=0, + input_dim=source_dim, + cond_dim=condition_dim, 
iterations=3, valid_freq=2, ot_solver=ot_solver, From 2ceceea191464d779a95f4ba6b65cc4ed3792595 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Sun, 26 Nov 2023 19:58:54 +0100 Subject: [PATCH 026/186] [ci skip] fix rescaling factor learning --- src/ott/neural/solvers/base_solver.py | 19 ++++++------------- src/ott/neural/solvers/genot.py | 22 +++++++++++++++++----- src/ott/neural/solvers/otfm.py | 8 +++++++- tests/neural/genot_test.py | 2 ++ tests/neural/otfm_test.py | 3 +-- 5 files changed, 33 insertions(+), 21 deletions(-) diff --git a/src/ott/neural/solvers/base_solver.py b/src/ott/neural/solvers/base_solver.py index 996f2fe0e..b58710dfd 100644 --- a/src/ott/neural/solvers/base_solver.py +++ b/src/ott/neural/solvers/base_solver.py @@ -89,9 +89,7 @@ def _resample_data( ) -> Tuple[jax.Array, ...]: """Resample a batch according to coupling `tmat`.""" tmat_flattened = tmat.flatten() - indices = random.choice( - key, len(tmat_flattened), shape=[len(tmat_flattened)] - ) + indices = random.choice(key, len(tmat_flattened), shape=[tmat.shape[0]]) indices_source = indices // tmat.shape[1] indices_target = indices % tmat.shape[1] return tuple( @@ -337,7 +335,7 @@ def _setup(self, source_dim: int, target_dim: int, cond_dim: int): optax.adamw(learning_rate=1e-4, weight_decay=1e-10) ) self.state_eta = self.mlp_eta.create_train_state( - rng_eta, self.opt_eta, source_dim + cond_dim + rng_eta, self.opt_eta, source_dim ) if self.mlp_xi is not None: self.opt_xi = ( @@ -345,7 +343,7 @@ def _setup(self, source_dim: int, target_dim: int, cond_dim: int): optax.adamw(learning_rate=1e-4, weight_decay=1e-10) ) self.state_xi = self.mlp_xi.create_train_state( - rng_xi, self.opt_xi, target_dim + cond_dim + rng_xi, self.opt_xi, target_dim ) def _get_rescaling_step_fn(self) -> Callable: # type:ignore[type-arg] @@ -392,18 +390,13 @@ def step_fn( *, is_training: bool = True, ): - if condition is None: - input_source = source - input_target = target - else: - input_source = 
jnp.concatenate([source, condition], axis=-1) - input_target = jnp.concatenate([target, condition], axis=-1) if state_eta is not None: grad_a_fn = jax.value_and_grad(loss_a_fn, argnums=0, has_aux=True) + print(source.shape, (a * len(a)).shape) (loss_a, eta_predictions), grads_eta = grad_a_fn( state_eta.params, state_eta.apply_fn, - input_source, + source, condition, a * len(a), jnp.sum(b), @@ -419,7 +412,7 @@ def step_fn( (loss_b, xi_predictions), grads_xi = grad_b_fn( state_xi.params, state_xi.apply_fn, - input_target, + target, condition, b * len(b), jnp.sum(a), diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/solvers/genot.py index 55888d878..9cdab7a08 100644 --- a/src/ott/neural/solvers/genot.py +++ b/src/ott/neural/solvers/genot.py @@ -249,19 +249,25 @@ def __call__(self, train_loader, valid_loader) -> None: (batch["target"], batch["target_q"]), source_is_balanced=(self.tau_a == 1.0) ) + source = jnp.concatenate([ + batch[el] for el in ["source", "source_q"] if batch[el] is not None + ], + axis=1) + target = jnp.concatenate([ + batch[el] for el in ["target", "target_q"] if batch[el] is not None + ], + axis=1) + rng_latent = jax.random.split(rng_noise, batch_size * self.k_noise_per_x) if self.solver_latent_to_data is not None: - target = jnp.concatenate([ - batch[el] for el in ["target", "target_q"] if batch[el] is not None - ], - axis=1) tmats_latent_data = jnp.array( jax.vmap(self.match_latent_to_data_fn, 0, 0)(key=rng_latent, x=batch["latent"], y=target) ) if self.k_noise_per_x > 1: + raise NotImplementedError rng_latent_data_match = jax.random.split( rng_latent_data_match, batch_size ) @@ -290,7 +296,13 @@ def __call__(self, train_loader, valid_loader) -> None: ) if self.learn_rescaling: self.state_eta, self.state_xi, eta_predictions, xi_predictions, loss_a, loss_b = self.unbalancedness_step_fn( - batch, tmat.sum(axis=1), tmat.sum(axis=0) + source=source, + target=target, + condition=batch["condition"], + a=tmat.sum(axis=1), + 
b=tmat.sum(axis=0), + state_eta=self.state_eta, + state_xi=self.state_xi, ) if iteration % self.valid_freq == 0: self._valid_step(valid_loader, iteration) diff --git a/src/ott/neural/solvers/otfm.py b/src/ott/neural/solvers/otfm.py index 74392b2c5..25684016f 100644 --- a/src/ott/neural/solvers/otfm.py +++ b/src/ott/neural/solvers/otfm.py @@ -206,7 +206,13 @@ def __call__(self, train_loader, valid_loader) -> None: ) if self.learn_rescaling: self.state_eta, self.state_xi, eta_predictions, xi_predictions, loss_a, loss_b = self.unbalancedness_step_fn( - source=batch["source"], target=batch["target"], condition=batch["condition"], a=tmat.sum(axis=1), b=tmat.sum(axis=0), state_eta=self.state_eta, state_xi=self.state_xi, + source=batch["source"], + target=batch["target"], + condition=batch["condition"], + a=tmat.sum(axis=1), + b=tmat.sum(axis=0), + state_eta=self.state_eta, + state_xi=self.state_xi, ) if iter % self.valid_freq == 0: self._valid_step(valid_loader, iter) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index cba3f1ef7..ea4280f8f 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -295,6 +295,8 @@ def test_genot_linear_learn_rescaling( ot_solver = sinkhorn.Sinkhorn() time_sampler = UniformSampler() optimizer = optax.adam(learning_rate=1e-3) + tau_a = 0.9 + tau_b = 0.2 mlp_eta = Rescaling_MLP(hidden_dim=4, condition_dim=condition_dim) mlp_xi = Rescaling_MLP(hidden_dim=4, condition_dim=condition_dim) genot = GENOT( diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index 9e83251e7..1993432f2 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -150,7 +150,7 @@ def test_flow_matching_conditional( assert isinstance(result_backward, jax.Array) assert jnp.sum(jnp.isnan(result_backward)) == 0 - @pytest.mark.parametrize("conditional", [True, False]) + @pytest.mark.parametrize("conditional", [False, True]) def test_flow_matching_learn_rescaling( self, conditional: bool, 
data_loader_gaussian: Iterator, data_loader_gaussian_conditional: Iterator @@ -190,7 +190,6 @@ def test_flow_matching_learn_rescaling( ) fm(data_loader, data_loader) - source, target, condition = next(data_loader_gaussian) result_eta = fm.evaluate_eta(source, condition=condition) assert isinstance(result_eta, jax.Array) assert jnp.sum(jnp.isnan(result_eta)) == 0 From e8f8171aee55a7b167c4e86c9ed59ea517833c01 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Mon, 27 Nov 2023 12:04:36 +0100 Subject: [PATCH 027/186] [ci skip] all tests passing but k_samples_per_x in genot --- src/ott/neural/solvers/base_solver.py | 1 - src/ott/neural/solvers/genot.py | 67 +++++++++++---------- tests/neural/genot_test.py | 83 +++++++++++++++++---------- 3 files changed, 85 insertions(+), 66 deletions(-) diff --git a/src/ott/neural/solvers/base_solver.py b/src/ott/neural/solvers/base_solver.py index b58710dfd..80dc8e616 100644 --- a/src/ott/neural/solvers/base_solver.py +++ b/src/ott/neural/solvers/base_solver.py @@ -392,7 +392,6 @@ def step_fn( ): if state_eta is not None: grad_a_fn = jax.value_and_grad(loss_a_fn, argnums=0, has_aux=True) - print(source.shape, (a * len(a)).shape) (loss_a, eta_predictions), grads_eta = grad_a_fn( state_eta.params, state_eta.apply_fn, diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/solvers/genot.py index 9cdab7a08..cfb1b6158 100644 --- a/src/ott/neural/solvers/genot.py +++ b/src/ott/neural/solvers/genot.py @@ -218,15 +218,15 @@ def __call__(self, train_loader, valid_loader) -> None: """Train GENOT.""" batch: Dict[str, jnp.array] = {} for iteration in range(self.iterations): - batch["source"], batch["source_q"], batch["target"], batch[ + batch["source_lin"], batch["source_q"], batch["target_lin"], batch[ "target_q"], batch["condition"] = next(train_loader) self.rng, rng_time, rng_resample, rng_noise, rng_latent_data_match, rng_step_fn = jax.random.split( self.rng, 6 ) - batch_size = len(batch["source"]) if batch["source"] is not None else 
len( - batch["source_q"] - ) + batch_size = len( + batch["source_lin"] + ) if batch["source_lin"] is not None else len(batch["source_q"]) n_samples = batch_size * self.k_noise_per_x batch["time"] = self.time_sampler(rng_time, n_samples) batch["noise"] = self.sample_noise(rng_noise, n_samples) @@ -237,33 +237,36 @@ def __call__(self, train_loader, valid_loader) -> None: ) tmat = self.match_fn( - batch["source"], batch["source_q"], batch["target"], batch["target_q"] + batch["source_lin"], batch["source_q"], batch["target_lin"], + batch["target_q"] ) - (batch["source"], batch["source_q"], batch["condition"] - ), (batch["target"], - batch["target_q"]) = self._sample_conditional_indices_from_tmap( - rng_resample, - tmat, - self.k_noise_per_x, - (batch["source"], batch["source_q"], batch["condition"]), - (batch["target"], batch["target_q"]), - source_is_balanced=(self.tau_a == 1.0) - ) - source = jnp.concatenate([ - batch[el] for el in ["source", "source_q"] if batch[el] is not None + + batch["source"] = jnp.concatenate([ + batch[el] + for el in ["source_lin", "source_q"] + if batch[el] is not None ], - axis=1) - target = jnp.concatenate([ - batch[el] for el in ["target", "target_q"] if batch[el] is not None + axis=1) + batch["target"] = jnp.concatenate([ + batch[el] + for el in ["target_lin", "target_q"] + if batch[el] is not None ], - axis=1) - + axis=1) + (batch["source"], batch["condition"] + ), (batch["target"],) = self._sample_conditional_indices_from_tmap( + rng_resample, + tmat, + self.k_noise_per_x, (batch["source"], batch["condition"]), + (batch["target"],), + source_is_balanced=(self.tau_a == 1.0) + ) rng_latent = jax.random.split(rng_noise, batch_size * self.k_noise_per_x) if self.solver_latent_to_data is not None: tmats_latent_data = jnp.array( jax.vmap(self.match_latent_to_data_fn, 0, - 0)(key=rng_latent, x=batch["latent"], y=target) + 0)(key=rng_latent, x=batch["latent"], y=batch["target"]) ) if self.k_noise_per_x > 1: @@ -296,8 +299,8 @@ def 
__call__(self, train_loader, valid_loader) -> None: ) if self.learn_rescaling: self.state_eta, self.state_xi, eta_predictions, xi_predictions, loss_a, loss_b = self.unbalancedness_step_fn( - source=source, - target=target, + source=batch["source"], + target=batch["target"], condition=batch["condition"], a=tmat.sum(axis=1), b=tmat.sum(axis=0), @@ -329,27 +332,23 @@ def loss_fn( params: jax.Array, batch: Dict[str, jnp.array], keys_model: random.PRNGKeyArray ): - target = jnp.concatenate([ - batch[el] for el in ["target", "target_q"] if batch[el] is not None - ], - axis=1) x_t = self.flow.compute_xt( - batch["noise"], batch["time"], batch["latent"], target + batch["noise"], batch["time"], batch["latent"], batch["target"] ) apply_fn = functools.partial( state_neural_vector_field.apply_fn, {"params": params} ) cond_input = jnp.concatenate([ - batch[el] - for el in ["source", "source_q", "condition"] - if batch[el] is not None + batch[el] for el in ["source", "condition"] if batch[el] is not None ], axis=1) v_t = jax.vmap(apply_fn)( t=batch["time"], x=x_t, condition=cond_input, keys_model=keys_model ) - u_t = self.flow.compute_ut(batch["time"], batch["latent"], target) + u_t = self.flow.compute_ut( + batch["time"], batch["latent"], batch["target"] + ) return jnp.mean((v_t - u_t) ** 2) keys_model = random.split(key, len(batch["noise"])) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index ea4280f8f..be2aa7a3c 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -18,6 +18,7 @@ import optax import pytest +from ott.geometry import costs from ott.neural.models.models import NeuralVectorField, Rescaling_MLP from ott.neural.solvers.flows import OffsetUniformSampler, UniformSampler from ott.neural.solvers.genot import GENOT @@ -41,7 +42,7 @@ def test_genot_linear_unconditional( neural_vf = NeuralVectorField( output_dim=target_dim, - condition_dim=condition_dim, + condition_dim=source_dim + condition_dim, latent_embed_dim=5, ) 
ot_solver = sinkhorn.Sinkhorn() @@ -55,8 +56,11 @@ def test_genot_linear_unconditional( iterations=3, valid_freq=2, ot_solver=ot_solver, - time_sampler=time_sampler, + epsilon=0.1, + cost_fn=costs.SqEuclidean(), + scale_cost=1.0, optimizer=optimizer, + time_sampler=time_sampler, k_samples_per_x=k_noise_per_x, ) genot(genot_data_loader_linear, genot_data_loader_linear) @@ -82,7 +86,7 @@ def test_genot_quad_unconditional( condition_dim = 0 neural_vf = NeuralVectorField( output_dim=target_dim, - condition_dim=condition_dim, + condition_dim=source_dim + condition_dim, latent_embed_dim=5, ) ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) @@ -93,12 +97,14 @@ def test_genot_quad_unconditional( input_dim=source_dim, output_dim=target_dim, cond_dim=condition_dim, - epsilon=None, iterations=3, valid_freq=2, ot_solver=ot_solver, - time_sampler=time_sampler, + epsilon=None, + cost_fn=costs.SqEuclidean(), + scale_cost=1.0, optimizer=optimizer, + time_sampler=time_sampler, k_samples_per_x=k_noise_per_x, ) genot(genot_data_loader_quad, genot_data_loader_quad) @@ -121,11 +127,11 @@ def test_genot_fused_unconditional( condition_dim = 0 neural_vf = NeuralVectorField( output_dim=target_dim, - condition_dim=condition_dim, + condition_dim=source_dim + condition_dim, latent_embed_dim=5, ) ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) - time_sampler = UniformSampler() + UniformSampler() optimizer = optax.adam(learning_rate=1e-3) genot = GENOT( neural_vf, @@ -136,7 +142,8 @@ def test_genot_fused_unconditional( iterations=3, valid_freq=2, ot_solver=ot_solver, - time_sampler=time_sampler, + cost_fn=costs.SqEuclidean(), + scale_cost=1.0, optimizer=optimizer, fused_penalty=0.5, k_samples_per_x=k_noise_per_x, @@ -144,7 +151,9 @@ def test_genot_fused_unconditional( genot(genot_data_loader_fused, genot_data_loader_fused) result_forward = genot.transport( - source_quad, condition=condition, forward=True + jnp.concatenate((source_lin, source_quad), axis=1), + 
condition=condition, + forward=True ) assert isinstance(result_forward, jax.Array) assert jnp.sum(jnp.isnan(result_forward)) == 0 @@ -176,8 +185,11 @@ def test_genot_linear_conditional( iterations=3, valid_freq=2, ot_solver=ot_solver, - time_sampler=time_sampler, + epsilon=0.1, + cost_fn=costs.SqEuclidean(), + scale_cost=1.0, optimizer=optimizer, + time_sampler=time_sampler, k_samples_per_x=k_noise_per_x, ) genot( @@ -196,17 +208,17 @@ def test_genot_linear_conditional( @pytest.mark.parametrize("k_noise_per_x", [1, 2]) def test_genot_quad_conditional( - self, genot_data_loader_quad: Iterator, k_noise_per_x: int + self, genot_data_loader_quad_conditional: Iterator, k_noise_per_x: int ): source_lin, source_quad, target_lin, target_quad, condition = next( - genot_data_loader_quad + genot_data_loader_quad_conditional ) source_dim = source_quad.shape[1] target_dim = target_quad.shape[1] condition_dim = condition.shape[1] neural_vf = NeuralVectorField( output_dim=target_dim, - condition_dim=condition_dim, + condition_dim=source_dim + condition_dim, latent_embed_dim=5, ) ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) @@ -217,15 +229,19 @@ def test_genot_quad_conditional( input_dim=source_dim, output_dim=target_dim, cond_dim=condition_dim, - epsilon=None, iterations=3, valid_freq=2, ot_solver=ot_solver, - time_sampler=time_sampler, + epsilon=None, + cost_fn=costs.SqEuclidean(), + scale_cost=1.0, optimizer=optimizer, + time_sampler=time_sampler, k_samples_per_x=k_noise_per_x, ) - genot(genot_data_loader_quad, genot_data_loader_quad) + genot( + genot_data_loader_quad_conditional, genot_data_loader_quad_conditional + ) result_forward = genot.transport( source_quad, condition=condition, forward=True @@ -235,17 +251,17 @@ def test_genot_quad_conditional( @pytest.mark.parametrize("k_noise_per_x", [1, 2]) def test_genot_fused_conditional( - self, genot_data_loader_fused: Iterator, k_noise_per_x: int + self, genot_data_loader_fused_conditional: Iterator, 
k_noise_per_x: int ): source_lin, source_quad, target_lin, target_quad, condition = next( - genot_data_loader_fused + genot_data_loader_fused_conditional ) source_dim = source_lin.shape[1] + source_quad.shape[1] target_dim = target_lin.shape[1] + target_quad.shape[1] condition_dim = condition.shape[1] neural_vf = NeuralVectorField( output_dim=target_dim, - condition_dim=condition_dim, + condition_dim=source_dim + condition_dim, latent_embed_dim=5, ) ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) @@ -256,19 +272,24 @@ def test_genot_fused_conditional( input_dim=source_dim, output_dim=target_dim, cond_dim=condition_dim, - epsilon=None, iterations=3, valid_freq=2, ot_solver=ot_solver, - time_sampler=time_sampler, + epsilon=None, + cost_fn=costs.SqEuclidean(), + scale_cost=1.0, optimizer=optimizer, - fused_penalty=0.5, + time_sampler=time_sampler, k_samples_per_x=k_noise_per_x, ) - genot(genot_data_loader_fused, genot_data_loader_fused) + genot( + genot_data_loader_fused_conditional, genot_data_loader_fused_conditional + ) result_forward = genot.transport( - source_quad, condition=condition, forward=True + jnp.concatenate((source_lin, source_quad), axis=1), + condition=condition, + forward=True ) assert isinstance(result_forward, jax.Array) assert jnp.sum(jnp.isnan(result_forward)) == 0 @@ -281,7 +302,7 @@ def test_genot_linear_learn_rescaling( data_loader = genot_data_loader_linear_conditional if conditional else genot_data_loader_linear source_lin, source_quad, target_lin, target_quad, condition = next( - genot_data_loader_linear + data_loader ) source_dim = source_lin.shape[1] target_dim = target_lin.shape[1] @@ -289,7 +310,7 @@ def test_genot_linear_learn_rescaling( neural_vf = NeuralVectorField( output_dim=target_dim, - condition_dim=condition_dim, + condition_dim=source_dim + condition_dim, latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() @@ -307,18 +328,18 @@ def test_genot_linear_learn_rescaling( iterations=3, valid_freq=2, 
ot_solver=ot_solver, - time_sampler=time_sampler, + epsilon=0.1, + cost_fn=costs.SqEuclidean(), + scale_cost=1.0, optimizer=optimizer, + time_sampler=time_sampler, tau_a=tau_a, tau_b=tau_b, mlp_eta=mlp_eta, mlp_xi=mlp_xi, ) - genot(data_loader, data_loader) - source_lin, source_quad, target_lin, target_quad, condition = next( - genot_data_loader_linear - ) + genot(data_loader, data_loader) result_eta = genot.evaluate_eta(source_lin, condition=condition) assert isinstance(result_eta, jax.Array) From add1348cec204d6ffb7162fda8301b74ed145761 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Mon, 27 Nov 2023 15:37:23 +0100 Subject: [PATCH 028/186] k_samples_per_x working in GENOT --- src/ott/neural/solvers/base_solver.py | 29 ++++++++----- src/ott/neural/solvers/genot.py | 50 ++++++++++------------ tests/neural/genot_test.py | 61 ++++++++++++++++++--------- 3 files changed, 82 insertions(+), 58 deletions(-) diff --git a/src/ott/neural/solvers/base_solver.py b/src/ott/neural/solvers/base_solver.py index 80dc8e616..2c8f7541e 100644 --- a/src/ott/neural/solvers/base_solver.py +++ b/src/ott/neural/solvers/base_solver.py @@ -22,7 +22,7 @@ from flax.training import train_state from jax import random -from ott.geometry import pointcloud +from ott.geometry import costs, pointcloud from ott.geometry.pointcloud import PointCloud from ott.neural.models import models from ott.problems.linear import linear_problem @@ -108,6 +108,7 @@ def _sample_conditional_indices_from_tmap( *, source_is_balanced: bool, ) -> Tuple[jnp.array, jnp.array]: + batch_size = tmat.shape[0] left_marginals = tmat.sum(axis=1) if not source_is_balanced: key, key2 = jax.random.split(key, 2) @@ -118,12 +119,12 @@ def _sample_conditional_indices_from_tmap( shape=(len(left_marginals),) ) else: - indices = jnp.arange(tmat.shape[0]) + indices = jnp.arange(batch_size) tmat_adapted = tmat[indices] indices_per_row = jax.vmap( lambda tmat_adapted: jax.random.choice( key=key, - a=jnp.arange(tmat.shape[1]), + 
a=jnp.arange(batch_size), p=tmat_adapted, shape=(k_samples_per_x,) ), @@ -134,21 +135,27 @@ def _sample_conditional_indices_from_tmap( ) indices_source = jnp.repeat(indices, k_samples_per_x) - indices_target = indices_per_row % tmat.shape[1] + indices_target = jnp.reshape( + indices_per_row % tmat.shape[1], (batch_size * k_samples_per_x,) + ) return tuple( - b[indices_source, :] if b is not None else None for b in source_arrays + jnp.reshape(b[indices_source], (k_samples_per_x, batch_size, + -1)) if b is not None else None + for b in source_arrays ), tuple( - b[indices_target, :] if b is not None else None for b in target_arrays + jnp.reshape(b[indices_target, :], (k_samples_per_x, batch_size, + -1)) if b is not None else None + for b in target_arrays ) def _get_sinkhorn_match_fn( self, ot_solver: Any, - epsilon: float, - cost_fn: str, - scale_cost: Any, - tau_a: float, - tau_b: float, + epsilon: float = 1e-2, + cost_fn: Any = costs.SqEuclidean(), + scale_cost: Any = "mean", + tau_a: float = 1.0, + tau_b: float = 1.0, *, filter_input: bool = False, ) -> Callable: diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/solvers/genot.py index cfb1b6158..377ef033d 100644 --- a/src/ott/neural/solvers/genot.py +++ b/src/ott/neural/solvers/genot.py @@ -137,7 +137,6 @@ def __init__( mlp_xi=mlp_xi, unbalanced_kwargs=unbalanced_kwargs, ) - if isinstance( ot_solver, gromov_wasserstein.GromovWasserstein ) and epsilon is not None: @@ -161,7 +160,7 @@ def __init__( self.input_dim = input_dim self.output_dim = output_dim self.cond_dim = cond_dim - self.k_noise_per_x = k_samples_per_x + self.k_samples_per_x = k_samples_per_x # OT data-data matching parameters self.ot_solver = ot_solver @@ -175,7 +174,7 @@ def __init__( self.kwargs_solver_latent_to_data = kwargs_solver_latent_to_data # callback parameteres - self.callbac_fn = callback_fn + self.callback_fn = callback_fn self.setup() def setup(self) -> None: @@ -227,13 +226,11 @@ def __call__(self, train_loader, valid_loader) 
-> None: batch_size = len( batch["source_lin"] ) if batch["source_lin"] is not None else len(batch["source_q"]) - n_samples = batch_size * self.k_noise_per_x + n_samples = batch_size * self.k_samples_per_x batch["time"] = self.time_sampler(rng_time, n_samples) batch["noise"] = self.sample_noise(rng_noise, n_samples) batch["latent"] = self.latent_noise_fn( - rng_noise, - shape=(batch_size, self.k_noise_per_x) if self.k_noise_per_x > 1 else - (batch_size,) + rng_noise, shape=(self.k_samples_per_x, batch_size) ) tmat = self.match_fn( @@ -253,43 +250,40 @@ def __call__(self, train_loader, valid_loader) -> None: if batch[el] is not None ], axis=1) + + batch = { + k: v + for k, v in batch.items() + if k in ["source", "target", "condition", "time", "noise", "latent"] + } + (batch["source"], batch["condition"] ), (batch["target"],) = self._sample_conditional_indices_from_tmap( rng_resample, tmat, - self.k_noise_per_x, (batch["source"], batch["condition"]), + self.k_samples_per_x, (batch["source"], batch["condition"]), (batch["target"],), source_is_balanced=(self.tau_a == 1.0) ) - rng_latent = jax.random.split(rng_noise, batch_size * self.k_noise_per_x) + jax.random.split(rng_noise, batch_size * self.k_samples_per_x) if self.solver_latent_to_data is not None: tmats_latent_data = jnp.array( jax.vmap(self.match_latent_to_data_fn, 0, - 0)(key=rng_latent, x=batch["latent"], y=batch["target"]) + 0)(x=batch["latent"], y=batch["target"]) ) - if self.k_noise_per_x > 1: - raise NotImplementedError rng_latent_data_match = jax.random.split( - rng_latent_data_match, batch_size + rng_latent_data_match, self.k_samples_per_x + ) + (batch["source"], batch["condition"] + ), (batch["target"],) = jax.vmap(self._resample_data, 0, 0)( + rng_latent_data_match, tmats_latent_data, + (batch["source"], batch["condition"]), (batch["target"],) ) - (batch["source"], batch["source_q"], batch["condition"] - ), (batch["target"], - batch["target_q"]) = jax.vmap(self._resample_data, 0, 0)( - 
rng_latent_data_match, tmats_latent_data, - (batch["source"], batch["source_q"], batch["condition"]), - (batch["target"], batch["target_q"]) - ) - #(batch["source"], batch["source_q"], batch["condition"] - #), (batch["target"], batch["target_q"]) = self._resample_data( - # rng_latent_data_match, tmat_latent_data, - # (batch["source"], batch["source_q"], batch["condition"]), - # (batch["target"], batch["target_q"]) - #) batch = { key: - jnp.reshape(arr, (batch_size * self.k_noise_per_x, + jnp.reshape(arr, (batch_size * self.k_samples_per_x, -1)) if arr is not None else None for key, arr in batch.items() } @@ -374,7 +368,7 @@ def transport( :attr:`~ott.neural.flows.BaseTimeSampler.low` to :attr:`~ott.neural.flows.BaseTimeSampler.high`. Args: - data: Initial condition of the ODE. + source: Data to transport. condition: Condition of the input data. rng: random seed for sampling from the latent distribution. forward: If `True` integrates forward, otherwise backwards. diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index be2aa7a3c..0c4abb55e 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -11,7 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Iterator +from typing import Iterator, Optional import jax import jax.numpy as jnp @@ -29,10 +29,14 @@ class TestGENOT: #TODO: add tests for unbalancedness - @pytest.mark.parametrize("k_noise_per_x", [1, 2]) + @pytest.mark.parametrize("k_samples_per_x", [1, 2]) + @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) def test_genot_linear_unconditional( - self, genot_data_loader_linear: Iterator, k_noise_per_x: int + self, genot_data_loader_linear: Iterator, k_samples_per_x: int, + solver_latent_to_data: Optional[str] ): + solver_latent_to_data = None if solver_latent_to_data is None else sinkhorn.Sinkhorn( + ) source_lin, source_quad, target_lin, target_quad, condition = next( genot_data_loader_linear ) @@ -61,7 +65,8 @@ def test_genot_linear_unconditional( scale_cost=1.0, optimizer=optimizer, time_sampler=time_sampler, - k_samples_per_x=k_noise_per_x, + k_samples_per_x=k_samples_per_x, + solver_latent_to_data=solver_latent_to_data, ) genot(genot_data_loader_linear, genot_data_loader_linear) @@ -74,10 +79,13 @@ def test_genot_linear_unconditional( assert isinstance(result_forward, jax.Array) assert jnp.sum(jnp.isnan(result_forward)) == 0 - @pytest.mark.parametrize("k_noise_per_x", [1, 2]) + @pytest.mark.parametrize("k_samples_per_x", [1, 2]) + @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) def test_genot_quad_unconditional( - self, genot_data_loader_quad: Iterator, k_noise_per_x: int + self, genot_data_loader_quad: Iterator, k_samples_per_x: int, + solver_latent_to_data: Optional[str] ): + None if solver_latent_to_data is None else sinkhorn.Sinkhorn() source_lin, source_quad, target_lin, target_quad, condition = next( genot_data_loader_quad ) @@ -105,7 +113,7 @@ def test_genot_quad_unconditional( scale_cost=1.0, optimizer=optimizer, time_sampler=time_sampler, - k_samples_per_x=k_noise_per_x, + k_samples_per_x=k_samples_per_x, ) genot(genot_data_loader_quad, genot_data_loader_quad) @@ -115,10 +123,13 @@ def 
test_genot_quad_unconditional( assert isinstance(result_forward, jax.Array) assert jnp.sum(jnp.isnan(result_forward)) == 0 - @pytest.mark.parametrize("k_noise_per_x", [1, 2]) + @pytest.mark.parametrize("k_samples_per_x", [1, 2]) + @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) def test_genot_fused_unconditional( - self, genot_data_loader_fused: Iterator, k_noise_per_x: int + self, genot_data_loader_fused: Iterator, k_samples_per_x: int, + solver_latent_to_data: Optional[str] ): + None if solver_latent_to_data is None else sinkhorn.Sinkhorn() source_lin, source_quad, target_lin, target_quad, condition = next( genot_data_loader_fused ) @@ -146,7 +157,7 @@ def test_genot_fused_unconditional( scale_cost=1.0, optimizer=optimizer, fused_penalty=0.5, - k_samples_per_x=k_noise_per_x, + k_samples_per_x=k_samples_per_x, ) genot(genot_data_loader_fused, genot_data_loader_fused) @@ -158,10 +169,13 @@ def test_genot_fused_unconditional( assert isinstance(result_forward, jax.Array) assert jnp.sum(jnp.isnan(result_forward)) == 0 - @pytest.mark.parametrize("k_noise_per_x", [1, 2]) + @pytest.mark.parametrize("k_samples_per_x", [1, 2]) + @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) def test_genot_linear_conditional( - self, genot_data_loader_linear_conditional: Iterator, k_noise_per_x: int + self, genot_data_loader_linear_conditional: Iterator, + k_samples_per_x: int, solver_latent_to_data: Optional[str] ): + None if solver_latent_to_data is None else sinkhorn.Sinkhorn() source_lin, source_quad, target_lin, target_quad, condition = next( genot_data_loader_linear_conditional ) @@ -190,7 +204,7 @@ def test_genot_linear_conditional( scale_cost=1.0, optimizer=optimizer, time_sampler=time_sampler, - k_samples_per_x=k_noise_per_x, + k_samples_per_x=k_samples_per_x, ) genot( genot_data_loader_linear_conditional, @@ -206,10 +220,13 @@ def test_genot_linear_conditional( assert isinstance(result_forward, jax.Array) assert 
jnp.sum(jnp.isnan(result_forward)) == 0 - @pytest.mark.parametrize("k_noise_per_x", [1, 2]) + @pytest.mark.parametrize("k_samples_per_x", [1, 2]) + @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) def test_genot_quad_conditional( - self, genot_data_loader_quad_conditional: Iterator, k_noise_per_x: int + self, genot_data_loader_quad_conditional: Iterator, k_samples_per_x: int, + solver_latent_to_data: Optional[str] ): + None if solver_latent_to_data is None else sinkhorn.Sinkhorn() source_lin, source_quad, target_lin, target_quad, condition = next( genot_data_loader_quad_conditional ) @@ -237,7 +254,7 @@ def test_genot_quad_conditional( scale_cost=1.0, optimizer=optimizer, time_sampler=time_sampler, - k_samples_per_x=k_noise_per_x, + k_samples_per_x=k_samples_per_x, ) genot( genot_data_loader_quad_conditional, genot_data_loader_quad_conditional @@ -249,10 +266,13 @@ def test_genot_quad_conditional( assert isinstance(result_forward, jax.Array) assert jnp.sum(jnp.isnan(result_forward)) == 0 - @pytest.mark.parametrize("k_noise_per_x", [1, 2]) + @pytest.mark.parametrize("k_samples_per_x", [1, 2]) + @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) def test_genot_fused_conditional( - self, genot_data_loader_fused_conditional: Iterator, k_noise_per_x: int + self, genot_data_loader_fused_conditional: Iterator, k_samples_per_x: int, + solver_latent_to_data: Optional[str] ): + None if solver_latent_to_data is None else sinkhorn.Sinkhorn() source_lin, source_quad, target_lin, target_quad, condition = next( genot_data_loader_fused_conditional ) @@ -280,7 +300,7 @@ def test_genot_fused_conditional( scale_cost=1.0, optimizer=optimizer, time_sampler=time_sampler, - k_samples_per_x=k_noise_per_x, + k_samples_per_x=k_samples_per_x, ) genot( genot_data_loader_fused_conditional, genot_data_loader_fused_conditional @@ -295,10 +315,13 @@ def test_genot_fused_conditional( assert jnp.sum(jnp.isnan(result_forward)) == 0 
@pytest.mark.parametrize("conditional", [False, True]) + @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) def test_genot_linear_learn_rescaling( self, conditional: bool, genot_data_loader_linear: Iterator, + solver_latent_to_data: Optional[str], genot_data_loader_linear_conditional: Iterator ): + None if solver_latent_to_data is None else sinkhorn.Sinkhorn() data_loader = genot_data_loader_linear_conditional if conditional else genot_data_loader_linear source_lin, source_quad, target_lin, target_quad, condition = next( From 993d1de2f9f982e1571af8f6c79bcc6806f5214b Mon Sep 17 00:00:00 2001 From: lucaeyring Date: Mon, 27 Nov 2023 20:16:22 +0100 Subject: [PATCH 029/186] [ci skip] changed dataloaders to numpy and dict return --- src/ott/neural/models/models.py | 1 + src/ott/neural/solvers/base_solver.py | 2 +- src/ott/neural/solvers/otfm.py | 69 +++++-- tests/neural/conftest.py | 248 ++++++++++++++++---------- tests/neural/otfm_test.py | 28 +-- 5 files changed, 219 insertions(+), 129 deletions(-) diff --git a/src/ott/neural/models/models.py b/src/ott/neural/models/models.py index 62edee7ef..6bb075ff3 100644 --- a/src/ott/neural/models/models.py +++ b/src/ott/neural/models/models.py @@ -552,6 +552,7 @@ def __call__( x ) if self.condition_dim > 0: + condition = jnp.atleast_1d(condition) condition = Block( dim=self.hidden_dim, out_dim=self.hidden_dim, diff --git a/src/ott/neural/solvers/base_solver.py b/src/ott/neural/solvers/base_solver.py index 2c8f7541e..e7216da46 100644 --- a/src/ott/neural/solvers/base_solver.py +++ b/src/ott/neural/solvers/base_solver.py @@ -447,7 +447,7 @@ def evaluate_eta( """ if self.state_eta is None: raise ValueError("The left rescaling factor was not parameterized.") - return self.state_xi.apply_fn({"params": self.state_eta.params}, + return self.state_eta.apply_fn({"params": self.state_eta.params}, x=source, condition=condition) diff --git a/src/ott/neural/solvers/otfm.py b/src/ott/neural/solvers/otfm.py index 
25684016f..6e6f419ca 100644 --- a/src/ott/neural/solvers/otfm.py +++ b/src/ott/neural/solvers/otfm.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from collections import defaultdict import functools import types from typing import Any, Callable, Dict, Mapping, Optional, Tuple, Type @@ -35,6 +36,7 @@ BaseTimeSampler, ) from ott.solvers import was_solver +from ott.tools.sinkhorn_divergence import sinkhorn_divergence __all__ = ["OTFlowMatching"] @@ -63,6 +65,7 @@ class OTFlowMatching(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): mlp_xi: Neural network to learn the right rescaling function as suggested in :cite:`TODO`. If `None`, the right rescaling factor is not learnt. unbalanced_kwargs: Keyword arguments for the unbalancedness solver. callback_fn: Callback function. + num_eval_samples: Number of samples to evaluate on during evaluation. rng: Random number generator. 
Returns: @@ -76,7 +79,6 @@ def __init__( input_dim: int, cond_dim: int, iterations: int, - valid_freq: int, ot_solver: Optional[Type[was_solver.WassersteinSolver]], flow: Type[BaseFlow], time_sampler: Type[BaseTimeSampler], @@ -91,6 +93,9 @@ def __init__( unbalanced_kwargs: Dict[str, Any] = {}, callback_fn: Optional[Callable[[jax.Array, jax.Array, jax.Array], Any]] = None, + logging_freq: int = 100, + valid_freq: int = 5000, + num_eval_samples: int = 1000, rng: random.PRNGKeyArray = random.PRNGKey(0), ) -> None: rng, rng_unbalanced = random.split(rng) @@ -122,6 +127,9 @@ def __init__( self.callback_fn = callback_fn self.checkpoint_manager = checkpoint_manager self.rng = rng + self.logging_freq = logging_freq + self.num_eval_samples = num_eval_samples + self._training_logs: Mapping[str, Any] = defaultdict(list) self.setup() @@ -146,6 +154,7 @@ def setup(self) -> None: def _get_step_fn(self) -> Callable: + @jax.jit def step_fn( key: random.PRNGKeyArray, state_neural_vector_field: train_state.TrainState, @@ -157,17 +166,17 @@ def loss_fn( batch: Dict[str, jax.Array], keys_model: random.PRNGKeyArray ) -> jax.Array: - x_t = self.flow.compute_xt(noise, t, batch["source"], batch["target"]) + x_t = self.flow.compute_xt(noise, t, batch["source_lin"], batch["target_lin"]) apply_fn = functools.partial( state_neural_vector_field.apply_fn, {"params": params} ) v_t = jax.vmap(apply_fn)( - t=t, x=x_t, condition=batch["condition"], keys_model=keys_model + t=t, x=x_t, condition=batch["source_conditions"], keys_model=keys_model ) - u_t = self.flow.compute_ut(t, batch["source"], batch["target"]) + u_t = self.flow.compute_ut(t, batch["source_lin"], batch["target_lin"]) return jnp.mean((v_t - u_t) ** 2) - batch_size = len(batch["source"]) + batch_size = len(batch["source_lin"]) key_noise, key_t, key_model = random.split(key, 3) keys_model = random.split(key_model, batch_size) t = self.time_sampler(key_t, batch_size) @@ -191,24 +200,50 @@ def __call__(self, train_loader, valid_loader) 
-> None: None """ batch: Mapping[str, jax.Array] = {} + curr_loss = 0.0 + """ + if self.num_eval_samples > 0: + eval_batch_source, eval_batch_target = [], [] + for iter in range(self.num_eval_samples): + batch = next( + valid_loader + ) + eval_batch_source.append(batch["source_lin"]) + eval_batch_target.append(batch["target_lin"]) + eval_batch_source = jnp.stack(eval_batch_source) + eval_batch_target = jnp.stack(eval_batch_target) + self._training_logs["data_sink_div"].append( + sinkhorn_divergence( + eval_batch_source, + eval_batch_target, + self.epsilon, + self.cost_fn, + self.scale_cost, + ) + )""" + for iter in range(self.iterations): rng_resample, rng_step_fn, self.rng = random.split(self.rng, 3) - batch["source"], batch["target"], batch["condition"] = next(train_loader) + batch = next(train_loader) if self.ot_solver is not None: - tmat = self.match_fn(batch["source"], batch["target"]) - (batch["source"], - batch["condition"]), (batch["target"],) = self._resample_data( - rng_resample, tmat, (batch["source"], batch["condition"]), - (batch["target"],) + tmat = self.match_fn(batch["source_lin"], batch["target_lin"]) + (batch["source_lin"], + batch["source_conditions"]), (batch["target_lin"], batch["target_conditions"]) = self._resample_data( + rng_resample, tmat, (batch["source_lin"], batch["source_conditions"]), + (batch["target_lin"], batch["target_conditions"]) ) self.state_neural_vector_field, loss = self.step_fn( rng_step_fn, self.state_neural_vector_field, batch ) + curr_loss += loss + if iter % self.logging_freq == 0: + self._training_logs["loss"].append(curr_loss / self.logging_freq) + curr_loss = 0.0 if self.learn_rescaling: self.state_eta, self.state_xi, eta_predictions, xi_predictions, loss_a, loss_b = self.unbalancedness_step_fn( - source=batch["source"], - target=batch["target"], - condition=batch["condition"], + source=batch["source_lin"], + target=batch["target_lin"], + condition=batch["source_conditions"], a=tmat.sum(axis=1), b=tmat.sum(axis=0), 
state_eta=self.state_eta, @@ -220,8 +255,8 @@ def __call__(self, train_loader, valid_loader) -> None: states_to_save = { "state_neural_vector_field": self.state_neural_vector_field } - if self.state_mlp is not None: - states_to_save["state_eta"] = self.state_mlp + if self.state_eta is not None: + states_to_save["state_eta"] = self.state_eta if self.state_xi is not None: states_to_save["state_xi"] = self.state_xi self.checkpoint_manager.save(iter, states_to_save) @@ -254,6 +289,7 @@ def transport( t0, t1 = (self.time_sampler.low, self.time_sampler.high ) if forward else (self.time_sampler.high, self.time_sampler.low) + @jax.jit def solve_ode(input: jax.Array, cond: jax.Array): return diffrax.diffeqsolve( diffrax.ODETerm( @@ -280,6 +316,7 @@ def solve_ode(input: jax.Array, cond: jax.Array): def _valid_step(self, valid_loader, iter) -> None: next(valid_loader) # TODO: add callback and logging + @property def learn_rescaling(self) -> bool: diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index a5fdc1c2b..008036790 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -1,7 +1,6 @@ -from typing import Dict, Iterator, Optional +from typing import Dict, Iterator, Mapping, Optional -import jax -import jax.numpy as jnp +import numpy as np import pytest @@ -9,95 +8,105 @@ class DataLoader: def __init__( self, - source_data: jax.Array, - target_data: jax.Array, - conditions: Optional[jax.Array], - batch_size: int = 64 + source_data: np.ndarray, + target_data: np.ndarray, + batch_size: int = 64, + source_conditions: Optional[np.ndarray] = None, + target_conditions: Optional[np.ndarray] = None, ) -> None: super().__init__() self.source_data = source_data self.target_data = target_data - self.conditions = conditions + self.source_conditions = source_conditions + self.target_conditions = target_conditions self.batch_size = batch_size - self.key = jax.random.PRNGKey(0) + self.rng = np.random.default_rng(seed=0) - def __next__(self) -> jax.Array: - 
key, self.key = jax.random.split(self.key) - inds_source = jax.random.choice( - key, len(self.source_data), shape=[self.batch_size] + def __next__(self) -> Mapping[str, np.ndarray]: + inds_source = self.rng.choice( + len(self.source_data), size=[self.batch_size] ) - inds_target = jax.random.choice( - key, len(self.target_data), shape=[self.batch_size] + inds_target = self.rng.choice( + len(self.target_data), size=[self.batch_size] ) - return self.source_data[inds_source, :], self.target_data[ - inds_target, :], self.conditions[ - inds_source, :] if self.conditions is not None else None + return { + "source_lin": + self.source_data[inds_source, :], + "target_lin": + self.target_data[inds_target, :], + "source_conditions": + self.source_conditions[inds_source, :] + if self.source_conditions is not None else None, + "target_conditions": + self.target_conditions[inds_target, :] + if self.target_conditions is not None else None, + } class ConditionalDataLoader: - def __init__( - self, rng: jax.random.KeyArray, dataloaders: Dict[str, Iterator], - p: jax.Array - ) -> None: + def __init__(self, dataloaders: Dict[str, Iterator], p: np.ndarray) -> None: super().__init__() - self.rng = rng self.dataloaders = dataloaders self.conditions = list(dataloaders.keys()) self.p = p - - def __next__(self) -> jax.Array: - self.rng, rng = jax.random.split(self.rng, 2) - idx = jax.random.choice(rng, len(self.conditions), p=self.p) + self.rng = np.random.default_rng(seed=0) + + def __next__(self, cond: str = None) -> Mapping[str, np.ndarray]: + if cond is not None: + if cond not in self.conditions: + raise ValueError(f"Condition {cond} not in {self.conditions}") + return next(self.dataloaders[cond]) + idx = self.rng.choice(len(self.conditions), p=self.p) return next(self.dataloaders[self.conditions[idx]]) @pytest.fixture(scope="module") def data_loader_gaussian(): """Returns a data loader for a simple Gaussian mixture.""" - source = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) 
- target = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + 1.0 - return DataLoader(source, target, None, 16) + rng = np.random.default_rng(seed=0) + source = rng.normal(size=(100, 2)) + target = rng.normal(size=(100, 2)) + 1.0 + return DataLoader(source, target, 16) @pytest.fixture(scope="module") def data_loader_gaussian_conditional(): """Returns a data loader for Gaussian mixtures with conditions.""" - source_0 = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) - target_0 = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + 2.0 - - source_1 = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) - target_1 = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) - 2.0 - dl0 = DataLoader(source_0, target_0, jnp.zeros_like(source_0) * 0.0, 16) - dl1 = DataLoader(source_1, target_1, jnp.ones_like(source_1) * 1.0, 16) - - return ConditionalDataLoader( - jax.random.PRNGKey(0), { - "0": dl0, - "1": dl1 - }, jnp.array([0.5, 0.5]) - ) + rng = np.random.default_rng(seed=0) + source_0 = rng.normal(size=(100, 2)) + target_0 = rng.normal(size=(100, 2)) + 2.0 + + source_1 = rng.normal(size=(100, 2)) + target_1 = rng.normal(size=(100, 2)) - 2.0 + dl0 = DataLoader(source_0, target_0, 16, source_conditions=np.zeros_like(source_0) * 0.0) + dl1 = DataLoader(source_1, target_1, 16, source_conditions=np.ones_like(source_1) * 1.0) + + return ConditionalDataLoader({"0": dl0, "1": dl1}, np.array([0.5, 0.5])) @pytest.fixture(scope="module") def data_loader_gaussian_with_conditions(): """Returns a data loader for a simple Gaussian mixture with conditions.""" - source = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) - conditions = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 1)) - target = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + 1.0 - return DataLoader(source, target, conditions, 16) + rng = np.random.default_rng(seed=0) + source = rng.normal(size=(100, 2)) + target = rng.normal(size=(100, 2)) + 1.0 + source_conditions = 
rng.normal(size=(100, 1)) + target_conditions = rng.normal(size=(100, 1)) - 1.0 + return DataLoader(source, target, 16, source_conditions, target_conditions) class GENOTDataLoader: def __init__( self, - source_lin: Optional[jax.Array], - source_quad: Optional[jax.Array], - target_lin: Optional[jax.Array], - target_quad: Optional[jax.Array], - conditions: Optional[jax.Array], - batch_size: int = 64 + batch_size: int = 64, + source_lin: Optional[np.ndarray] = None, + source_quad: Optional[np.ndarray] = None, + target_lin: Optional[np.ndarray] = None, + target_quad: Optional[np.ndarray] = None, + source_conditions: Optional[np.ndarray] = None, + target_conditions: Optional[np.ndarray] = None, ) -> None: super().__init__() if source_lin is not None: @@ -108,8 +117,8 @@ def __init__( self.n_source = len(source_lin) else: self.n_source = len(source_quad) - if conditions is not None: - assert len(conditions) == self.n_source + if source_conditions is not None: + assert len(source_conditions) == self.n_source if target_lin is not None: if target_quad is not None: assert len(target_lin) == len(target_quad) @@ -118,83 +127,126 @@ def __init__( self.n_target = len(target_lin) else: self.n_target = len(target_quad) + if target_conditions is not None: + assert len(target_conditions) == self.n_target self.source_lin = source_lin self.target_lin = target_lin self.source_quad = source_quad self.target_quad = target_quad - self.conditions = conditions + self.source_conditions = source_conditions + self.target_conditions = target_conditions self.batch_size = batch_size - self.key = jax.random.PRNGKey(0) - - def __next__(self) -> jax.Array: - key, self.key = jax.random.split(self.key) - inds_source = jax.random.choice(key, self.n_source, shape=[self.batch_size]) - inds_target = jax.random.choice(key, self.n_target, shape=[self.batch_size]) - return self.source_lin[ - inds_source, : - ] if self.source_lin is not None else None, self.source_quad[ - inds_source, : - ] if 
self.source_quad is not None else None, self.target_lin[ - inds_target, : - ] if self.target_lin is not None else None, self.target_quad[ - inds_target, : - ] if self.target_quad is not None else None, self.conditions[ - inds_source, :] if self.conditions is not None else None + self.rng = np.random.default_rng(seed=0) + + def __next__(self) -> Mapping[str, np.ndarray]: + inds_source = self.rng.choice(self.n_source, size=[self.batch_size]) + inds_target = self.rng.choice(self.n_target, size=[self.batch_size]) + return { + "source_lin": + self.source_lin[inds_source, :] + if self.source_lin is not None else None, + "source_quad": + self.source_quad[inds_source, :] + if self.source_quad is not None else None, + "target_lin": + self.target_lin[inds_target, :] + if self.target_lin is not None else None, + "target_quad": + self.target_quad[inds_target, :] + if self.target_quad is not None else None, + "source_conditions": + self.source_conditions[inds_source, :] + if self.source_conditions is not None else None, + "target_conditions": + self.target_conditions[inds_target, :] + if self.target_conditions is not None else None, + } @pytest.fixture(scope="module") def genot_data_loader_linear(): """Returns a data loader for a simple Gaussian mixture.""" - source = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) - target = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + 1.0 - return GENOTDataLoader(source, None, target, None, None, 16) + rng = np.random.default_rng(seed=0) + source = rng.normal(size=(100, 2)) + target = rng.normal(size=(100, 2)) + 1.0 + return GENOTDataLoader(16, source_lin=source, target_lin=target) @pytest.fixture(scope="module") def genot_data_loader_linear_conditional(): """Returns a data loader for a simple Gaussian mixture.""" - source = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) - target = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + 1.0 - conditions = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 
4)) - return GENOTDataLoader(source, None, target, None, conditions, 16) + rng = np.random.default_rng(seed=0) + source = rng.normal(size=(100, 2)) + target = rng.normal(size=(100, 2)) + 1.0 + conditions_source = rng.normal(size=(100, 4)) + conditions_target = rng.normal(size=(100, 4)) - 1.0 + return GENOTDataLoader( + 16, + source_lin=source, + target_lin=target, + conditions_source=conditions_source, + conditions_target=conditions_target + ) @pytest.fixture(scope="module") def genot_data_loader_quad(): """Returns a data loader for a simple Gaussian mixture.""" - source = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) - target = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 1)) + 1.0 - return GENOTDataLoader(None, source, None, target, None, 16) + rng = np.random.default_rng(seed=0) + source = rng.normal(size=(100, 2)) + target = rng.normal(size=(100, 1)) + 1.0 + return GENOTDataLoader(16, source_quad=source, target_quad=target) @pytest.fixture(scope="module") def genot_data_loader_quad_conditional(): """Returns a data loader for a simple Gaussian mixture.""" - source = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) - target = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 1)) + 1.0 - conditions = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 7)) - return GENOTDataLoader(None, source, None, target, conditions, 16) + rng = np.random.default_rng(seed=0) + source = rng.normal(size=(100, 2)) + target = rng.normal(size=(100, 1)) + 1.0 + conditions = rng.normal(size=(100, 7)) + return GENOTDataLoader( + 16, + source_quad=source, + target_quad=target, + source_conditions=conditions, + target_conditions=conditions + ) @pytest.fixture(scope="module") def genot_data_loader_fused(): """Returns a data loader for a simple Gaussian mixture.""" - source_q = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) - target_q = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 1)) + 1.0 - source_lin = 
jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) - target_lin = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + 1.0 - return GENOTDataLoader(source_lin, source_q, target_lin, target_q, None, 16) + rng = np.random.default_rng(seed=0) + source_q = rng.normal(size=(100, 2)) + target_q = rng.normal(size=(100, 1)) + 1.0 + source_lin = rng.normal(size=(100, 2)) + target_lin = rng.normal(size=(100, 2)) + 1.0 + return GENOTDataLoader( + 16, + source_lin=source_lin, + source_quad=source_q, + target_lin=target_lin, + target_quad=target_q + ) @pytest.fixture(scope="module") def genot_data_loader_fused_conditional(): """Returns a data loader for a simple Gaussian mixture.""" - source_q = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) - target_q = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 1)) + 1.0 - source_lin = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) - target_lin = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 2)) + 1.0 - conditions = jax.random.normal(jax.random.PRNGKey(0), shape=(100, 1)) + rng = np.random.default_rng(seed=0) + source_q = rng.normal(size=(100, 2)) + target_q = rng.normal(size=(100, 1)) + 1.0 + source_lin = rng.normal(size=(100, 2)) + target_lin = rng.normal(size=(100, 2)) + 1.0 + conditions = rng.normal(size=(100, 7)) return GENOTDataLoader( - source_lin, source_q, target_lin, target_q, conditions, 16 + 16, + source_lin=source_lin, + source_quad=source_q, + target_lin=target_lin, + target_quad=target_q, + source_conditions=conditions, + target_conditions=conditions ) diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index 1993432f2..9a75cb3fb 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -60,12 +60,12 @@ def test_flow_matching(self, data_loader_gaussian, flow: Type[BaseFlow]): ) fm(data_loader_gaussian, data_loader_gaussian) - source, target, condition = next(data_loader_gaussian) - result_forward = fm.transport(source, condition=condition, 
forward=True) + batch = next(data_loader_gaussian) + result_forward = fm.transport(batch["source_lin"], condition=batch["source_conditions"], forward=True) assert isinstance(result_forward, jax.Array) assert jnp.sum(jnp.isnan(result_forward)) == 0 - result_backward = fm.transport(target, condition=condition, forward=False) + result_backward = fm.transport(batch["target_lin"], condition=batch["target_conditions"], forward=False) assert isinstance(result_backward, jax.Array) assert jnp.sum(jnp.isnan(result_backward)) == 0 @@ -102,12 +102,12 @@ def test_flow_matching_with_conditions( data_loader_gaussian_with_conditions ) - source, target, condition = next(data_loader_gaussian_with_conditions) - result_forward = fm.transport(source, condition=condition, forward=True) + batch = next(data_loader_gaussian_with_conditions) + result_forward = fm.transport(batch["source_lin"], condition=batch["source_conditions"], forward=True) assert isinstance(result_forward, jax.Array) assert jnp.sum(jnp.isnan(result_forward)) == 0 - result_backward = fm.transport(target, condition=condition, forward=False) + result_backward = fm.transport(batch["target_lin"], condition=batch["target_conditions"], forward=False) assert isinstance(result_backward, jax.Array) assert jnp.sum(jnp.isnan(result_backward)) == 0 @@ -141,12 +141,12 @@ def test_flow_matching_conditional( ) fm(data_loader_gaussian_conditional, data_loader_gaussian_conditional) - source, target, condition = next(data_loader_gaussian_conditional) - result_forward = fm.transport(source, condition=condition, forward=True) + batch = next(data_loader_gaussian_conditional) + result_forward = fm.transport(batch["source_lin"], condition=batch["source_conditions"], forward=True) assert isinstance(result_forward, jax.Array) assert jnp.sum(jnp.isnan(result_forward)) == 0 - result_backward = fm.transport(target, condition=condition, forward=False) + result_backward = fm.transport(batch["target_lin"], condition=batch["target_conditions"], 
forward=False) assert isinstance(result_backward, jax.Array) assert jnp.sum(jnp.isnan(result_backward)) == 0 @@ -156,9 +156,9 @@ def test_flow_matching_learn_rescaling( data_loader_gaussian_conditional: Iterator ): data_loader = data_loader_gaussian_conditional if conditional else data_loader_gaussian - source, target, condition = next(data_loader) - source_dim = source.shape[1] - condition_dim = condition.shape[1] if conditional else 0 + batch = next(data_loader) + source_dim = batch["source_lin"].shape[1] + condition_dim = batch["source_conditions"].shape[1] if conditional else 0 neural_vf = NeuralVectorField( output_dim=2, condition_dim=0, @@ -190,10 +190,10 @@ def test_flow_matching_learn_rescaling( ) fm(data_loader, data_loader) - result_eta = fm.evaluate_eta(source, condition=condition) + result_eta = fm.evaluate_eta(batch["source_lin"], condition=batch["source_conditions"]) assert isinstance(result_eta, jax.Array) assert jnp.sum(jnp.isnan(result_eta)) == 0 - result_xi = fm.evaluate_xi(target, condition=condition) + result_xi = fm.evaluate_xi(batch["target_lin"], condition=batch["target_conditions"]) assert isinstance(result_xi, jax.Array) assert jnp.sum(jnp.isnan(result_xi)) == 0 From beee22dff6c5741efc7b3269e0eb9de8d609ec65 Mon Sep 17 00:00:00 2001 From: lucaeyring Date: Mon, 27 Nov 2023 20:17:47 +0100 Subject: [PATCH 030/186] [ci skip] changed dataloaders to numpy and dict return --- src/ott/neural/solvers/otfm.py | 20 -------------------- 1 file changed, 20 deletions(-) diff --git a/src/ott/neural/solvers/otfm.py b/src/ott/neural/solvers/otfm.py index 6e6f419ca..82a6b67aa 100644 --- a/src/ott/neural/solvers/otfm.py +++ b/src/ott/neural/solvers/otfm.py @@ -201,26 +201,6 @@ def __call__(self, train_loader, valid_loader) -> None: """ batch: Mapping[str, jax.Array] = {} curr_loss = 0.0 - """ - if self.num_eval_samples > 0: - eval_batch_source, eval_batch_target = [], [] - for iter in range(self.num_eval_samples): - batch = next( - valid_loader - ) - 
eval_batch_source.append(batch["source_lin"]) - eval_batch_target.append(batch["target_lin"]) - eval_batch_source = jnp.stack(eval_batch_source) - eval_batch_target = jnp.stack(eval_batch_target) - self._training_logs["data_sink_div"].append( - sinkhorn_divergence( - eval_batch_source, - eval_batch_target, - self.epsilon, - self.cost_fn, - self.scale_cost, - ) - )""" for iter in range(self.iterations): rng_resample, rng_step_fn, self.rng = random.split(self.rng, 3) From f26e07292a1a9cbfb31a79326edce114c1b636dd Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 28 Nov 2023 10:07:06 +0100 Subject: [PATCH 031/186] revert jax.Array to jnp.ndarray --- docs/tutorials/GWLRSinkhorn.ipynb | 2 +- docs/tutorials/Hessians.ipynb | 2 +- docs/tutorials/Monge_Gap.ipynb | 12 +- docs/tutorials/One_Sinkhorn.ipynb | 4 +- .../tutorials/basic_ot_between_datasets.ipynb | 2 +- docs/tutorials/point_clouds.ipynb | 4 +- .../sinkhorn_divergence_gradient_flow.ipynb | 4 +- .../sparse_monge_displacements.ipynb | 2 +- src/ott/datasets.py | 8 +- src/ott/geometry/costs.py | 166 +++++++-------- src/ott/geometry/geometry.py | 190 +++++++++--------- src/ott/geometry/graph.py | 44 ++-- src/ott/geometry/grid.py | 40 ++-- src/ott/geometry/low_rank.py | 60 +++--- src/ott/geometry/pointcloud.py | 110 +++++----- src/ott/geometry/segment.py | 33 +-- src/ott/initializers/linear/initializers.py | 58 +++--- .../initializers/linear/initializers_lr.py | 100 ++++----- .../initializers/quadratic/initializers.py | 4 +- src/ott/math/fixed_point_loop.py | 2 +- src/ott/math/matrix_square_root.py | 51 ++--- src/ott/math/unbalanced_functions.py | 23 +-- src/ott/math/utils.py | 22 +- src/ott/neural/data/dataloaders.py | 4 +- src/ott/neural/models/base_models.py | 18 +- src/ott/neural/models/conjugate_solvers.py | 13 +- src/ott/neural/models/layers.py | 12 +- src/ott/neural/models/models.py | 52 ++--- src/ott/neural/solvers/base_solver.py | 95 ++++----- src/ott/neural/solvers/flows.py | 31 +-- 
src/ott/neural/solvers/genot.py | 18 +- src/ott/neural/solvers/losses.py | 8 +- src/ott/neural/solvers/map_estimator.py | 30 +-- src/ott/neural/solvers/neuraldual.py | 56 +++--- src/ott/neural/solvers/otfm.py | 49 +++-- src/ott/problems/linear/barycenter_problem.py | 20 +- src/ott/problems/linear/linear_problem.py | 13 +- src/ott/problems/linear/potentials.py | 34 ++-- src/ott/problems/quadratic/gw_barycenter.py | 50 ++--- src/ott/problems/quadratic/quadratic_costs.py | 3 +- .../problems/quadratic/quadratic_problem.py | 32 +-- src/ott/solvers/linear/_solve.py | 6 +- src/ott/solvers/linear/acceleration.py | 8 +- .../solvers/linear/continuous_barycenter.py | 24 +-- src/ott/solvers/linear/discrete_barycenter.py | 18 +- .../linear/implicit_differentiation.py | 31 +-- src/ott/solvers/linear/lineax_implicit.py | 4 +- src/ott/solvers/linear/lr_utils.py | 42 ++-- src/ott/solvers/linear/sinkhorn.py | 88 ++++---- src/ott/solvers/linear/sinkhorn_lr.py | 131 ++++++------ src/ott/solvers/linear/univariate.py | 14 +- src/ott/solvers/quadratic/_solve.py | 6 +- .../solvers/quadratic/gromov_wasserstein.py | 24 +-- .../quadratic/gromov_wasserstein_lr.py | 135 +++++++------ src/ott/solvers/quadratic/gw_barycenter.py | 34 ++-- src/ott/tools/gaussian_mixture/fit_gmm.py | 38 ++-- .../tools/gaussian_mixture/fit_gmm_pair.py | 26 +-- src/ott/tools/gaussian_mixture/gaussian.py | 32 +-- .../gaussian_mixture/gaussian_mixture.py | 41 ++-- .../gaussian_mixture/gaussian_mixture_pair.py | 6 +- src/ott/tools/gaussian_mixture/linalg.py | 34 ++-- .../tools/gaussian_mixture/probabilities.py | 14 +- src/ott/tools/gaussian_mixture/scale_tril.py | 38 ++-- src/ott/tools/k_means.py | 70 +++---- src/ott/tools/plot.py | 6 +- src/ott/tools/segment_sinkhorn.py | 24 +-- src/ott/tools/sinkhorn_divergence.py | 44 ++-- src/ott/tools/soft_sort.py | 82 ++++---- src/ott/types.py | 8 +- src/ott/utils.py | 3 +- tests/conftest.py | 3 +- tests/geometry/costs_test.py | 22 +- tests/geometry/graph_test.py | 22 +- 
tests/geometry/low_rank_test.py | 26 +-- tests/geometry/pointcloud_test.py | 10 +- tests/geometry/scaling_cost_test.py | 8 +- tests/geometry/subsetting_test.py | 8 +- .../initializers/linear/sinkhorn_init_test.py | 26 +-- .../linear/sinkhorn_lr_init_test.py | 8 +- tests/initializers/quadratic/gw_init_test.py | 3 +- tests/math/lse_test.py | 2 +- tests/math/math_utils_test.py | 2 +- tests/math/matrix_square_root_test.py | 12 +- tests/neural/conftest.py | 16 +- tests/neural/genot_test.py | 17 +- tests/neural/icnn_test.py | 4 +- tests/neural/losses_test.py | 7 +- tests/neural/map_estimator_test.py | 6 +- tests/neural/meta_initializer_test.py | 14 +- tests/neural/otfm_test.py | 55 +++-- tests/problems/linear/potentials_test.py | 14 +- .../linear/continuous_barycenter_test.py | 14 +- tests/solvers/linear/sinkhorn_diff_test.py | 58 +++--- tests/solvers/linear/sinkhorn_grid_test.py | 8 +- tests/solvers/linear/sinkhorn_lr_test.py | 2 +- tests/solvers/linear/sinkhorn_misc_test.py | 18 +- tests/solvers/linear/sinkhorn_test.py | 2 +- tests/solvers/linear/univariate_test.py | 4 +- tests/solvers/quadratic/fgw_test.py | 18 +- tests/solvers/quadratic/gw_barycenter_test.py | 12 +- tests/solvers/quadratic/gw_test.py | 19 +- tests/solvers/quadratic/lower_bound_test.py | 6 +- .../gaussian_mixture/fit_gmm_pair_test.py | 2 +- tests/tools/gaussian_mixture/fit_gmm_test.py | 2 +- .../gaussian_mixture_pair_test.py | 2 +- .../gaussian_mixture/gaussian_mixture_test.py | 16 +- tests/tools/gaussian_mixture/gaussian_test.py | 18 +- tests/tools/gaussian_mixture/linalg_test.py | 20 +- .../gaussian_mixture/probabilities_test.py | 4 +- .../tools/gaussian_mixture/scale_tril_test.py | 12 +- tests/tools/k_means_test.py | 48 ++--- tests/tools/segment_sinkhorn_test.py | 2 +- tests/tools/sinkhorn_divergence_test.py | 6 +- tests/tools/soft_sort_test.py | 28 +-- 114 files changed, 1603 insertions(+), 1515 deletions(-) diff --git a/docs/tutorials/GWLRSinkhorn.ipynb b/docs/tutorials/GWLRSinkhorn.ipynb index 
ace06be8f..590671428 100644 --- a/docs/tutorials/GWLRSinkhorn.ipynb +++ b/docs/tutorials/GWLRSinkhorn.ipynb @@ -66,7 +66,7 @@ }, "outputs": [], "source": [ - "def create_points(rng: jax.Array, n: int, m: int, d1: int, d2: int):\n", + "def create_points(rng: jnp.ndarray, n: int, m: int, d1: int, d2: int):\n", " rngs = jax.random.split(rng, 5)\n", " x = jax.random.uniform(rngs[0], (n, d1))\n", " y = jax.random.uniform(rngs[1], (m, d2))\n", diff --git a/docs/tutorials/Hessians.ipynb b/docs/tutorials/Hessians.ipynb index f7c8b56d1..0e50ec959 100644 --- a/docs/tutorials/Hessians.ipynb +++ b/docs/tutorials/Hessians.ipynb @@ -103,7 +103,7 @@ }, "outputs": [], "source": [ - "def loss(a: jax.Array, x: jax.Array, implicit: bool = True) -> float:\n", + "def loss(a: jnp.ndarray, x: jnp.ndarray, implicit: bool = True) -> float:\n", " return sinkhorn_divergence.sinkhorn_divergence(\n", " pointcloud.PointCloud,\n", " x,\n", diff --git a/docs/tutorials/Monge_Gap.ipynb b/docs/tutorials/Monge_Gap.ipynb index ac38d89b4..53bc670dc 100644 --- a/docs/tutorials/Monge_Gap.ipynb +++ b/docs/tutorials/Monge_Gap.ipynb @@ -94,13 +94,13 @@ "\n", " name: Literal[\"moon\", \"s_curve\"]\n", " theta_rotation: float = 0.0\n", - " mean: Optional[jax.Array] = None\n", + " mean: Optional[jnp.ndarray] = None\n", " noise: float = 0.01\n", " scale: float = 1.0\n", " batch_size: int = 1024\n", - " rng: Optional[jax.Array] = (None,)\n", + " rng: Optional[jnp.ndarray] = (None,)\n", "\n", - " def __iter__(self) -> Iterator[jax.Array]:\n", + " def __iter__(self) -> Iterator[jnp.ndarray]:\n", " \"\"\"Random sample generator from Gaussian mixture.\n", "\n", " Returns:\n", @@ -108,7 +108,7 @@ " \"\"\"\n", " return self._create_sample_generators()\n", "\n", - " def _create_sample_generators(self) -> Iterator[jax.Array]:\n", + " def _create_sample_generators(self) -> Iterator[jnp.ndarray]:\n", " rng = jax.random.PRNGKey(0) if self.rng is None else self.rng\n", "\n", " # define rotation matrix tp rotate samples\n", 
@@ -151,7 +151,7 @@ " target_kwargs: Mapping[str, Any] = MappingProxyType({}),\n", " train_batch_size: int = 256,\n", " valid_batch_size: int = 256,\n", - " rng: Optional[jax.Array] = None,\n", + " rng: Optional[jnp.ndarray] = None,\n", ") -> Tuple[dataset.Dataset, dataset.Dataset, int]:\n", " \"\"\"Samplers from ``SklearnDistribution``.\"\"\"\n", " rng = jax.random.PRNGKey(0) if rng is None else rng\n", @@ -202,7 +202,7 @@ " num_points: Optional[int] = None,\n", " title: Optional[str] = None,\n", " figsize: Tuple[int, int] = (8, 6),\n", - " rng: Optional[jax.Array] = None,\n", + " rng: Optional[jnp.ndarray] = None,\n", "):\n", " \"\"\"Plot samples from the source and target measures.\n", "\n", diff --git a/docs/tutorials/One_Sinkhorn.ipynb b/docs/tutorials/One_Sinkhorn.ipynb index 9465441d8..8c3d98e2e 100644 --- a/docs/tutorials/One_Sinkhorn.ipynb +++ b/docs/tutorials/One_Sinkhorn.ipynb @@ -555,7 +555,9 @@ }, "outputs": [], "source": [ - "def my_sinkhorn(geom: geometry.Geometry, a: jax.Array, b: jax.Array, **kwargs):\n", + "def my_sinkhorn(\n", + " geom: geometry.Geometry, a: jnp.ndarray, b: jnp.ndarray, **kwargs\n", + "):\n", " return linear.solve(\n", " geom, a, b, inner_iterations=1, max_iterations=10_000, **kwargs\n", " )" diff --git a/docs/tutorials/basic_ot_between_datasets.ipynb b/docs/tutorials/basic_ot_between_datasets.ipynb index b3c452d36..3cc61d403 100644 --- a/docs/tutorials/basic_ot_between_datasets.ipynb +++ b/docs/tutorials/basic_ot_between_datasets.ipynb @@ -260,7 +260,7 @@ "metadata": {}, "outputs": [], "source": [ - "def reg_ot_cost(x: jax.Array, y: jax.Array) -> float:\n", + "def reg_ot_cost(x: jnp.ndarray, y: jnp.ndarray) -> float:\n", " geom = pointcloud.PointCloud(x, y)\n", " ot = linear.solve(geom)\n", " return ot.reg_ot_cost" diff --git a/docs/tutorials/point_clouds.ipynb b/docs/tutorials/point_clouds.ipynb index e1b77edca..fd20ffc9a 100644 --- a/docs/tutorials/point_clouds.ipynb +++ b/docs/tutorials/point_clouds.ipynb @@ -241,8 +241,8 @@ 
"outputs": [], "source": [ "def optimize(\n", - " x: jax.Array,\n", - " y: jax.Array,\n", + " x: jnp.ndarray,\n", + " y: jnp.ndarray,\n", " num_iter: int = 300,\n", " dump_every: int = 5,\n", " learning_rate: float = 0.2,\n", diff --git a/docs/tutorials/sinkhorn_divergence_gradient_flow.ipynb b/docs/tutorials/sinkhorn_divergence_gradient_flow.ipynb index ff84f53b4..c3b73039c 100644 --- a/docs/tutorials/sinkhorn_divergence_gradient_flow.ipynb +++ b/docs/tutorials/sinkhorn_divergence_gradient_flow.ipynb @@ -145,8 +145,8 @@ "outputs": [], "source": [ "def gradient_flow(\n", - " x: jax.Array,\n", - " y: jax.Array,\n", + " x: jnp.ndarray,\n", + " y: jnp.ndarray,\n", " cost_fn: callable,\n", " num_iter: int = 500,\n", " lr: float = 0.2,\n", diff --git a/docs/tutorials/sparse_monge_displacements.ipynb b/docs/tutorials/sparse_monge_displacements.ipynb index 8fcb49096..a21213703 100644 --- a/docs/tutorials/sparse_monge_displacements.ipynb +++ b/docs/tutorials/sparse_monge_displacements.ipynb @@ -241,7 +241,7 @@ "solver = jax.jit(sinkhorn.Sinkhorn())\n", "\n", "\n", - "def entropic_map(x, y, cost_fn: costs.TICost) -> jax.Array:\n", + "def entropic_map(x, y, cost_fn: costs.TICost) -> jnp.ndarray:\n", " geom = pointcloud.PointCloud(x, y, cost_fn=cost_fn)\n", " output = solver(linear_problem.LinearProblem(geom))\n", " dual_potentials = output.to_dual_potentials()\n", diff --git a/src/ott/datasets.py b/src/ott/datasets.py index 12dda06bb..07bd87fb9 100644 --- a/src/ott/datasets.py +++ b/src/ott/datasets.py @@ -32,8 +32,8 @@ class Dataset(NamedTuple): source_iter: loader for the source measure target_iter: loader for the target measure """ - source_iter: Iterator[jax.Array] - target_iter: Iterator[jax.Array] + source_iter: Iterator[jnp.ndarray] + target_iter: Iterator[jnp.ndarray] @dataclasses.dataclass @@ -57,7 +57,7 @@ class GaussianMixture: """ name: Name_t batch_size: int - init_rng: jax.Array + init_rng: jnp.ndarray scale: float = 5.0 std: float = 0.5 @@ -110,7 +110,7 @@ def 
create_gaussian_mixture_samplers( name_target: Name_t, train_batch_size: int = 2048, valid_batch_size: int = 2048, - rng: Optional[jax.Array] = None, + rng: Optional[jnp.ndarray] = None, ) -> Tuple[Dataset, Dataset, int]: """Gaussian samplers for :class:`~ott.solvers.nn.neuraldual.W2NeuralDual`. diff --git a/src/ott/geometry/costs.py b/src/ott/geometry/costs.py index aeaf89b72..9f1a6c3a0 100644 --- a/src/ott/geometry/costs.py +++ b/src/ott/geometry/costs.py @@ -56,10 +56,10 @@ class CostFn(abc.ABC): """ # no norm function created by default. - norm: Optional[Callable[[jax.Array], Union[float, jax.Array]]] = None + norm: Optional[Callable[[jnp.ndarray], Union[float, jnp.ndarray]]] = None @abc.abstractmethod - def pairwise(self, x: jax.Array, y: jax.Array) -> float: + def pairwise(self, x: jnp.ndarray, y: jnp.ndarray) -> float: """Compute cost between :math:`x` and :math:`y`. Args: @@ -70,8 +70,8 @@ def pairwise(self, x: jax.Array, y: jax.Array) -> float: The cost. """ - def barycenter(self, weights: jax.Array, - xs: jax.Array) -> Tuple[jax.Array, Any]: + def barycenter(self, weights: jnp.ndarray, + xs: jnp.ndarray) -> Tuple[jnp.ndarray, Any]: """Barycentric operator. Args: @@ -86,7 +86,7 @@ def barycenter(self, weights: jax.Array, raise NotImplementedError("Barycenter is not implemented.") @classmethod - def _padder(cls, dim: int) -> jax.Array: + def _padder(cls, dim: int) -> jnp.ndarray: """Create a padding vector of adequate dimension, well-suited to a cost. Args: @@ -97,7 +97,7 @@ def _padder(cls, dim: int) -> jax.Array: """ return jnp.zeros((1, dim)) - def __call__(self, x: jax.Array, y: jax.Array) -> float: + def __call__(self, x: jnp.ndarray, y: jnp.ndarray) -> float: """Compute cost between :math:`x` and :math:`y`. 
Args: @@ -113,7 +113,7 @@ def __call__(self, x: jax.Array, y: jax.Array) -> float: return cost return cost + self.norm(x) + self.norm(y) - def all_pairs(self, x: jax.Array, y: jax.Array) -> jax.Array: + def all_pairs(self, x: jnp.ndarray, y: jnp.ndarray) -> jnp.ndarray: """Compute matrix of all pairwise costs, including the :attr:`norms `. Args: @@ -125,7 +125,7 @@ def all_pairs(self, x: jax.Array, y: jax.Array) -> jax.Array: """ return jax.vmap(lambda x_: jax.vmap(lambda y_: self(x_, y_))(y))(x) - def all_pairs_pairwise(self, x: jax.Array, y: jax.Array) -> jax.Array: + def all_pairs_pairwise(self, x: jnp.ndarray, y: jnp.ndarray) -> jnp.ndarray: """Compute matrix of all pairwise costs, excluding the :attr:`norms `. Args: @@ -163,7 +163,7 @@ class TICost(CostFn): """ @abc.abstractmethod - def h(self, z: jax.Array) -> float: + def h(self, z: jnp.ndarray) -> float: """TI function acting on difference of :math:`x-y` to output cost. Args: @@ -173,11 +173,11 @@ def h(self, z: jax.Array) -> float: The cost. """ - def h_legendre(self, z: jax.Array) -> float: + def h_legendre(self, z: jnp.ndarray) -> float: """Legendre transform of :func:`h` when it is convex.""" raise NotImplementedError("Legendre transform of `h` is not implemented.") - def pairwise(self, x: jax.Array, y: jax.Array) -> float: + def pairwise(self, x: jnp.ndarray, y: jnp.ndarray) -> float: """Compute cost as evaluation of :func:`h` on :math:`x-y`.""" return self.h(x - y) @@ -198,10 +198,10 @@ def __init__(self, p: float): self.p = p self.q = 1.0 / (1.0 - (1.0 / p)) if p > 1.0 else jnp.inf - def h(self, z: jax.Array) -> float: # noqa: D102 + def h(self, z: jnp.ndarray) -> float: # noqa: D102 return 0.5 * mu.norm(z, self.p) ** 2 - def h_legendre(self, z: jax.Array) -> float: + def h_legendre(self, z: jnp.ndarray) -> float: """Legendre transform of :func:`h`. For details on the derivation, see e.g., :cite:`boyd:04`, p. 93/94. 
@@ -234,10 +234,10 @@ def __init__(self, p: float): self.p = p self.q = 1.0 / (1.0 - (1.0 / p)) if p > 1.0 else jnp.inf - def h(self, z: jax.Array) -> float: # noqa: D102 + def h(self, z: jnp.ndarray) -> float: # noqa: D102 return mu.norm(z, self.p) ** self.p / self.p - def h_legendre(self, z: jax.Array) -> float: # noqa: D102 + def h_legendre(self, z: jnp.ndarray) -> float: # noqa: D102 # not defined for `p=1` return mu.norm(z, self.q) ** self.q / self.q @@ -260,7 +260,7 @@ class Euclidean(CostFn): because the function is not strictly convex (it is linear on rays). """ - def pairwise(self, x: jax.Array, y: jax.Array) -> float: + def pairwise(self, x: jnp.ndarray, y: jnp.ndarray) -> float: """Compute Euclidean norm using custom jvp implementation. Here we use a custom jvp implementation for the norm that does not yield @@ -277,22 +277,22 @@ class SqEuclidean(TICost): Implemented as a translation invariant cost, :math:`h(z) = \|z\|^2`. """ - def norm(self, x: jax.Array) -> Union[float, jax.Array]: + def norm(self, x: jnp.ndarray) -> Union[float, jnp.ndarray]: """Compute squared Euclidean norm for vector.""" return jnp.sum(x ** 2, axis=-1) - def pairwise(self, x: jax.Array, y: jax.Array) -> float: + def pairwise(self, x: jnp.ndarray, y: jnp.ndarray) -> float: """Compute minus twice the dot-product between vectors.""" return -2. 
* jnp.vdot(x, y) - def h(self, z: jax.Array) -> float: # noqa: D102 + def h(self, z: jnp.ndarray) -> float: # noqa: D102 return jnp.sum(z ** 2) - def h_legendre(self, z: jax.Array) -> float: # noqa: D102 + def h_legendre(self, z: jnp.ndarray) -> float: # noqa: D102 return 0.25 * jnp.sum(z ** 2) - def barycenter(self, weights: jax.Array, - xs: jax.Array) -> Tuple[jax.Array, Any]: + def barycenter(self, weights: jnp.ndarray, + xs: jnp.ndarray) -> Tuple[jnp.ndarray, Any]: """Output barycenter of vectors when using squared-Euclidean distance.""" return jnp.average(xs, weights=weights, axis=0), None @@ -309,7 +309,7 @@ def __init__(self, ridge: float = 1e-8): super().__init__() self._ridge = ridge - def pairwise(self, x: jax.Array, y: jax.Array) -> float: + def pairwise(self, x: jnp.ndarray, y: jnp.ndarray) -> float: """Cosine distance between vectors, denominator regularized with ridge.""" ridge = self._ridge x_norm = jnp.linalg.norm(x, axis=-1) @@ -318,7 +318,7 @@ def pairwise(self, x: jax.Array, y: jax.Array) -> float: return 1.0 - cosine_similarity @classmethod - def _padder(cls, dim: int) -> jax.Array: + def _padder(cls, dim: int) -> jnp.ndarray: return jnp.ones((1, dim)) @@ -341,7 +341,7 @@ class RegTICost(TICost, abc.ABC): def __init__( self, scaling_reg: float = 1.0, - matrix: Optional[jax.Array] = None, + matrix: Optional[jnp.ndarray] = None, orthogonal: bool = False, ): super().__init__() @@ -350,16 +350,16 @@ def __init__( self.orthogonal = orthogonal @abc.abstractmethod - def _reg(self, z: jax.Array) -> float: + def _reg(self, z: jnp.ndarray) -> float: """Regularization function.""" - def _reg_stiefel_orth(self, z: jax.Array) -> float: + def _reg_stiefel_orth(self, z: jnp.ndarray) -> float: raise NotImplementedError( "Regularization in the orthogonal " "subspace is not implemented." ) - def reg(self, z: jax.Array) -> float: + def reg(self, z: jnp.ndarray) -> float: """Regularization function. 
Args: @@ -374,7 +374,7 @@ def reg(self, z: jax.Array) -> float: return self._reg_stiefel_orth(z) return self._reg(self.matrix @ z) - def prox_reg(self, z: jax.Array, tau: float = 1.0) -> jax.Array: + def prox_reg(self, z: jnp.ndarray, tau: float = 1.0) -> jnp.ndarray: """Proximal operator of :meth:`reg`. Args: @@ -391,24 +391,26 @@ def prox_reg(self, z: jax.Array, tau: float = 1.0) -> jax.Array: return self._prox_reg_stiefel_orth(z, tau) return self._prox_reg_stiefel(z, tau) - def _prox_reg(self, z: jax.Array, tau: float = 1.0) -> jax.Array: + def _prox_reg(self, z: jnp.ndarray, tau: float = 1.0) -> jnp.ndarray: raise NotImplementedError("Proximal operator is not implemented.") - def _prox_reg_stiefel_orth(self, z: jax.Array, tau: float = 1.0) -> jax.Array: + def _prox_reg_stiefel_orth( + self, z: jnp.ndarray, tau: float = 1.0 + ) -> jnp.ndarray: - def orth(x: jax.Array) -> jax.Array: + def orth(x: jnp.ndarray) -> jnp.ndarray: return x - self.matrix.T @ (self.matrix @ x) # assumes `matrix` has orthogonal rows tmp = orth(z) return z - orth(tmp - self._prox_reg(tmp, tau)) - def _prox_reg_stiefel(self, z: jax.Array, tau: float) -> jax.Array: + def _prox_reg_stiefel(self, z: jnp.ndarray, tau: float) -> jnp.ndarray: # assumes `matrix` has orthogonal rows tmp = self.matrix @ z return z - self.matrix.T @ (tmp - self._prox_reg(tmp, tau)) - def prox_legendre_reg(self, z: jax.Array, tau: float = 1.0) -> jax.Array: + def prox_legendre_reg(self, z: jnp.ndarray, tau: float = 1.0) -> jnp.ndarray: r"""Proximal operator of the Legendre transform of :meth:`reg`. 
Uses Moreau's decomposition: @@ -426,16 +428,16 @@ def prox_legendre_reg(self, z: jax.Array, tau: float = 1.0) -> jax.Array: """ return z - tau * self.prox_reg(z / tau, 1.0 / tau) - def h(self, z: jax.Array) -> float: # noqa: D102 + def h(self, z: jnp.ndarray) -> float: # noqa: D102 out = 0.5 * jnp.sum(z ** 2) return out + self.scaling_reg * self.reg(z) - def h_legendre(self, z: jax.Array) -> float: # noqa: D102 + def h_legendre(self, z: jnp.ndarray) -> float: # noqa: D102 q = jax.lax.stop_gradient(self.prox_reg(z)) return jnp.sum(q * z) - self.h(q) - def h_transform(self, f: Callable[[jax.Array], float], - **kwargs: Any) -> Callable[[jax.Array], float]: + def h_transform(self, f: Callable[[jnp.ndarray], float], + **kwargs: Any) -> Callable[[jnp.ndarray], float]: r"""Compute the h-transform of a concave function. Return a callable :math:`f_h` defined as: @@ -465,16 +467,18 @@ def h_transform(self, f: Callable[[jax.Array], float], The h-transform of ``f``. """ - def minus_f(z: jax.Array, x: jax.Array) -> float: + def minus_f(z: jnp.ndarray, x: jnp.ndarray) -> float: return -f(x - z) - def prox(x: jax.Array, scaling_reg: float, scaling_h: float) -> jax.Array: + def prox( + x: jnp.ndarray, scaling_reg: float, scaling_h: float + ) -> jnp.ndarray: # https://web.stanford.edu/~boyd/papers/pdf/prox_algs.pdf 2.2. tmp = 1.0 / (1.0 + scaling_h) tau = scaling_reg * scaling_h * tmp return self.prox_reg(x * tmp, tau) - def f_h(x: jax.Array) -> float: + def f_h(x: jnp.ndarray) -> float: pg = jaxopt.ProximalGradient(fun=minus_f, prox=prox, **kwargs) pg_run = pg.run(x, self.scaling_reg, x=x) pg_sol = jax.lax.stop_gradient(pg_run.params) @@ -504,10 +508,10 @@ class ElasticL1(RegTICost): to promote displacements in the span of ``matrix``. 
""" - def _reg(self, z: jax.Array) -> float: # noqa: D102 + def _reg(self, z: jnp.ndarray) -> float: # noqa: D102 return jnp.linalg.norm(z, ord=1) - def _prox_reg(self, z: jax.Array, tau: float = 1.0) -> jax.Array: + def _prox_reg(self, z: jnp.ndarray, tau: float = 1.0) -> jnp.ndarray: return jnp.sign(z) * jax.nn.relu(jnp.abs(z) - tau * self.scaling_reg) @@ -525,17 +529,19 @@ class ElasticL2(RegTICost): to promote displacements in the span of ``matrix``. """ - def _reg(self, z: jax.Array) -> float: # noqa: D102 + def _reg(self, z: jnp.ndarray) -> float: # noqa: D102 return 0.5 * jnp.sum(z ** 2) - def _reg_stiefel_orth(self, z: jax.Array) -> float: + def _reg_stiefel_orth(self, z: jnp.ndarray) -> float: # Pythagorean identity return self._reg(z) - self._reg(self.matrix @ z) - def _prox_reg(self, z: jax.Array, tau: float = 1.0) -> jax.Array: + def _prox_reg(self, z: jnp.ndarray, tau: float = 1.0) -> jnp.ndarray: return z / (1.0 + tau * self.scaling_reg) - def _prox_reg_stiefel_orth(self, z: jax.Array, tau: float = 1.0) -> jax.Array: + def _prox_reg_stiefel_orth( + self, z: jnp.ndarray, tau: float = 1.0 + ) -> jnp.ndarray: out = z + tau * self.scaling_reg * self.matrix.T @ (self.matrix @ z) return self._prox_reg(out, tau) @@ -559,7 +565,7 @@ class ElasticSTVS(RegTICost): to promote displacements in the span of ``matrix``. 
""" # noqa: D205,E501 - def _reg(self, z: jax.Array) -> float: # noqa: D102 + def _reg(self, z: jnp.ndarray) -> float: # noqa: D102 u = jnp.arcsinh(jnp.abs(z) / (2 * self.scaling_reg)) out = u - 0.5 * jnp.exp(-2.0 * u) # Lemma 2.1 of `schreck:15`; @@ -567,8 +573,8 @@ def _reg(self, z: jax.Array) -> float: # noqa: D102 return self.scaling_reg * jnp.sum(out + 0.5) # make positive def _prox_reg( # noqa: D102 - self, z: jax.Array, tau: float = 1.0 - ) -> jax.Array: + self, z: jnp.ndarray, tau: float = 1.0 + ) -> jnp.ndarray: tmp = 1.0 - (self.scaling_reg * tau / (jnp.abs(z) + 1e-12)) ** 2 return jax.nn.relu(tmp) * z @@ -594,7 +600,7 @@ def __init__(self, k: int, *args, **kwargs: Any): super().__init__(*args, **kwargs) self.k = k - def _reg(self, z: jax.Array) -> float: # noqa: D102 + def _reg(self, z: jnp.ndarray) -> float: # noqa: D102 # Prop 2.1 in :cite:`argyriou:12` k = self.k top_w = jax.lax.top_k(jnp.abs(z), k)[0] # Fetch largest k values @@ -615,14 +621,15 @@ def _reg(self, z: jax.Array) -> float: # noqa: D102 return 0.5 * (s + (r + 1) * cesaro[r] ** 2) - def prox_reg(self, z: jax.Array, tau: float = 1.0) -> float: # noqa: D102 + def prox_reg(self, z: jnp.ndarray, tau: float = 1.0) -> float: # noqa: D102 @functools.partial(jax.vmap, in_axes=[0, None, None]) - def find_indices(r: int, l: jax.Array, - z: jax.Array) -> Tuple[jax.Array, jax.Array]: + def find_indices(r: int, l: jnp.ndarray, + z: jnp.ndarray) -> Tuple[jnp.ndarray, jnp.ndarray]: @functools.partial(jax.vmap, in_axes=[None, 0, None]) - def inner(r: int, l: int, z: jax.Array) -> Tuple[jax.Array, jax.Array]: + def inner(r: int, l: int, + z: jnp.ndarray) -> Tuple[jnp.ndarray, jnp.ndarray]: i = k - r - 1 res = jnp.sum(z * ((i <= ixs) & (ixs < l))) res /= l - k + (beta + 1) * r + beta + 1 @@ -685,14 +692,14 @@ def __init__(self, dimension: int, sqrtm_kw: Optional[Dict[str, Any]] = None): self._dimension = dimension self._sqrtm_kw = {} if sqrtm_kw is None else sqrtm_kw - def norm(self, x: jax.Array) -> 
jax.Array: + def norm(self, x: jnp.ndarray) -> jnp.ndarray: """Compute norm of Gaussian, sq. 2-norm of mean + trace of covariance.""" mean, cov = x_to_means_and_covs(x, self._dimension) norm = jnp.sum(mean ** 2, axis=-1) norm += jnp.trace(cov, axis1=-2, axis2=-1) return norm - def pairwise(self, x: jax.Array, y: jax.Array) -> float: + def pairwise(self, x: jnp.ndarray, y: jnp.ndarray) -> float: """Compute - 2 x Bures dot-product.""" mean_x, cov_x = x_to_means_and_covs(x, self._dimension) mean_y, cov_y = x_to_means_and_covs(y, self._dimension) @@ -706,12 +713,12 @@ def pairwise(self, x: jax.Array, y: jax.Array) -> float: def covariance_fixpoint_iter( self, - covs: jax.Array, - weights: jax.Array, + covs: jnp.ndarray, + weights: jnp.ndarray, tolerance: float = 1e-4, sqrtm_kw: Optional[Dict[str, Any]] = None, **kwargs: Any - ) -> jax.Array: + ) -> jnp.ndarray: """Iterate fix-point updates to compute barycenter of Gaussians. Args: @@ -737,8 +744,8 @@ def covariance_fixpoint_iter( @functools.partial(jax.vmap, in_axes=[None, 0, 0]) def scale_covariances( - cov_sqrt: jax.Array, cov: jax.Array, weight: jax.Array - ) -> jax.Array: + cov_sqrt: jnp.ndarray, cov: jnp.ndarray, weight: jnp.ndarray + ) -> jnp.ndarray: """Rescale covariance in barycenter step.""" return weight * matrix_square_root.sqrtm_only((cov_sqrt @ cov) @ cov_sqrt, **sqrtm_kw) @@ -750,8 +757,8 @@ def cond_fn(iteration: int, constants: Tuple[Any, ...], state) -> bool: def body_fn( iteration: int, constants: Tuple[Any, ...], - state: Tuple[jax.Array, float], compute_error: bool - ) -> Tuple[jax.Array, float]: + state: Tuple[jnp.ndarray, float], compute_error: bool + ) -> Tuple[jnp.ndarray, float]: del constants, compute_error cov, diffs = state cov_sqrt, cov_inv_sqrt, _ = matrix_square_root.sqrtm(cov, **sqrtm_kw) @@ -763,7 +770,7 @@ def body_fn( diffs = diffs.at[iteration // inner_iterations].set(diff) return next_cov, diffs - def init_state() -> Tuple[jax.Array, float]: + def init_state() -> Tuple[jnp.ndarray, 
float]: cov_init = jnp.eye(self._dimension) diffs = -jnp.ones( (np.ceil(max_iterations / inner_iterations).astype(int),), @@ -784,12 +791,12 @@ def init_state() -> Tuple[jax.Array, float]: def barycenter( self, - weights: jax.Array, - xs: jax.Array, + weights: jnp.ndarray, + xs: jnp.ndarray, tolerance: float = 1e-4, sqrtm_kw: Optional[Dict[str, Any]] = None, **kwargs: Any - ) -> Tuple[jax.Array, jax.Array]: + ) -> Tuple[jnp.ndarray, jnp.ndarray]: """Compute the Bures barycenter of weighted Gaussian distributions. Implements the fixed point approach proposed in :cite:`alvarez-esteban:16` @@ -835,7 +842,7 @@ def barycenter( return mean_and_cov_to_x(mu_bary, cov_bary, self._dimension), diffs @classmethod - def _padder(cls, dim: int) -> jax.Array: + def _padder(cls, dim: int) -> jnp.ndarray: dimension = int((-1 + math.sqrt(1 + 4 * dim)) / 2) padding = mean_and_cov_to_x( jnp.zeros((dimension,)), jnp.eye(dimension), dimension @@ -878,7 +885,7 @@ def __init__( self._gamma = gamma self._sqrtm_kw = kwargs - def norm(self, x: jax.Array) -> jax.Array: + def norm(self, x: jnp.ndarray) -> jnp.ndarray: """Compute norm of Gaussian for unbalanced Bures. Args: @@ -891,7 +898,7 @@ def norm(self, x: jax.Array) -> jax.Array: """ return self._gamma * x[..., 0] - def pairwise(self, x: jax.Array, y: jax.Array) -> float: + def pairwise(self, x: jnp.ndarray, y: jnp.ndarray) -> float: """Compute dot-product for unbalanced Bures. 
Args: @@ -985,17 +992,18 @@ def __init__( self.ground_cost = SqEuclidean() if ground_cost is None else ground_cost self.debiased = debiased - def pairwise(self, x: jax.Array, y: jax.Array) -> float: # noqa: D102 + def pairwise(self, x: jnp.ndarray, y: jnp.ndarray) -> float: # noqa: D102 c_xy = self._soft_dtw(x, y) if self.debiased: return c_xy - 0.5 * (self._soft_dtw(x, x) + self._soft_dtw(y, y)) return c_xy - def _soft_dtw(self, t1: jax.Array, t2: jax.Array) -> float: + def _soft_dtw(self, t1: jnp.ndarray, t2: jnp.ndarray) -> float: def body( - carry: Tuple[jax.Array, jax.Array], current_antidiagonal: jax.Array - ) -> Tuple[Tuple[jax.Array, jax.Array], jax.Array]: + carry: Tuple[jnp.ndarray, jnp.ndarray], + current_antidiagonal: jnp.ndarray + ) -> Tuple[Tuple[jnp.ndarray, jnp.ndarray], jnp.ndarray]: # modified from: https://github.com/khdlr/softdtw_jax two_ago, one_ago = carry @@ -1042,8 +1050,8 @@ def tree_unflatten(cls, aux_data, children): # noqa: D102 return cls(*children, **aux_data) -def x_to_means_and_covs(x: jax.Array, - dimension: int) -> Tuple[jax.Array, jax.Array]: +def x_to_means_and_covs(x: jnp.ndarray, + dimension: int) -> Tuple[jnp.ndarray, jnp.ndarray]: """Extract means and covariance matrices of Gaussians from raveled vector. 
Args: @@ -1063,8 +1071,8 @@ def x_to_means_and_covs(x: jax.Array, def mean_and_cov_to_x( - mean: jax.Array, covariance: jax.Array, dimension: int -) -> jax.Array: + mean: jnp.ndarray, covariance: jnp.ndarray, dimension: int +) -> jnp.ndarray: """Ravel a Gaussian's mean and covariance matrix to d(1 + d) vector.""" return jnp.concatenate( (mean, jnp.reshape(covariance, (dimension * dimension))) diff --git a/src/ott/geometry/geometry.py b/src/ott/geometry/geometry.py index 5d3db3ee6..6894176a6 100644 --- a/src/ott/geometry/geometry.py +++ b/src/ott/geometry/geometry.py @@ -79,14 +79,14 @@ class Geometry: def __init__( self, - cost_matrix: Optional[jax.Array] = None, - kernel_matrix: Optional[jax.Array] = None, + cost_matrix: Optional[jnp.ndarray] = None, + kernel_matrix: Optional[jnp.ndarray] = None, epsilon: Optional[Union[float, epsilon_scheduler.Epsilon]] = None, relative_epsilon: Optional[bool] = None, scale_cost: Union[bool, int, float, Literal["mean", "max_cost", "median"]] = 1.0, - src_mask: Optional[jax.Array] = None, - tgt_mask: Optional[jax.Array] = None, + src_mask: Optional[jnp.ndarray] = None, + tgt_mask: Optional[jnp.ndarray] = None, ): self._cost_matrix = cost_matrix self._kernel_matrix = kernel_matrix @@ -107,7 +107,7 @@ def cost_rank(self) -> Optional[int]: """Output rank of cost matrix, if any was provided.""" @property - def cost_matrix(self) -> jax.Array: + def cost_matrix(self) -> jnp.ndarray: """Cost matrix, recomputed from kernel if only kernel was specified.""" if self._cost_matrix is None: # If no epsilon was passed on to the geometry, then assume it is one by @@ -131,7 +131,7 @@ def mean_cost_matrix(self) -> float: return jnp.sum(tmp * self._m_normed_ones) @property - def kernel_matrix(self) -> jax.Array: + def kernel_matrix(self) -> jnp.ndarray: """Kernel matrix. Either provided by user or recomputed from :attr:`cost_matrix`. 
@@ -201,7 +201,7 @@ def is_symmetric(self) -> bool: @property def inv_scale_cost(self) -> float: """Compute and return inverse of scaling factor for cost matrix.""" - if isinstance(self._scale_cost, (int, float, np.number, jax.Array)): + if isinstance(self._scale_cost, (int, float, np.number, jnp.ndarray)): return 1.0 / self._scale_cost self = self._masked_geom(mask_value=jnp.nan) if self._scale_cost == "max_cost": @@ -245,12 +245,12 @@ def copy_epsilon(self, other: "Geometry") -> "Geometry": def apply_lse_kernel( self, - f: jax.Array, - g: jax.Array, + f: jnp.ndarray, + g: jnp.ndarray, eps: float, - vec: jax.Array = None, + vec: jnp.ndarray = None, axis: int = 0 - ) -> jax.Array: + ) -> jnp.ndarray: r"""Apply :attr:`kernel_matrix` in log domain. This function applies the ground geometry's kernel in log domain, using @@ -267,10 +267,10 @@ def apply_lse_kernel( f and g in iterations 1 & 2 respectively. Args: - f: jax.Array [num_a,] , potential of size num_rows of cost_matrix - g: jax.Array [num_b,] , potential of size num_cols of cost_matrix + f: jnp.ndarray [num_a,] , potential of size num_rows of cost_matrix + g: jnp.ndarray [num_b,] , potential of size num_cols of cost_matrix eps: float, regularization strength - vec: jax.Array [num_a or num_b,] , when not None, this has the effect of + vec: jnp.ndarray [num_a or num_b,] , when not None, this has the effect of doing log-Kernel computations with an addition elementwise multiplication of exp(g / eps) by a vector. This is carried out by adding weights to the log-sum-exp function, and needs to handle signs @@ -278,7 +278,7 @@ def apply_lse_kernel( axis: summing over axis 0 when doing (2), or over axis 1 when doing (1) Returns: - A jax.Array corresponding to output above, depending on axis. + A jnp.ndarray corresponding to output above, depending on axis. 
""" w_res, w_sgn = self._softmax(f, g, eps, vec, axis) remove = f if axis == 1 else g @@ -286,20 +286,20 @@ def apply_lse_kernel( def apply_kernel( self, - scaling: jax.Array, + scaling: jnp.ndarray, eps: Optional[float] = None, axis: int = 0, - ) -> jax.Array: + ) -> jnp.ndarray: """Apply :attr:`kernel_matrix` on positive scaling vector. Args: - scaling: jax.Array [num_a or num_b] , scaling of size num_rows or + scaling: jnp.ndarray [num_a or num_b] , scaling of size num_rows or num_cols of kernel_matrix eps: passed for consistency, not used yet. axis: standard kernel product if axis is 1, transpose if 0. Returns: - a jax.Array corresponding to output above, depending on axis. + a jnp.ndarray corresponding to output above, depending on axis. """ if eps is None: kernel = self.kernel_matrix @@ -311,10 +311,10 @@ def apply_kernel( def marginal_from_potentials( self, - f: jax.Array, - g: jax.Array, + f: jnp.ndarray, + g: jnp.ndarray, axis: int = 0, - ) -> jax.Array: + ) -> jnp.ndarray: """Output marginal of transportation matrix from potentials. This applies first lse kernel in the standard way, removes the @@ -323,8 +323,8 @@ def marginal_from_potentials( by potentials. Args: - f: jax.Array [num_a,] , potential of size num_rows of cost_matrix - g: jax.Array [num_b,] , potential of size num_cols of cost_matrix + f: jnp.ndarray [num_a,] , potential of size num_rows of cost_matrix + g: jnp.ndarray [num_b,] , potential of size num_cols of cost_matrix axis: axis along which to integrate, returns marginal on other axis. 
Returns: @@ -336,19 +336,23 @@ def marginal_from_potentials( def marginal_from_scalings( self, - u: jax.Array, - v: jax.Array, + u: jnp.ndarray, + v: jnp.ndarray, axis: int = 0, - ) -> jax.Array: + ) -> jnp.ndarray: """Output marginal of transportation matrix from scalings.""" u, v = (v, u) if axis == 0 else (u, v) return u * self.apply_kernel(v, eps=self.epsilon, axis=axis) - def transport_from_potentials(self, f: jax.Array, g: jax.Array) -> jax.Array: + def transport_from_potentials( + self, f: jnp.ndarray, g: jnp.ndarray + ) -> jnp.ndarray: """Output transport matrix from potentials.""" return jnp.exp(self._center(f, g) / self.epsilon) - def transport_from_scalings(self, u: jax.Array, v: jax.Array) -> jax.Array: + def transport_from_scalings( + self, u: jnp.ndarray, v: jnp.ndarray + ) -> jnp.ndarray: """Output transport matrix from pair of scalings.""" return self.kernel_matrix * u[:, jnp.newaxis] * v[jnp.newaxis, :] @@ -357,17 +361,17 @@ def transport_from_scalings(self, u: jax.Array, v: jax.Array) -> jax.Array: def update_potential( self, - f: jax.Array, - g: jax.Array, - log_marginal: jax.Array, + f: jnp.ndarray, + g: jnp.ndarray, + log_marginal: jnp.ndarray, iteration: Optional[int] = None, axis: int = 0, - ) -> jax.Array: + ) -> jnp.ndarray: """Carry out one Sinkhorn update for potentials, i.e. in log space. Args: - f: jax.Array [num_a,] , potential of size num_rows of cost_matrix - g: jax.Array [num_b,] , potential of size num_cols of cost_matrix + f: jnp.ndarray [num_a,] , potential of size num_rows of cost_matrix + g: jnp.ndarray [num_b,] , potential of size num_cols of cost_matrix log_marginal: targeted marginal iteration: used to compute epsilon from schedule, if provided. axis: axis along which the update should be carried out. 
@@ -381,15 +385,15 @@ def update_potential( def update_scaling( self, - scaling: jax.Array, - marginal: jax.Array, + scaling: jnp.ndarray, + marginal: jnp.ndarray, iteration: Optional[int] = None, axis: int = 0, - ) -> jax.Array: + ) -> jnp.ndarray: """Carry out one Sinkhorn update for scalings, using kernel directly. Args: - scaling: jax.Array of num_a or num_b positive values. + scaling: jnp.ndarray of num_a or num_b positive values. marginal: targeted marginal iteration: used to compute epsilon from schedule, if provided. axis: axis along which the update should be carried out. @@ -402,13 +406,13 @@ def update_scaling( return marginal / jnp.where(app_kernel > 0, app_kernel, 1.0) # Helper functions - def _center(self, f: jax.Array, g: jax.Array) -> jax.Array: + def _center(self, f: jnp.ndarray, g: jnp.ndarray) -> jnp.ndarray: return f[:, jnp.newaxis] + g[jnp.newaxis, :] - self.cost_matrix def _softmax( - self, f: jax.Array, g: jax.Array, eps: float, vec: Optional[jax.Array], - axis: int - ) -> Tuple[jax.Array, jax.Array]: + self, f: jnp.ndarray, g: jnp.ndarray, eps: float, + vec: Optional[jnp.ndarray], axis: int + ) -> Tuple[jnp.ndarray, jnp.ndarray]: """Apply softmax row or column wise, weighted by vec.""" if vec is not None: if axis == 0: @@ -425,8 +429,8 @@ def _softmax( @functools.partial(jax.vmap, in_axes=[None, None, None, 0, None]) def _apply_transport_from_potentials( - self, f: jax.Array, g: jax.Array, vec: jax.Array, axis: int - ) -> jax.Array: + self, f: jnp.ndarray, g: jnp.ndarray, vec: jnp.ndarray, axis: int + ) -> jnp.ndarray: """Apply lse_kernel to arbitrary vector while keeping track of signs.""" lse_res, lse_sgn = self.apply_lse_kernel( f, g, self.epsilon, vec=vec, axis=axis @@ -437,11 +441,11 @@ def _apply_transport_from_potentials( # wrapper to allow default option for axis. 
def apply_transport_from_potentials( self, - f: jax.Array, - g: jax.Array, - vec: jax.Array, + f: jnp.ndarray, + g: jnp.ndarray, + vec: jnp.ndarray, axis: int = 0 - ) -> jax.Array: + ) -> jnp.ndarray: """Apply transport matrix computed from potentials to a (batched) vec. This approach does not instantiate the transport matrix itself, but uses @@ -452,9 +456,9 @@ def apply_transport_from_potentials( (b=..., return_sign=True) optional parameters of logsumexp. Args: - f: jax.Array [num_a,] , potential of size num_rows of cost_matrix - g: jax.Array [num_b,] , potential of size num_cols of cost_matrix - vec: jax.Array [batch, num_a or num_b], vector that will be multiplied + f: jnp.ndarray [num_a,] , potential of size num_rows of cost_matrix + g: jnp.ndarray [num_b,] , potential of size num_cols of cost_matrix + vec: jnp.ndarray [batch, num_a or num_b], vector that will be multiplied by transport matrix corresponding to potentials f, g, and geom. axis: axis to differentiate left (0) or right (1) multiply. @@ -469,7 +473,7 @@ def apply_transport_from_potentials( @functools.partial(jax.vmap, in_axes=[None, None, None, 0, None]) def _apply_transport_from_scalings( - self, u: jax.Array, v: jax.Array, vec: jax.Array, axis: int + self, u: jnp.ndarray, v: jnp.ndarray, vec: jnp.ndarray, axis: int ): u, v = (u, v * vec) if axis == 1 else (v, u * vec) return u * self.apply_kernel(v, eps=self.epsilon, axis=axis) @@ -477,20 +481,20 @@ def _apply_transport_from_scalings( # wrapper to allow default option for axis def apply_transport_from_scalings( self, - u: jax.Array, - v: jax.Array, - vec: jax.Array, + u: jnp.ndarray, + v: jnp.ndarray, + vec: jnp.ndarray, axis: int = 0 - ) -> jax.Array: + ) -> jnp.ndarray: """Apply transport matrix computed from scalings to a (batched) vec. This approach does not instantiate the transport matrix itself, but relies instead on the apply_kernel function. 
Args: - u: jax.Array [num_a,] , scaling of size num_rows of cost_matrix - v: jax.Array [num_b,] , scaling of size num_cols of cost_matrix - vec: jax.Array [batch, num_a or num_b], vector that will be multiplied + u: jnp.ndarray [num_a,] , scaling of size num_rows of cost_matrix + v: jnp.ndarray [num_b,] , scaling of size num_cols of cost_matrix + vec: jnp.ndarray [batch, num_a or num_b], vector that will be multiplied by transport matrix corresponding to scalings u, v, and geom. axis: axis to differentiate left (0) or right (1) multiply. @@ -503,7 +507,7 @@ def apply_transport_from_scalings( )[0, :] return self._apply_transport_from_scalings(u, v, vec, axis) - def potential_from_scaling(self, scaling: jax.Array) -> jax.Array: + def potential_from_scaling(self, scaling: jnp.ndarray) -> jnp.ndarray: """Compute dual potential vector from scaling vector. Args: @@ -514,7 +518,7 @@ def potential_from_scaling(self, scaling: jax.Array) -> jax.Array: """ return self.epsilon * jnp.log(scaling) - def scaling_from_potential(self, potential: jax.Array) -> jax.Array: + def scaling_from_potential(self, potential: jnp.ndarray) -> jnp.ndarray: """Compute scaling vector from dual potential. Args: @@ -528,7 +532,7 @@ def scaling_from_potential(self, potential: jax.Array) -> jax.Array: finite, jnp.exp(jnp.where(finite, potential / self.epsilon, 0.0)), 0.0 ) - def apply_square_cost(self, arr: jax.Array, axis: int = 0) -> jax.Array: + def apply_square_cost(self, arr: jnp.ndarray, axis: int = 0) -> jnp.ndarray: """Apply elementwise-square of cost matrix to array (vector or matrix). 
This function applies the ground geometry's cost matrix, to perform either @@ -549,11 +553,11 @@ def apply_square_cost(self, arr: jax.Array, axis: int = 0) -> jax.Array: def apply_cost( self, - arr: jax.Array, + arr: jnp.ndarray, axis: int = 0, - fn: Optional[Callable[[jax.Array], jax.Array]] = None, + fn: Optional[Callable[[jnp.ndarray], jnp.ndarray]] = None, **kwargs: Any - ) -> jax.Array: + ) -> jnp.ndarray: """Apply :attr:`cost_matrix` to array (vector or matrix). This function applies the ground geometry's cost matrix, to perform either @@ -562,7 +566,7 @@ def apply_cost( where C is [num_a, num_b] Args: - arr: jax.Array [num_a or num_b, p], vector that will be multiplied by + arr: jnp.ndarray [num_a or num_b, p], vector that will be multiplied by the cost matrix. axis: standard cost matrix if axis=1, transpose if 0 fn: function to apply to cost matrix element-wise before the dot product @@ -579,21 +583,21 @@ def apply_cost( def _apply_cost_to_vec( self, - vec: jax.Array, + vec: jnp.ndarray, axis: int = 0, fn=None, **_: Any, - ) -> jax.Array: + ) -> jnp.ndarray: """Apply ``[num_a, num_b]`` fn(cost) (or transpose) to vector. Args: - vec: jax.Array [num_a,] ([num_b,] if axis=1) vector + vec: jnp.ndarray [num_a,] ([num_b,] if axis=1) vector axis: axis on which the reduction is done. fn: function optionally applied to cost matrix element-wise, before the doc product Returns: - A jax.Array corresponding to cost x vector + A jnp.ndarray corresponding to cost x vector """ matrix = self.cost_matrix.T if axis == 0 else self.cost_matrix matrix = fn(matrix) if fn is not None else matrix @@ -621,7 +625,7 @@ def to_LRCGeometry( self, rank: int = 0, tol: float = 1e-2, - rng: Optional[jax.Array] = None, + rng: Optional[jnp.ndarray] = None, scale: float = 1. ) -> "low_rank.LRCGeometry": r"""Factorize the cost matrix using either SVD (full) or :cite:`indyk:19`. 
@@ -714,7 +718,7 @@ def to_LRCGeometry( ) def subset( - self, src_ixs: Optional[jax.Array], tgt_ixs: Optional[jax.Array], + self, src_ixs: Optional[jnp.ndarray], tgt_ixs: Optional[jnp.ndarray], **kwargs: Any ) -> "Geometry": """Subset rows or columns of a geometry. @@ -729,10 +733,10 @@ def subset( """ def subset_fn( - arr: Optional[jax.Array], - src_ixs: Optional[jax.Array], - tgt_ixs: Optional[jax.Array], - ) -> Optional[jax.Array]: + arr: Optional[jnp.ndarray], + src_ixs: Optional[jnp.ndarray], + tgt_ixs: Optional[jnp.ndarray], + ) -> Optional[jnp.ndarray]: if arr is None: return None if src_ixs is not None: @@ -751,8 +755,8 @@ def subset_fn( def mask( self, - src_mask: Optional[jax.Array], - tgt_mask: Optional[jax.Array], + src_mask: Optional[jnp.ndarray], + tgt_mask: Optional[jnp.ndarray], mask_value: float = 0., ) -> "Geometry": """Mask rows or columns of a geometry. @@ -776,10 +780,10 @@ def mask( """ def mask_fn( - arr: Optional[jax.Array], - src_mask: Optional[jax.Array], - tgt_mask: Optional[jax.Array], - ) -> Optional[jax.Array]: + arr: Optional[jnp.ndarray], + src_mask: Optional[jnp.ndarray], + tgt_mask: Optional[jnp.ndarray], + ) -> Optional[jnp.ndarray]: if arr is None: return arr assert arr.ndim == 2, arr.ndim @@ -797,12 +801,12 @@ def mask_fn( def _mask_subset_helper( self, - src_ixs: Optional[jax.Array], - tgt_ixs: Optional[jax.Array], + src_ixs: Optional[jnp.ndarray], + tgt_ixs: Optional[jnp.ndarray], *, fn: Callable[ - [Optional[jax.Array], Optional[jax.Array], Optional[jax.Array]], - Optional[jax.Array]], + [Optional[jnp.ndarray], Optional[jnp.ndarray], Optional[jnp.ndarray]], + Optional[jnp.ndarray]], propagate_mask: bool, **kwargs: Any, ) -> "Geometry": @@ -821,7 +825,7 @@ def _mask_subset_helper( ) @property - def src_mask(self) -> Optional[jax.Array]: + def src_mask(self) -> Optional[jnp.ndarray]: """Mask of shape ``[num_a,]`` to compute :attr:`cost_matrix` statistics. 
Specifically, it is used when computing: @@ -833,7 +837,7 @@ def src_mask(self) -> Optional[jax.Array]: return self._normalize_mask(self._src_mask, self.shape[0]) @property - def tgt_mask(self) -> Optional[jax.Array]: + def tgt_mask(self) -> Optional[jnp.ndarray]: """Mask of shape ``[num_b,]`` to compute :attr:`cost_matrix` statistics. Specifically, it is used when computing: @@ -859,22 +863,22 @@ def _masked_geom(self, mask_value: float = 0.) -> "Geometry": return self.mask(src_mask, tgt_mask, mask_value=mask_value) @property - def _n_normed_ones(self) -> jax.Array: + def _n_normed_ones(self) -> jnp.ndarray: """Normalized array of shape ``[num_a,]``.""" mask = self.src_mask arr = jnp.ones(self.shape[0]) if mask is None else mask return arr / jnp.sum(arr) @property - def _m_normed_ones(self) -> jax.Array: + def _m_normed_ones(self) -> jnp.ndarray: """Normalized array of shape ``[num_b,]``.""" mask = self.tgt_mask arr = jnp.ones(self.shape[1]) if mask is None else mask return arr / jnp.sum(arr) @staticmethod - def _normalize_mask(mask: Optional[Union[int, jax.Array]], - size: int) -> Optional[jax.Array]: + def _normalize_mask(mask: Optional[Union[int, jnp.ndarray]], + size: int) -> Optional[jnp.ndarray]: """Convert array of indices to a boolean mask.""" if mask is None: return None diff --git a/src/ott/geometry/graph.py b/src/ott/geometry/graph.py index ab0fe8768..c7dac0c99 100644 --- a/src/ott/geometry/graph.py +++ b/src/ott/geometry/graph.py @@ -48,7 +48,7 @@ class Graph(geometry.Geometry): def __init__( self, - laplacian: jax.Array, + laplacian: jnp.ndarray, t: float = 1e-3, n_steps: int = 100, numerical_scheme: Literal["backward_euler", @@ -66,7 +66,7 @@ def __init__( @classmethod def from_graph( cls, - G: jax.Array, + G: jnp.ndarray, t: Optional[float] = 1e-3, directed: bool = False, normalize: bool = False, @@ -113,10 +113,10 @@ def from_graph( def apply_kernel( self, - scaling: jax.Array, + scaling: jnp.ndarray, eps: Optional[float] = None, axis: int = 0, - ) 
-> jax.Array: + ) -> jnp.ndarray: r"""Apply :attr:`kernel_matrix` on positive scaling vector. Args: @@ -129,8 +129,8 @@ def apply_kernel( """ def conf_fn( - iteration: int, consts: Tuple[jax.Array, Optional[jax.Array]], - old_new: Tuple[jax.Array, jax.Array] + iteration: int, consts: Tuple[jnp.ndarray, Optional[jnp.ndarray]], + old_new: Tuple[jnp.ndarray, jnp.ndarray] ) -> bool: del iteration, consts @@ -143,9 +143,9 @@ def conf_fn( return (jnp.nanmax(f) - jnp.nanmin(f)) > self.tol def body_fn( - iteration: int, consts: Tuple[jax.Array, Optional[jax.Array]], - old_new: Tuple[jax.Array, jax.Array], compute_errors: bool - ) -> Tuple[jax.Array, jax.Array]: + iteration: int, consts: Tuple[jnp.ndarray, Optional[jnp.ndarray]], + old_new: Tuple[jnp.ndarray, jnp.ndarray], compute_errors: bool + ) -> Tuple[jnp.ndarray, jnp.ndarray]: del iteration, compute_errors L, scaled_lap = consts @@ -186,7 +186,7 @@ def body_fn( )[1] @property - def kernel_matrix(self) -> jax.Array: # noqa: D102 + def kernel_matrix(self) -> jnp.ndarray: # noqa: D102 n, _ = self.shape kernel = self.apply_kernel(jnp.eye(n)) # force symmetry because of numerical imprecision @@ -194,7 +194,7 @@ def kernel_matrix(self) -> jax.Array: # noqa: D102 return (kernel + kernel.T) * 0.5 @property - def cost_matrix(self) -> jax.Array: # noqa: D102 + def cost_matrix(self) -> jnp.ndarray: # noqa: D102 return -self.t * mu.safe_log(self.kernel_matrix) @property @@ -209,12 +209,12 @@ def _scale(self) -> float: ) @property - def _scaled_laplacian(self) -> jax.Array: + def _scaled_laplacian(self) -> jnp.ndarray: """Laplacian scaled by a constant, depending on the numerical scheme.""" return self._scale * self.laplacian @property - def _M(self) -> jax.Array: + def _M(self) -> jnp.ndarray: n, _ = self.shape return self._scaled_laplacian + jnp.eye(n) @@ -230,27 +230,29 @@ def is_symmetric(self) -> bool: # noqa: D102 def dtype(self) -> jnp.dtype: # noqa: D102 return self.laplacian.dtype - def transport_from_potentials(self, f: 
jax.Array, g: jax.Array) -> jax.Array: + def transport_from_potentials( + self, f: jnp.ndarray, g: jnp.ndarray + ) -> jnp.ndarray: """Not implemented.""" raise ValueError("Not implemented.") def apply_transport_from_potentials( self, - f: jax.Array, - g: jax.Array, - vec: jax.Array, + f: jnp.ndarray, + g: jnp.ndarray, + vec: jnp.ndarray, axis: int = 0 - ) -> jax.Array: + ) -> jnp.ndarray: """Since applying from potentials is not feasible in grids, use scalings.""" u, v = self.scaling_from_potential(f), self.scaling_from_potential(g) return self.apply_transport_from_scalings(u, v, vec, axis=axis) def marginal_from_potentials( self, - f: jax.Array, - g: jax.Array, + f: jnp.ndarray, + g: jnp.ndarray, axis: int = 0, - ) -> jax.Array: + ) -> jnp.ndarray: """Not implemented.""" raise ValueError("Not implemented.") diff --git a/src/ott/geometry/grid.py b/src/ott/geometry/grid.py index 3401f52c7..fd64500c9 100644 --- a/src/ott/geometry/grid.py +++ b/src/ott/geometry/grid.py @@ -71,7 +71,7 @@ class Grid(geometry.Geometry): def __init__( self, - x: Optional[Sequence[jax.Array]] = None, + x: Optional[Sequence[jnp.ndarray]] = None, grid_size: Optional[Sequence[int]] = None, cost_fns: Optional[Sequence[costs.CostFn]] = None, num_a: Optional[int] = None, @@ -146,12 +146,12 @@ def is_symmetric(self) -> bool: # noqa: D102 # Reimplemented functions to be used in regularized OT def apply_lse_kernel( self, - f: jax.Array, - g: jax.Array, + f: jnp.ndarray, + g: jnp.ndarray, eps: float, - vec: Optional[jax.Array] = None, + vec: Optional[jnp.ndarray] = None, axis: int = 0 - ) -> jax.Array: + ) -> jnp.ndarray: """Apply grid kernel in log space. See notes in parent class for use case. Reshapes vector inputs below as grids, applies kernels onto each slice, and @@ -160,10 +160,10 @@ def apply_lse_kernel( More implementation details in :cite:`schmitz:18`. 
Args: - f: jax.Array, a vector of potentials - g: jax.Array, a vector of potentials + f: jnp.ndarray, a vector of potentials + g: jnp.ndarray, a vector of potentials eps: float, regularization strength - vec: jax.Array, if needed, a vector onto which apply the kernel weighted + vec: jnp.ndarray, if needed, a vector onto which apply the kernel weighted by f and g. axis: axis (0 or 1) along which summation should be carried out. @@ -209,8 +209,8 @@ def _apply_lse_kernel_one_dimension(self, dimension, f, g, eps, vec=None): return jnp.transpose(softmax_res, indices), None def _apply_cost_to_vec( - self, vec: jax.Array, axis: int = 0, fn=None - ) -> jax.Array: + self, vec: jnp.ndarray, axis: int = 0, fn=None + ) -> jnp.ndarray: r"""Apply grid's cost matrix (without instantiating it) to a vector. The `apply_cost` operation on grids rests on the following identity. @@ -229,13 +229,13 @@ def _apply_cost_to_vec( summation while keeping dimensions. Args: - vec: jax.Array, flat vector of total size prod(grid_size). + vec: jnp.ndarray, flat vector of total size prod(grid_size). axis: axis 0 if applying transpose costs, 1 if using the original cost. fn: function optionally applied to cost matrix element-wise, before the dot product. Returns: - A jax.Array corresponding to cost x matrix + A jnp.ndarray corresponding to cost x matrix """ vec = jnp.reshape(vec, self.grid_size) accum_vec = jnp.zeros_like(vec) @@ -255,10 +255,10 @@ def _apply_cost_to_vec( def apply_kernel( self, - scaling: jax.Array, + scaling: jnp.ndarray, eps: Optional[float] = None, axis: Optional[int] = None - ) -> jax.Array: + ) -> jnp.ndarray: """Apply grid kernel on scaling vector. See notes in parent class for use. @@ -269,7 +269,7 @@ def apply_kernel( More implementation details in :cite:`schmitz:18`, Args: - scaling: jax.Array, a vector of scaling (>0) values. + scaling: jnp.ndarray, a vector of scaling (>0) values. 
eps: float, regularization strength axis: axis (0 or 1) along which summation should be carried out. @@ -289,7 +289,7 @@ def apply_kernel( return scaling.ravel() def transport_from_potentials( - self, f: jax.Array, g: jax.Array, axis: int = 0 + self, f: jnp.ndarray, g: jnp.ndarray, axis: int = 0 ) -> NoReturn: """Not implemented, use :meth:`apply_transport_from_potentials` instead.""" raise ValueError( @@ -300,7 +300,7 @@ def transport_from_potentials( ) def transport_from_scalings( - self, f: jax.Array, g: jax.Array, axis: int = 0 + self, f: jnp.ndarray, g: jnp.ndarray, axis: int = 0 ) -> NoReturn: """Not implemented, use :meth:`apply_transport_from_scalings` instead.""" raise ValueError( @@ -311,15 +311,15 @@ def transport_from_scalings( ) def subset( - self, src_ixs: Optional[jax.Array], tgt_ixs: Optional[jax.Array] + self, src_ixs: Optional[jnp.ndarray], tgt_ixs: Optional[jnp.ndarray] ) -> NoReturn: """Not implemented.""" raise NotImplementedError("Subsetting is not implemented for grids.") def mask( self, - src_mask: Optional[jax.Array], - tgt_mask: Optional[jax.Array], + src_mask: Optional[jnp.ndarray], + tgt_mask: Optional[jnp.ndarray], mask_value: float = 0., ) -> NoReturn: """Not implemented.""" diff --git a/src/ott/geometry/low_rank.py b/src/ott/geometry/low_rank.py index 750d8db62..1bfaeae0a 100644 --- a/src/ott/geometry/low_rank.py +++ b/src/ott/geometry/low_rank.py @@ -33,8 +33,8 @@ class LRCGeometry(geometry.Geometry): if :math:`C = AB^T` and :math:`D = EF^T` then :math:`C + D = [A,E][B,F]^T` Args: - cost_1: jax.Array[num_a, r] - cost_2: jax.Array[num_b, r] + cost_1: jnp.ndarray[num_a, r] + cost_2: jnp.ndarray[num_b, r] bias: constant added to entire cost matrix. scale: Value used to rescale the factors of the low-rank geometry. scale_cost: option to rescale the cost matrix. 
Implemented scalings are @@ -51,8 +51,8 @@ class LRCGeometry(geometry.Geometry): def __init__( self, - cost_1: jax.Array, - cost_2: jax.Array, + cost_1: jnp.ndarray, + cost_2: jnp.ndarray, bias: float = 0.0, scale_factor: float = 1.0, scale_cost: Union[bool, int, float, Literal["mean", "max_bound", @@ -69,13 +69,13 @@ def __init__( self.batch_size = batch_size @property - def cost_1(self) -> jax.Array: + def cost_1(self) -> jnp.ndarray: """First factor of the :attr:`cost_matrix`.""" scale_factor = jnp.sqrt(self._scale_factor * self.inv_scale_cost) return scale_factor * self._cost_1 @property - def cost_2(self) -> jax.Array: + def cost_2(self) -> jnp.ndarray: """Second factor of the :attr:`cost_matrix`.""" scale_factor = jnp.sqrt(self._scale_factor * self.inv_scale_cost) return scale_factor * self._cost_2 @@ -90,7 +90,7 @@ def cost_rank(self) -> int: # noqa: D102 return self._cost_1.shape[1] @property - def cost_matrix(self) -> jax.Array: + def cost_matrix(self) -> jnp.ndarray: """Materialize the cost matrix.""" return jnp.matmul(self.cost_1, self.cost_2.T) + self.bias @@ -107,7 +107,7 @@ def is_symmetric(self) -> bool: # noqa: D102 @property def inv_scale_cost(self) -> float: # noqa: D102 - if isinstance(self._scale_cost, (int, float, jax.Array)): + if isinstance(self._scale_cost, (int, float, jnp.ndarray)): return 1.0 / self._scale_cost self = self._masked_geom() if self._scale_cost == "max_bound": @@ -124,7 +124,7 @@ def inv_scale_cost(self) -> float: # noqa: D102 return 1.0 / self.compute_max_cost() raise ValueError(f"Scaling {self._scale_cost} not implemented.") - def apply_square_cost(self, arr: jax.Array, axis: int = 0) -> jax.Array: + def apply_square_cost(self, arr: jnp.ndarray, axis: int = 0) -> jnp.ndarray: """Apply elementwise-square of cost matrix to array (vector or matrix).""" (n, m), r = self.shape, self.cost_rank # When applying square of a LRCGeometry, one can either elementwise square @@ -142,15 +142,15 @@ def apply_square_cost(self, arr: 
jax.Array, axis: int = 0) -> jax.Array: def _apply_cost_to_vec( self, - vec: jax.Array, + vec: jnp.ndarray, axis: int = 0, - fn: Optional[Callable[[jax.Array], jax.Array]] = None, + fn: Optional[Callable[[jnp.ndarray], jnp.ndarray]] = None, is_linear: bool = False, - ) -> jax.Array: + ) -> jnp.ndarray: """Apply [num_a, num_b] fn(cost) (or transpose) to vector. Args: - vec: jax.Array [num_a,] ([num_b,] if axis=1) vector + vec: jnp.ndarray [num_a,] ([num_b,] if axis=1) vector axis: axis on which the reduction is done. fn: function optionally applied to cost matrix element-wise, before the doc product @@ -159,12 +159,12 @@ def _apply_cost_to_vec( for a heuristic to help determine if a function is linear. Returns: - A jax.Array corresponding to cost x vector + A jnp.ndarray corresponding to cost x vector """ def linear_apply( - vec: jax.Array, axis: int, fn: Callable[[jax.Array], jax.Array] - ) -> jax.Array: + vec: jnp.ndarray, axis: int, fn: Callable[[jnp.ndarray], jnp.ndarray] + ) -> jnp.ndarray: c1 = self.cost_1 if axis == 1 else self.cost_2 c2 = self.cost_2 if axis == 1 else self.cost_1 c2 = fn(c2) if fn is not None else c2 @@ -229,7 +229,7 @@ def to_LRCGeometry( self, rank: int = 0, tol: float = 1e-2, - rng: Optional[jax.Array] = None, + rng: Optional[jnp.ndarray] = None, scale: float = 1.0, ) -> "LRCGeometry": """Return self.""" @@ -241,14 +241,14 @@ def can_LRC(self): # noqa: D102 return True def subset( # noqa: D102 - self, src_ixs: Optional[jax.Array], tgt_ixs: Optional[jax.Array], + self, src_ixs: Optional[jnp.ndarray], tgt_ixs: Optional[jnp.ndarray], **kwargs: Any ) -> "LRCGeometry": def subset_fn( - arr: Optional[jax.Array], - ixs: Optional[jax.Array], - ) -> jax.Array: + arr: Optional[jnp.ndarray], + ixs: Optional[jnp.ndarray], + ) -> jnp.ndarray: return arr if arr is None or ixs is None else arr[jnp.atleast_1d(ixs)] return self._mask_subset_helper( @@ -257,15 +257,15 @@ def subset_fn( def mask( # noqa: D102 self, - src_mask: Optional[jax.Array], - 
tgt_mask: Optional[jax.Array], + src_mask: Optional[jnp.ndarray], + tgt_mask: Optional[jnp.ndarray], mask_value: float = 0., ) -> "LRCGeometry": def mask_fn( - arr: Optional[jax.Array], - mask: Optional[jax.Array], - ) -> Optional[jax.Array]: + arr: Optional[jnp.ndarray], + mask: Optional[jnp.ndarray], + ) -> Optional[jnp.ndarray]: if arr is None or mask is None: return arr return jnp.where(mask[:, None], arr, mask_value) @@ -278,11 +278,11 @@ def mask_fn( def _mask_subset_helper( self, - src_ixs: Optional[jax.Array], - tgt_ixs: Optional[jax.Array], + src_ixs: Optional[jnp.ndarray], + tgt_ixs: Optional[jnp.ndarray], *, - fn: Callable[[Optional[jax.Array], Optional[jax.Array]], - Optional[jax.Array]], + fn: Callable[[Optional[jnp.ndarray], Optional[jnp.ndarray]], + Optional[jnp.ndarray]], propagate_mask: bool, **kwargs: Any, ) -> "LRCGeometry": diff --git a/src/ott/geometry/pointcloud.py b/src/ott/geometry/pointcloud.py index c5d48a096..2050e1562 100644 --- a/src/ott/geometry/pointcloud.py +++ b/src/ott/geometry/pointcloud.py @@ -56,8 +56,8 @@ class PointCloud(geometry.Geometry): def __init__( self, - x: jax.Array, - y: Optional[jax.Array] = None, + x: jnp.ndarray, + y: Optional[jnp.ndarray] = None, cost_fn: Optional[costs.CostFn] = None, batch_size: Optional[int] = None, scale_cost: Union[bool, int, float, @@ -77,13 +77,13 @@ def __init__( self._scale_cost = "mean" if scale_cost is True else scale_cost @property - def _norm_x(self) -> Union[float, jax.Array]: + def _norm_x(self) -> Union[float, jnp.ndarray]: if self._axis_norm == 0: return self.cost_fn.norm(self.x) return 0. @property - def _norm_y(self) -> Union[float, jax.Array]: + def _norm_y(self) -> Union[float, jnp.ndarray]: if self._axis_norm == 0: return self.cost_fn.norm(self.y) return 0. 
@@ -98,14 +98,14 @@ def _check_LRC_dim(self): return n * m > (n + m) * d @property - def cost_matrix(self) -> Optional[jax.Array]: # noqa: D102 + def cost_matrix(self) -> Optional[jnp.ndarray]: # noqa: D102 if self.is_online: return None cost_matrix = self._compute_cost_matrix() return cost_matrix * self.inv_scale_cost @property - def kernel_matrix(self) -> Optional[jax.Array]: # noqa: D102 + def kernel_matrix(self) -> Optional[jnp.ndarray]: # noqa: D102 if self.is_online: return None return jnp.exp(-self.cost_matrix / self.epsilon) @@ -141,7 +141,7 @@ def cost_rank(self) -> int: # noqa: D102 @property def inv_scale_cost(self) -> float: # noqa: D102 - if isinstance(self._scale_cost, (int, float, jax.Array)): + if isinstance(self._scale_cost, (int, float, jnp.ndarray)): return 1.0 / self._scale_cost self = self._masked_geom() if self._scale_cost == "max_cost": @@ -183,7 +183,7 @@ def inv_scale_cost(self) -> float: # noqa: D102 ) raise ValueError(f"Scaling {self._scale_cost} not implemented.") - def _compute_cost_matrix(self) -> jax.Array: + def _compute_cost_matrix(self) -> jnp.ndarray: cost_matrix = self.cost_fn.all_pairs_pairwise(self.x, self.y) if self._axis_norm is not None: cost_matrix += self._norm_x[:, jnp.newaxis] + self._norm_y[jnp.newaxis, :] @@ -191,12 +191,12 @@ def _compute_cost_matrix(self) -> jax.Array: def apply_lse_kernel( # noqa: D102 self, - f: jax.Array, - g: jax.Array, + f: jnp.ndarray, + g: jnp.ndarray, eps: float, - vec: Optional[jax.Array] = None, + vec: Optional[jnp.ndarray] = None, axis: int = 0 - ) -> jax.Array: + ) -> jnp.ndarray: def body0(carry, i: int): f, g, eps, vec = carry @@ -278,10 +278,10 @@ def finalize(i: int): def apply_kernel( # noqa: D102 self, - scaling: jax.Array, + scaling: jnp.ndarray, eps: Optional[float] = None, axis: int = 0 - ) -> jax.Array: + ) -> jnp.ndarray: if eps is None: eps = self.epsilon @@ -303,8 +303,8 @@ def apply_kernel( # noqa: D102 ) def transport_from_potentials( # noqa: D102 - self, f: jax.Array, g: 
jax.Array - ) -> jax.Array: + self, f: jnp.ndarray, g: jnp.ndarray + ) -> jnp.ndarray: if not self.is_online: return super().transport_from_potentials(f, g) transport = jax.vmap( @@ -317,8 +317,8 @@ def transport_from_potentials( # noqa: D102 ) def transport_from_scalings( # noqa: D102 - self, u: jax.Array, v: jax.Array - ) -> jax.Array: + self, u: jnp.ndarray, v: jnp.ndarray + ) -> jnp.ndarray: if not self.is_online: return super().transport_from_scalings(u, v) transport = jax.vmap( @@ -342,11 +342,11 @@ def transport_from_scalings( # noqa: D102 def apply_cost( self, - arr: jax.Array, + arr: jnp.ndarray, axis: int = 0, - fn: Optional[Callable[[jax.Array], jax.Array]] = None, + fn: Optional[Callable[[jnp.ndarray], jnp.ndarray]] = None, is_linear: bool = False, - ) -> jax.Array: + ) -> jnp.ndarray: """Apply cost matrix to array (vector or matrix). This function applies the geometry's cost matrix, to perform either @@ -356,7 +356,7 @@ def apply_cost( application of fn to each entry of the :attr:`cost_matrix`. Args: - arr: jax.Array [num_a or num_b, batch], vector that will be multiplied + arr: jnp.ndarray [num_a or num_b, batch], vector that will be multiplied by the cost matrix. axis: standard cost matrix if axis=1, transpose if 0. fn: function optionally applied to cost matrix element-wise, before the @@ -367,7 +367,7 @@ def apply_cost( for a heuristic to help determine if a function is linear. Returns: - A jax.Array, [num_b, batch] if axis=0 or [num_a, batch] if axis=1 + A jnp.ndarray, [num_b, batch] if axis=0 or [num_a, batch] if axis=1 """ # switch to efficient computation for the squared euclidean case. 
if self.is_squared_euclidean and (fn is None or is_linear): @@ -375,7 +375,9 @@ def apply_cost( return self._apply_cost(arr, axis, fn=fn) - def _apply_cost(self, arr: jax.Array, axis: int = 0, fn=None) -> jax.Array: + def _apply_cost( + self, arr: jnp.ndarray, axis: int = 0, fn=None + ) -> jnp.ndarray: """See :meth:`apply_cost`.""" if not self.is_online: return super().apply_cost(arr, axis, fn) @@ -399,24 +401,24 @@ def _apply_cost(self, arr: jax.Array, axis: int = 0, fn=None) -> jax.Array: def vec_apply_cost( self, - arr: jax.Array, + arr: jnp.ndarray, axis: int = 0, - fn: Optional[Callable[[jax.Array], jax.Array]] = None - ) -> jax.Array: + fn: Optional[Callable[[jnp.ndarray], jnp.ndarray]] = None + ) -> jnp.ndarray: """Apply the geometry's cost matrix in a vectorized way. This function can be used when the cost matrix is squared euclidean and ``fn`` is a linear function. Args: - arr: jax.Array [num_a or num_b, p], vector that will be multiplied + arr: jnp.ndarray [num_a or num_b, p], vector that will be multiplied by the cost matrix. axis: standard cost matrix if axis=1, transport if 0. fn: function optionally applied to cost matrix element-wise, before the application. Returns: - A jax.Array, [num_b, p] if axis=0 or [num_a, p] if axis=1 + A jnp.ndarray, [num_b, p] if axis=0 or [num_a, p] if axis=1 """ assert self.is_squared_euclidean, "Cost matrix is not a squared Euclidean." rank = arr.ndim @@ -432,7 +434,7 @@ def vec_apply_cost( applied_cost = fn(applied_cost) return self.inv_scale_cost * applied_cost - def _leading_slice(self, t: jax.Array, i: int) -> jax.Array: + def _leading_slice(self, t: jnp.ndarray, i: int) -> jnp.ndarray: start_indices = [i * self.batch_size] + (t.ndim - 1) * [0] slice_sizes = [self.batch_size] + list(t.shape[1:]) return jax.lax.dynamic_slice(t, start_indices, slice_sizes) @@ -523,18 +525,18 @@ def finalize(i: int): f"Scaling method {summary} does not exist for online mode." 
) - def barycenter(self, weights: jax.Array) -> jax.Array: + def barycenter(self, weights: jnp.ndarray) -> jnp.ndarray: """Compute barycenter of points in self.x using weights.""" return self.cost_fn.barycenter(self.x, weights)[0] @classmethod def prepare_divergences( cls, - x: jax.Array, - y: jax.Array, + x: jnp.ndarray, + y: jnp.ndarray, static_b: bool = False, - src_mask: Optional[jax.Array] = None, - tgt_mask: Optional[jax.Array] = None, + src_mask: Optional[jnp.ndarray] = None, + tgt_mask: Optional[jnp.ndarray] = None, **kwargs: Any ) -> Tuple["PointCloud", ...]: """Instantiate the geometries used for a divergence computation.""" @@ -638,14 +640,14 @@ def _sqeucl_to_lr(self, scale: float = 1.0) -> low_rank.LRCGeometry: ) def subset( # noqa: D102 - self, src_ixs: Optional[jax.Array], tgt_ixs: Optional[jax.Array], + self, src_ixs: Optional[jnp.ndarray], tgt_ixs: Optional[jnp.ndarray], **kwargs: Any ) -> "PointCloud": def subset_fn( - arr: Optional[jax.Array], - ixs: Optional[jax.Array], - ) -> jax.Array: + arr: Optional[jnp.ndarray], + ixs: Optional[jnp.ndarray], + ) -> jnp.ndarray: return arr if arr is None or ixs is None else arr[jnp.atleast_1d(ixs)] return self._mask_subset_helper( @@ -654,15 +656,15 @@ def subset_fn( def mask( # noqa: D102 self, - src_mask: Optional[jax.Array], - tgt_mask: Optional[jax.Array], + src_mask: Optional[jnp.ndarray], + tgt_mask: Optional[jnp.ndarray], mask_value: float = 0., ) -> "PointCloud": def mask_fn( - arr: Optional[jax.Array], - mask: Optional[jax.Array], - ) -> Optional[jax.Array]: + arr: Optional[jnp.ndarray], + mask: Optional[jnp.ndarray], + ) -> Optional[jnp.ndarray]: if arr is None or mask is None: return arr return jnp.where(mask[:, None], arr, mask_value) @@ -675,11 +677,11 @@ def mask_fn( def _mask_subset_helper( self, - src_ixs: Optional[jax.Array], - tgt_ixs: Optional[jax.Array], + src_ixs: Optional[jnp.ndarray], + tgt_ixs: Optional[jnp.ndarray], *, - fn: Callable[[Optional[jax.Array], Optional[jax.Array]], - 
Optional[jax.Array]], + fn: Callable[[Optional[jnp.ndarray], Optional[jnp.ndarray]], + Optional[jnp.ndarray]], propagate_mask: bool, **kwargs: Any, ) -> "PointCloud": @@ -765,18 +767,18 @@ def _apply_cost_xy(x, y, norm_x, norm_y, vec, cost_fn, scale_cost, fn=None): fn(cost) matrix (or transpose) to vector. Args: - x: jax.Array [num_a, d], first pointcloud - y: jax.Array [num_b, d], second pointcloud - norm_x: jax.Array [num_a,], (squared) norm as defined in by cost_fn - norm_y: jax.Array [num_b,], (squared) norm as defined in by cost_fn - vec: jax.Array [num_a,] ([num_b,] if axis=1 from `apply_cost`) vector + x: jnp.ndarray [num_a, d], first pointcloud + y: jnp.ndarray [num_b, d], second pointcloud + norm_x: jnp.ndarray [num_a,], (squared) norm as defined in by cost_fn + norm_y: jnp.ndarray [num_b,], (squared) norm as defined in by cost_fn + vec: jnp.ndarray [num_a,] ([num_b,] if axis=1 from `apply_cost`) vector cost_fn: a CostFn function between two points in dimension d. scale_cost: scaling factor of the cost matrix. fn: function optionally applied to cost matrix element-wise, before the apply. 
Returns: - A jax.Array corresponding to cost x vector + A jnp.ndarray corresponding to cost x vector """ c = _cost(x, y, norm_x, norm_y, cost_fn, scale_cost) return jnp.dot(c, vec) if fn is None else jnp.dot(fn(c), vec) diff --git a/src/ott/geometry/segment.py b/src/ott/geometry/segment.py index 5e2c764c8..20a1ee92b 100644 --- a/src/ott/geometry/segment.py +++ b/src/ott/geometry/segment.py @@ -21,15 +21,15 @@ def segment_point_cloud( - x: jax.Array, - a: Optional[jax.Array] = None, + x: jnp.ndarray, + a: Optional[jnp.ndarray] = None, num_segments: Optional[int] = None, max_measure_size: Optional[int] = None, - segment_ids: Optional[jax.Array] = None, + segment_ids: Optional[jnp.ndarray] = None, indices_are_sorted: bool = False, num_per_segment: Optional[Tuple[int, ...]] = None, - padding_vector: Optional[jax.Array] = None -) -> Tuple[jax.Array, jax.Array]: + padding_vector: Optional[jnp.ndarray] = None +) -> Tuple[jnp.ndarray, jnp.ndarray]: """Segment and pad as needed the entries of a point cloud. 
There are two interfaces: @@ -129,20 +129,21 @@ def segment_point_cloud( def _segment_interface( - x: jax.Array, - y: jax.Array, - eval_fn: Callable[[jax.Array, jax.Array, jax.Array, jax.Array], jax.Array], + x: jnp.ndarray, + y: jnp.ndarray, + eval_fn: Callable[[jnp.ndarray, jnp.ndarray, jnp.ndarray, jnp.ndarray], + jnp.ndarray], num_segments: Optional[int] = None, max_measure_size: Optional[int] = None, - segment_ids_x: Optional[jax.Array] = None, - segment_ids_y: Optional[jax.Array] = None, + segment_ids_x: Optional[jnp.ndarray] = None, + segment_ids_y: Optional[jnp.ndarray] = None, indices_are_sorted: bool = False, - num_per_segment_x: Optional[jax.Array] = None, - num_per_segment_y: Optional[jax.Array] = None, - weights_x: Optional[jax.Array] = None, - weights_y: Optional[jax.Array] = None, - padding_vector: Optional[jax.Array] = None, -) -> jax.Array: + num_per_segment_x: Optional[jnp.ndarray] = None, + num_per_segment_y: Optional[jnp.ndarray] = None, + weights_x: Optional[jnp.ndarray] = None, + weights_y: Optional[jnp.ndarray] = None, + padding_vector: Optional[jnp.ndarray] = None, +) -> jnp.ndarray: """Wrapper to segment two point clouds and return parallel evaluations. Utility function that segments two point clouds using the approach outlined diff --git a/src/ott/initializers/linear/initializers.py b/src/ott/initializers/linear/initializers.py index 58744cfb0..bc4871841 100644 --- a/src/ott/initializers/linear/initializers.py +++ b/src/ott/initializers/linear/initializers.py @@ -36,8 +36,8 @@ def init_dual_a( self, ot_prob: linear_problem.LinearProblem, lse_mode: bool, - rng: Optional[jax.Array] = None, - ) -> jax.Array: + rng: Optional[jnp.ndarray] = None, + ) -> jnp.ndarray: """Initialize Sinkhorn potential/scaling f_u. 
Args: @@ -54,8 +54,8 @@ def init_dual_b( self, ot_prob: linear_problem.LinearProblem, lse_mode: bool, - rng: Optional[jax.Array] = None, - ) -> jax.Array: + rng: Optional[jnp.ndarray] = None, + ) -> jnp.ndarray: """Initialize Sinkhorn potential/scaling g_v. Args: @@ -70,11 +70,11 @@ def init_dual_b( def __call__( self, ot_prob: linear_problem.LinearProblem, - a: Optional[jax.Array], - b: Optional[jax.Array], + a: Optional[jnp.ndarray], + b: Optional[jnp.ndarray], lse_mode: bool, - rng: Optional[jax.Array] = None, - ) -> Tuple[jax.Array, jax.Array]: + rng: Optional[jnp.ndarray] = None, + ) -> Tuple[jnp.ndarray, jnp.ndarray]: """Initialize Sinkhorn potentials/scalings f_u and g_v. Args: @@ -128,8 +128,8 @@ def init_dual_a( # noqa: D102 self, ot_prob: linear_problem.LinearProblem, lse_mode: bool, - rng: Optional[jax.Array] = None, - ) -> jax.Array: + rng: Optional[jnp.ndarray] = None, + ) -> jnp.ndarray: del rng return jnp.zeros_like(ot_prob.a) if lse_mode else jnp.ones_like(ot_prob.a) @@ -137,8 +137,8 @@ def init_dual_b( # noqa: D102 self, ot_prob: linear_problem.LinearProblem, lse_mode: bool, - rng: Optional[jax.Array] = None, - ) -> jax.Array: + rng: Optional[jnp.ndarray] = None, + ) -> jnp.ndarray: del rng return jnp.zeros_like(ot_prob.b) if lse_mode else jnp.ones_like(ot_prob.b) @@ -158,8 +158,8 @@ def init_dual_a( # noqa: D102 self, ot_prob: linear_problem.LinearProblem, lse_mode: bool, - rng: Optional[jax.Array] = None, - ) -> jax.Array: + rng: Optional[jnp.ndarray] = None, + ) -> jnp.ndarray: # import Gaussian here due to circular imports from ott.tools.gaussian_mixture import gaussian @@ -207,8 +207,8 @@ def __init__( self.vectorized_update = vectorized_update def _init_sorting_dual( - self, modified_cost: jax.Array, init_f: jax.Array - ) -> jax.Array: + self, modified_cost: jnp.ndarray, init_f: jnp.ndarray + ) -> jnp.ndarray: """Run DualSort algorithm. 
Args: @@ -221,15 +221,15 @@ def _init_sorting_dual( """ def body_fn( - state: Tuple[jax.Array, float, int] - ) -> Tuple[jax.Array, float, int]: + state: Tuple[jnp.ndarray, float, int] + ) -> Tuple[jnp.ndarray, float, int]: prev_f, _, it = state new_f = fn(prev_f, modified_cost) diff = jnp.sum((new_f - prev_f) ** 2) it += 1 return new_f, diff, it - def cond_fn(state: Tuple[jax.Array, float, int]) -> bool: + def cond_fn(state: Tuple[jnp.ndarray, float, int]) -> bool: _, diff, it = state return jnp.logical_and(diff > self.tolerance, it < self.max_iter) @@ -245,9 +245,9 @@ def init_dual_a( self, ot_prob: linear_problem.LinearProblem, lse_mode: bool, - rng: Optional[jax.Array] = None, - init_f: Optional[jax.Array] = None, - ) -> jax.Array: + rng: Optional[jnp.ndarray] = None, + init_f: Optional[jnp.ndarray] = None, + ) -> jnp.ndarray: """Apply DualSort algorithm. Args: @@ -324,8 +324,8 @@ def init_dual_a( # noqa: D102 self, ot_prob: linear_problem.LinearProblem, lse_mode: bool, - rng: Optional[jax.Array] = None, - ) -> jax.Array: + rng: Optional[jnp.ndarray] = None, + ) -> jnp.ndarray: from ott.solvers import linear assert isinstance( @@ -373,7 +373,9 @@ def tree_flatten(self) -> Tuple[Sequence[Any], Dict[str, Any]]: # noqa: D102 }) -def _vectorized_update(f: jax.Array, modified_cost: jax.Array) -> jax.Array: +def _vectorized_update( + f: jnp.ndarray, modified_cost: jnp.ndarray +) -> jnp.ndarray: """Inner loop DualSort Update. Args: @@ -386,7 +388,9 @@ def _vectorized_update(f: jax.Array, modified_cost: jax.Array) -> jax.Array: return jnp.min(modified_cost + f[None, :], axis=1) -def _coordinate_update(f: jax.Array, modified_cost: jax.Array) -> jax.Array: +def _coordinate_update( + f: jnp.ndarray, modified_cost: jnp.ndarray +) -> jnp.ndarray: """Coordinate-wise updates within inner loop. Args: @@ -397,7 +401,7 @@ def _coordinate_update(f: jax.Array, modified_cost: jax.Array) -> jax.Array: updated potential vector, f. 
""" - def body_fn(i: int, f: jax.Array) -> jax.Array: + def body_fn(i: int, f: jnp.ndarray) -> jnp.ndarray: new_f = jnp.min(modified_cost[i, :] + f) return f.at[i].set(new_f) diff --git a/src/ott/initializers/linear/initializers_lr.py b/src/ott/initializers/linear/initializers_lr.py index 9eb8e1231..b1f70d912 100644 --- a/src/ott/initializers/linear/initializers_lr.py +++ b/src/ott/initializers/linear/initializers_lr.py @@ -67,11 +67,11 @@ def __init__(self, rank: int, **kwargs: Any): def init_q( self, ot_prob: Problem_t, - rng: jax.Array, + rng: jnp.ndarray, *, - init_g: jax.Array, + init_g: jnp.ndarray, **kwargs: Any, - ) -> jax.Array: + ) -> jnp.ndarray: """Initialize the low-rank factor :math:`Q`. Args: @@ -88,11 +88,11 @@ def init_q( def init_r( self, ot_prob: Problem_t, - rng: jax.Array, + rng: jnp.ndarray, *, - init_g: jax.Array, + init_g: jnp.ndarray, **kwargs: Any, - ) -> jax.Array: + ) -> jnp.ndarray: """Initialize the low-rank factor :math:`R`. Args: @@ -109,9 +109,9 @@ def init_r( def init_g( self, ot_prob: Problem_t, - rng: jax.Array, + rng: jnp.ndarray, **kwargs: Any, - ) -> jax.Array: + ) -> jnp.ndarray: """Initialize the low-rank factor :math:`g`. Args: @@ -165,13 +165,13 @@ def from_solver( def __call__( self, ot_prob: Problem_t, - q: Optional[jax.Array] = None, - r: Optional[jax.Array] = None, - g: Optional[jax.Array] = None, + q: Optional[jnp.ndarray] = None, + r: Optional[jnp.ndarray] = None, + g: Optional[jnp.ndarray] = None, *, - rng: Optional[jax.Array] = None, + rng: Optional[jnp.ndarray] = None, **kwargs: Any - ) -> Tuple[jax.Array, jax.Array, jax.Array]: + ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]: """Initialize the factors :math:`Q`, :math:`R` and :math:`g`. 
Args: @@ -232,11 +232,11 @@ class RandomInitializer(LRInitializer): def init_q( # noqa: D102 self, ot_prob: Problem_t, - rng: jax.Array, + rng: jnp.ndarray, *, - init_g: jax.Array, + init_g: jnp.ndarray, **kwargs: Any, - ) -> jax.Array: + ) -> jnp.ndarray: del kwargs, init_g a = ot_prob.a init_q = jnp.abs(jax.random.normal(rng, (a.shape[0], self.rank))) @@ -245,11 +245,11 @@ def init_q( # noqa: D102 def init_r( # noqa: D102 self, ot_prob: Problem_t, - rng: jax.Array, + rng: jnp.ndarray, *, - init_g: jax.Array, + init_g: jnp.ndarray, **kwargs: Any, - ) -> jax.Array: + ) -> jnp.ndarray: del kwargs, init_g b = ot_prob.b init_r = jnp.abs(jax.random.normal(rng, (b.shape[0], self.rank))) @@ -258,9 +258,9 @@ def init_r( # noqa: D102 def init_g( # noqa: D102 self, ot_prob: Problem_t, - rng: jax.Array, + rng: jnp.ndarray, **kwargs: Any, - ) -> jax.Array: + ) -> jnp.ndarray: del kwargs init_g = jnp.abs(jax.random.uniform(rng, (self.rank,))) + 1. return init_g / jnp.sum(init_g) @@ -278,10 +278,10 @@ class Rank2Initializer(LRInitializer): def _compute_factor( self, ot_prob: Problem_t, - init_g: jax.Array, + init_g: jnp.ndarray, *, which: Literal["q", "r"], - ) -> jax.Array: + ) -> jnp.ndarray: a, b = ot_prob.a, ot_prob.b marginal = a if which == "q" else b n, r = marginal.shape[0], self.rank @@ -305,31 +305,31 @@ def _compute_factor( def init_q( # noqa: D102 self, ot_prob: Problem_t, - rng: jax.Array, + rng: jnp.ndarray, *, - init_g: jax.Array, + init_g: jnp.ndarray, **kwargs: Any, - ) -> jax.Array: + ) -> jnp.ndarray: del rng, kwargs return self._compute_factor(ot_prob, init_g, which="q") def init_r( # noqa: D102 self, ot_prob: Problem_t, - rng: jax.Array, + rng: jnp.ndarray, *, - init_g: jax.Array, + init_g: jnp.ndarray, **kwargs: Any, - ) -> jax.Array: + ) -> jnp.ndarray: del rng, kwargs return self._compute_factor(ot_prob, init_g, which="r") def init_g( # noqa: D102 self, ot_prob: Problem_t, - rng: jax.Array, + rng: jnp.ndarray, **kwargs: Any, - ) -> jax.Array: + ) -> 
jnp.ndarray: del rng, kwargs return jnp.ones((self.rank,)) / self.rank @@ -364,7 +364,7 @@ def __init__( self._sinkhorn_kwargs = {} if sinkhorn_kwargs is None else sinkhorn_kwargs @staticmethod - def _extract_array(geom: geometry.Geometry, *, first: bool) -> jax.Array: + def _extract_array(geom: geometry.Geometry, *, first: bool) -> jnp.ndarray: if isinstance(geom, pointcloud.PointCloud): return geom.x if first else geom.y if isinstance(geom, low_rank.LRCGeometry): @@ -376,12 +376,12 @@ def _extract_array(geom: geometry.Geometry, *, first: bool) -> jax.Array: def _compute_factor( self, ot_prob: Problem_t, - rng: jax.Array, + rng: jnp.ndarray, *, - init_g: jax.Array, + init_g: jnp.ndarray, which: Literal["q", "r"], **kwargs: Any, - ) -> jax.Array: + ) -> jnp.ndarray: from ott.problems.linear import linear_problem from ott.problems.quadratic import quadratic_problem from ott.solvers.linear import sinkhorn @@ -418,11 +418,11 @@ def _compute_factor( def init_q( # noqa: D102 self, ot_prob: Problem_t, - rng: jax.Array, + rng: jnp.ndarray, *, - init_g: jax.Array, + init_g: jnp.ndarray, **kwargs: Any, - ) -> jax.Array: + ) -> jnp.ndarray: return self._compute_factor( ot_prob, rng, init_g=init_g, which="q", **kwargs ) @@ -430,11 +430,11 @@ def init_q( # noqa: D102 def init_r( # noqa: D102 self, ot_prob: Problem_t, - rng: jax.Array, + rng: jnp.ndarray, *, - init_g: jax.Array, + init_g: jnp.ndarray, **kwargs: Any, - ) -> jax.Array: + ) -> jnp.ndarray: return self._compute_factor( ot_prob, rng, init_g=init_g, which="r", **kwargs ) @@ -442,9 +442,9 @@ def init_r( # noqa: D102 def init_g( # noqa: D102 self, ot_prob: Problem_t, - rng: jax.Array, + rng: jnp.ndarray, **kwargs: Any, - ) -> jax.Array: + ) -> jnp.ndarray: del rng, kwargs return jnp.ones((self.rank,)) / self.rank @@ -498,25 +498,25 @@ def __init__( class Constants(NamedTuple): # noqa: D106 solver: "sinkhorn.Sinkhorn" geom: geometry.Geometry # (n, n) - marginal: jax.Array # (n,) - g: jax.Array # (r,) + marginal: 
jnp.ndarray # (n,) + g: jnp.ndarray # (r,) gamma: float threshold: float class State(NamedTuple): # noqa: D106 - factor: jax.Array - criterions: jax.Array + factor: jnp.ndarray + criterions: jnp.ndarray crossed_threshold: bool def _compute_factor( self, ot_prob: Problem_t, - rng: jax.Array, + rng: jnp.ndarray, *, - init_g: jax.Array, + init_g: jnp.ndarray, which: Literal["q", "r"], **kwargs: Any, - ) -> jax.Array: + ) -> jnp.ndarray: from ott.problems.linear import linear_problem from ott.problems.quadratic import quadratic_problem from ott.solvers.linear import sinkhorn diff --git a/src/ott/initializers/quadratic/initializers.py b/src/ott/initializers/quadratic/initializers.py index 323570770..795e81ccc 100644 --- a/src/ott/initializers/quadratic/initializers.py +++ b/src/ott/initializers/quadratic/initializers.py @@ -125,7 +125,9 @@ class QuadraticInitializer(BaseQuadraticInitializer): defaults to the product coupling :math:`ab^T`. """ - def __init__(self, init_coupling: Optional[jax.Array] = None, **kwargs: Any): + def __init__( + self, init_coupling: Optional[jnp.ndarray] = None, **kwargs: Any + ): super().__init__(**kwargs) self.init_coupling = init_coupling diff --git a/src/ott/math/fixed_point_loop.py b/src/ott/math/fixed_point_loop.py index 5c8b7b94d..9034eba62 100644 --- a/src/ott/math/fixed_point_loop.py +++ b/src/ott/math/fixed_point_loop.py @@ -179,7 +179,7 @@ def fixpoint_iter_bwd( # The tree may contain some python floats g_constants = jax.tree_util.tree_map( lambda x: jnp.zeros_like(x, dtype=x.dtype) - if isinstance(x, (np.ndarray, jax.Array)) else 0, constants + if isinstance(x, (np.ndarray, jnp.ndarray)) else 0, constants ) def bwd_cond_fn(iteration_g_gconst): diff --git a/src/ott/math/matrix_square_root.py b/src/ott/math/matrix_square_root.py index 5089f14a0..4a0177780 100644 --- a/src/ott/math/matrix_square_root.py +++ b/src/ott/math/matrix_square_root.py @@ -25,13 +25,13 @@ @functools.partial(jax.custom_vjp, nondiff_argnums=(1, 2, 3, 4, 5)) def 
sqrtm( - x: jax.Array, + x: jnp.ndarray, threshold: float = 1e-6, min_iterations: int = 0, inner_iterations: int = 10, max_iterations: int = 1000, regularization: float = 1e-6 -) -> Tuple[jax.Array, jax.Array, jax.Array]: +) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]: """Higham algorithm to compute matrix square root of p.d. matrix. See :cite:`higham:97`, eq. 2.6b @@ -118,10 +118,10 @@ def new_err(x, norm_x, y): def solve_sylvester_bartels_stewart( - a: jax.Array, - b: jax.Array, - c: jax.Array, -) -> jax.Array: + a: jnp.ndarray, + b: jnp.ndarray, + c: jnp.ndarray, +) -> jnp.ndarray: """Solve the real Sylvester equation AX - XB = C using Bartels-Stewart.""" # See https://nhigham.com/2020/09/01/what-is-the-sylvester-equation/ for # discussion of the algorithm (but note that in the derivation, the sign on @@ -153,13 +153,14 @@ def solve_sylvester_bartels_stewart( def sqrtm_fwd( - x: jax.Array, + x: jnp.ndarray, threshold: float, min_iterations: int, inner_iterations: int, max_iterations: int, regularization: float, -) -> Tuple[Tuple[jax.Array, jax.Array, jax.Array], Tuple[jax.Array, jax.Array]]: +) -> Tuple[Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray], Tuple[jnp.ndarray, + jnp.ndarray]]: """Forward pass of custom VJP.""" sqrt_x, inv_sqrt_x, errors = sqrtm( x=x, @@ -178,9 +179,9 @@ def sqrtm_bwd( inner_iterations: int, max_iterations: int, regularization: float, - residual: Tuple[jax.Array, jax.Array], - cotangent: Tuple[jax.Array, jax.Array, jax.Array], -) -> Tuple[jax.Array]: + residual: Tuple[jnp.ndarray, jnp.ndarray], + cotangent: Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray], +) -> Tuple[jnp.ndarray]: """Compute the derivative by solving a Sylvester equation.""" del threshold, min_iterations, inner_iterations, \ max_iterations, regularization @@ -236,13 +237,13 @@ def sqrtm_bwd( @functools.partial(jax.custom_vjp, nondiff_argnums=(1, 2, 3, 4, 5)) def sqrtm_only( # noqa: D103 - x: jax.Array, + x: jnp.ndarray, threshold: float = 1e-6, min_iterations: int = 0, 
inner_iterations: int = 10, max_iterations: int = 1000, regularization: float = 1e-6 -) -> jax.Array: +) -> jnp.ndarray: return sqrtm( x, threshold, min_iterations, inner_iterations, max_iterations, regularization @@ -250,9 +251,9 @@ def sqrtm_only( # noqa: D103 def sqrtm_only_fwd( # noqa: D103 - x: jax.Array, threshold: float, min_iterations: int, + x: jnp.ndarray, threshold: float, min_iterations: int, inner_iterations: int, max_iterations: int, regularization: float -) -> Tuple[jax.Array, jax.Array]: +) -> Tuple[jnp.ndarray, jnp.ndarray]: sqrt_x = sqrtm( x, threshold, min_iterations, inner_iterations, max_iterations, regularization @@ -262,9 +263,9 @@ def sqrtm_only_fwd( # noqa: D103 def sqrtm_only_bwd( # noqa: D103 threshold: float, min_iterations: int, inner_iterations: int, - max_iterations: int, regularization: float, sqrt_x: jax.Array, - cotangent: jax.Array -) -> Tuple[jax.Array]: + max_iterations: int, regularization: float, sqrt_x: jnp.ndarray, + cotangent: jnp.ndarray +) -> Tuple[jnp.ndarray]: del threshold, min_iterations, inner_iterations, \ max_iterations, regularization vjp = jnp.swapaxes( @@ -282,13 +283,13 @@ def sqrtm_only_bwd( # noqa: D103 @functools.partial(jax.custom_vjp, nondiff_argnums=(1, 2, 3, 4, 5)) def inv_sqrtm_only( # noqa: D103 - x: jax.Array, + x: jnp.ndarray, threshold: float = 1e-6, min_iterations: int = 0, inner_iterations: int = 10, max_iterations: int = 1000, regularization: float = 1e-6 -) -> jax.Array: +) -> jnp.ndarray: return sqrtm( x, threshold, min_iterations, inner_iterations, max_iterations, regularization @@ -296,13 +297,13 @@ def inv_sqrtm_only( # noqa: D103 def inv_sqrtm_only_fwd( # noqa: D103 - x: jax.Array, + x: jnp.ndarray, threshold: float, min_iterations: int, inner_iterations: int, max_iterations: int, regularization: float, -) -> Tuple[jax.Array, jax.Array]: +) -> Tuple[jnp.ndarray, jnp.ndarray]: inv_sqrt_x = sqrtm( x, threshold, min_iterations, inner_iterations, max_iterations, regularization @@ -312,9 +313,9 
@@ def inv_sqrtm_only_fwd( # noqa: D103 def inv_sqrtm_only_bwd( # noqa: D103 threshold: float, min_iterations: int, inner_iterations: int, - max_iterations: int, regularization: float, residual: jax.Array, - cotangent: jax.Array -) -> Tuple[jax.Array]: + max_iterations: int, regularization: float, residual: jnp.ndarray, + cotangent: jnp.ndarray +) -> Tuple[jnp.ndarray]: del threshold, min_iterations, inner_iterations, \ max_iterations, regularization diff --git a/src/ott/math/unbalanced_functions.py b/src/ott/math/unbalanced_functions.py index 2d7baebb7..fc1aca9f3 100644 --- a/src/ott/math/unbalanced_functions.py +++ b/src/ott/math/unbalanced_functions.py @@ -13,32 +13,31 @@ # limitations under the License. from typing import Callable -import jax import jax.numpy as jnp -def phi_star(h: jax.Array, rho: float) -> jax.Array: +def phi_star(h: jnp.ndarray, rho: float) -> jnp.ndarray: """Legendre transform of KL, :cite:`sejourne:19`, p. 9.""" return rho * (jnp.exp(h / rho) - 1) -def derivative_phi_star(f: jax.Array, rho: float) -> jax.Array: +def derivative_phi_star(f: jnp.ndarray, rho: float) -> jnp.ndarray: """Derivative of Legendre transform of phi_starKL, see phi_star.""" # TODO(cuturi): use jax.grad directly. return jnp.exp(f / rho) def grad_of_marginal_fit( - c: jax.Array, h: jax.Array, tau: float, epsilon: float -) -> jax.Array: + c: jnp.ndarray, h: jnp.ndarray, tau: float, epsilon: float +) -> jnp.ndarray: """Compute grad of terms linked to marginals in objective. Computes gradient w.r.t. f ( or g) of terms in :cite:`sejourne:19`, left-hand-side of eq. 15 terms involving phi_star). Args: - c: jax.Array, first target marginal (either a or b in practice) - h: jax.Array, potential (either f or g in practice) + c: jnp.ndarray, first target marginal (either a or b in practice) + h: jnp.ndarray, potential (either f or g in practice) tau: float, strength (in ]0,1]) of regularizer w.r.t. 
marginal epsilon: regularization @@ -51,14 +50,14 @@ def grad_of_marginal_fit( return jnp.where(c > 0, c * derivative_phi_star(-h, r), 0.0) -def second_derivative_phi_star(f: jax.Array, rho: float) -> jax.Array: +def second_derivative_phi_star(f: jnp.ndarray, rho: float) -> jnp.ndarray: """Second Derivative of Legendre transform of KL, see phi_star.""" return jnp.exp(f / rho) / rho def diag_jacobian_of_marginal_fit( - c: jax.Array, h: jax.Array, tau: float, epsilon: float, - derivative: Callable[[jax.Array, float], jax.Array] + c: jnp.ndarray, h: jnp.ndarray, tau: float, epsilon: float, + derivative: Callable[[jnp.ndarray, float], jnp.ndarray] ): """Compute grad of terms linked to marginals in objective. @@ -66,8 +65,8 @@ def diag_jacobian_of_marginal_fit( left-hand-side of eq. 32 (terms involving phi_star) Args: - c: jax.Array, first target marginal (either a or b in practice) - h: jax.Array, potential (either f or g in practice) + c: jnp.ndarray, first target marginal (either a or b in practice) + h: jnp.ndarray, potential (either f or g in practice) tau: float, strength (in ]0,1]) of regularizer w.r.t. 
marginal epsilon: regularization derivative: Callable diff --git a/src/ott/math/utils.py b/src/ott/math/utils.py index 188707c10..8e7ea90ee 100644 --- a/src/ott/math/utils.py +++ b/src/ott/math/utils.py @@ -34,10 +34,10 @@ def safe_log( # noqa: D103 - x: jax.Array, + x: jnp.ndarray, *, eps: Optional[float] = None -) -> jax.Array: +) -> jnp.ndarray: if eps is None: eps = jnp.finfo(x.dtype).tiny return jnp.where(x > 0., jnp.log(x), jnp.log(eps)) @@ -46,11 +46,11 @@ def safe_log( # noqa: D103 @functools.partial(jax.custom_jvp, nondiff_argnums=[1, 2, 3]) @functools.partial(jax.jit, static_argnames=("ord", "axis", "keepdims")) def norm( - x: jax.Array, + x: jnp.ndarray, ord: Union[int, str, None] = None, axis: Union[None, Sequence[int], int] = None, keepdims: bool = False -) -> jax.Array: +) -> jnp.ndarray: """Computes order ord norm of vector, using `jnp.linalg` in forward pass. Evaluations of distances between a vector and itself using translation @@ -105,18 +105,18 @@ def norm_jvp(ord, axis, keepdims, primals, tangents): # TODO(michalk8): add axis argument -def kl(p: jax.Array, q: jax.Array) -> float: +def kl(p: jnp.ndarray, q: jnp.ndarray) -> float: """Kullback-Leibler divergence.""" return jnp.vdot(p, (safe_log(p) - safe_log(q))) -def gen_kl(p: jax.Array, q: jax.Array) -> float: +def gen_kl(p: jnp.ndarray, q: jnp.ndarray) -> float: """Generalized Kullback-Leibler divergence.""" return jnp.vdot(p, (safe_log(p) - safe_log(q))) + jnp.sum(q) - jnp.sum(p) # TODO(michalk8): add axis argument -def gen_js(p: jax.Array, q: jax.Array, c: float = 0.5) -> float: +def gen_js(p: jnp.ndarray, q: jnp.ndarray, c: float = 0.5) -> float: """Jensen-Shannon divergence.""" return c * (gen_kl(p, q) + gen_kl(q, p)) @@ -176,8 +176,8 @@ def logsumexp_jvp(axis, keepdims, return_sign, primals, tangents): @functools.partial(jax.custom_vjp, nondiff_argnums=(2,)) def softmin( - x: jax.Array, gamma: float, axis: Optional[int] = None -) -> jax.Array: + x: jnp.ndarray, gamma: float, axis: 
Optional[int] = None +) -> jnp.ndarray: r"""Soft-min operator. Args: @@ -205,8 +205,8 @@ def softmin( @functools.partial(jax.vmap, in_axes=[0, 0, None]) def barycentric_projection( - matrix: jax.Array, y: jax.Array, cost_fn: "costs.CostFn" -) -> jax.Array: + matrix: jnp.ndarray, y: jnp.ndarray, cost_fn: "costs.CostFn" +) -> jnp.ndarray: """Compute the barycentric projection of a matrix. Args: diff --git a/src/ott/neural/data/dataloaders.py b/src/ott/neural/data/dataloaders.py index 0ebfc77a0..466460384 100644 --- a/src/ott/neural/data/dataloaders.py +++ b/src/ott/neural/data/dataloaders.py @@ -19,14 +19,14 @@ class ConditionalDataLoader: #TODO(@MUCDK) uncomment, resolve installation issu #def __init__( # self, rng: jax.random.KeyArray, dataloaders: Dict[str, tf.Dataloader], - # p: jax.Array + # p: jnp.ndarray #) -> None: # super().__init__() # self.rng = rng # self.conditions = dataloaders.keys() # self.p = p - #def __next__(self) -> jax.Array: + #def __next__(self) -> jnp.ndarray: # self.rng, rng = jax.random.split(self.rng, 2) # condition = jax.random.choice(rng, self.conditions, p=self.p) # return next(self.dataloaders[condition]) diff --git a/src/ott/neural/models/base_models.py b/src/ott/neural/models/base_models.py index 74a87df93..c96ad5b29 100644 --- a/src/ott/neural/models/base_models.py +++ b/src/ott/neural/models/base_models.py @@ -15,7 +15,7 @@ from typing import Optional import flax.linen as nn -import jax +import jax.numpy as jnp __all__ = ["BaseNeuralVectorField", "BaseRescalingNet"] @@ -25,11 +25,11 @@ class BaseNeuralVectorField(nn.Module, abc.ABC): @abc.abstractmethod def __call__( self, - t: jax.Array, - x: jax.Array, - condition: Optional[jax.Array] = None, - keys_model: Optional[jax.Array] = None - ) -> jax.Array: # noqa: D102): + t: jnp.ndarray, + x: jnp.ndarray, + condition: Optional[jnp.ndarray] = None, + keys_model: Optional[jnp.ndarray] = None + ) -> jnp.ndarray: # noqa: D102): pass @@ -37,6 +37,8 @@ class BaseRescalingNet(nn.Module, 
abc.ABC): @abc.abstractmethod def __call__( - self, x: jax.Array, condition: Optional[jax.Array] = None - ) -> jax.Array: + self, + x: jnp.ndarray, + condition: Optional[jnp.ndarray] = None + ) -> jnp.ndarray: pass diff --git a/src/ott/neural/models/conjugate_solvers.py b/src/ott/neural/models/conjugate_solvers.py index 4d3d8eea0..0758cf1ad 100644 --- a/src/ott/neural/models/conjugate_solvers.py +++ b/src/ott/neural/models/conjugate_solvers.py @@ -14,7 +14,6 @@ import abc from typing import Callable, Literal, NamedTuple, Optional -import jax import jax.numpy as jnp from jaxopt import LBFGS @@ -37,7 +36,7 @@ class ConjugateResults(NamedTuple): num_iter: the number of iterations taken by the solver """ val: float - grad: jax.Array + grad: jnp.ndarray num_iter: int @@ -51,9 +50,9 @@ class FenchelConjugateSolver(abc.ABC): @abc.abstractmethod def solve( self, - f: Callable[[jax.Array], jax.Array], - y: jax.Array, - x_init: Optional[jax.Array] = None + f: Callable[[jnp.ndarray], jnp.ndarray], + y: jnp.ndarray, + x_init: Optional[jnp.ndarray] = None ) -> ConjugateResults: """Solve for the conjugate. @@ -91,8 +90,8 @@ class FenchelConjugateLBFGS(FenchelConjugateSolver): def solve( # noqa: D102 self, - f: Callable[[jax.Array], jax.Array], - y: jax.Array, + f: Callable[[jnp.ndarray], jnp.ndarray], + y: jnp.ndarray, x_init: Optional[jnp.array] = None ) -> ConjugateResults: assert y.ndim == 1, y.ndim diff --git a/src/ott/neural/models/layers.py b/src/ott/neural/models/layers.py index 0eac7e626..dffd48276 100644 --- a/src/ott/neural/models/layers.py +++ b/src/ott/neural/models/layers.py @@ -19,7 +19,7 @@ __all__ = ["PositiveDense", "PosDefPotentials"] -PRNGKey = jax.Array +PRNGKey = jnp.ndarray Shape = Tuple[int, ...] Dtype = Any Array = Any @@ -40,9 +40,9 @@ class PositiveDense(nn.Module): bias_init: initializer function for the bias. 
""" dim_hidden: int - rectifier_fn: Callable[[jax.Array], jax.Array] = nn.softplus - inv_rectifier_fn: Callable[[jax.Array], - jax.Array] = lambda x: jnp.log(jnp.exp(x) - 1) + rectifier_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.softplus + inv_rectifier_fn: Callable[[jnp.ndarray], + jnp.ndarray] = lambda x: jnp.log(jnp.exp(x) - 1) use_bias: bool = True dtype: Any = jnp.float32 precision: Any = None @@ -51,7 +51,7 @@ class PositiveDense(nn.Module): bias_init: Callable[[PRNGKey, Shape, Dtype], Array] = nn.initializers.zeros @nn.compact - def __call__(self, inputs: jax.Array) -> jax.Array: + def __call__(self, inputs: jnp.ndarray) -> jnp.ndarray: """Applies a linear transformation to inputs along the last dimension. Args: @@ -99,7 +99,7 @@ class PosDefPotentials(nn.Module): bias_init: Callable[[PRNGKey, Shape, Dtype], Array] = nn.initializers.zeros @nn.compact - def __call__(self, inputs: jax.Array) -> jax.Array: + def __call__(self, inputs: jnp.ndarray) -> jnp.ndarray: """Apply a few quadratic forms. 
Args: diff --git a/src/ott/neural/models/models.py b/src/ott/neural/models/models.py index 6bb075ff3..80326d3ca 100644 --- a/src/ott/neural/models/models.py +++ b/src/ott/neural/models/models.py @@ -67,9 +67,9 @@ class ICNN(neuraldual.BaseW2NeuralDual): dim_hidden: Sequence[int] init_std: float = 1e-2 init_fn: Callable = jax.nn.initializers.normal - act_fn: Callable[[jax.Array], jax.Array] = nn.relu + act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.relu pos_weights: bool = True - gaussian_map_samples: Optional[Tuple[jax.Array, jax.Array]] = None + gaussian_map_samples: Optional[Tuple[jnp.ndarray, jnp.ndarray]] = None @property def is_potential(self) -> bool: # noqa: D102 @@ -151,8 +151,8 @@ def setup(self) -> None: # noqa: D102 @staticmethod def _compute_gaussian_map_params( - samples: Tuple[jax.Array, jax.Array] - ) -> Tuple[jax.Array, jax.Array]: + samples: Tuple[jnp.ndarray, jnp.ndarray] + ) -> Tuple[jnp.ndarray, jnp.ndarray]: from ott.tools.gaussian_mixture import gaussian source, target = samples g_s = gaussian.Gaussian.from_samples(source) @@ -165,13 +165,13 @@ def _compute_gaussian_map_params( @staticmethod def _compute_identity_map_params( input_dim: int - ) -> Tuple[jax.Array, jax.Array]: + ) -> Tuple[jnp.ndarray, jnp.ndarray]: A = jnp.eye(input_dim).reshape((1, input_dim, input_dim)) b = jnp.zeros((1, input_dim)) return A, b @nn.compact - def __call__(self, x: jax.Array) -> float: # noqa: D102 + def __call__(self, x: jnp.ndarray) -> float: # noqa: D102 z = self.act_fn(self.w_xs[0](x)) for i in range(self.num_hidden): z = jnp.add(self.w_zs[i](z), self.w_xs[i + 1](x)) @@ -194,10 +194,10 @@ class MLP(neuraldual.BaseW2NeuralDual): dim_hidden: Sequence[int] is_potential: bool = True - act_fn: Callable[[jax.Array], jax.Array] = nn.leaky_relu + act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.leaky_relu @nn.compact - def __call__(self, x: jax.Array) -> jax.Array: # noqa: D102 + def __call__(self, x: jnp.ndarray) -> jnp.ndarray: # noqa: D102 squeeze = x.ndim 
== 1 if squeeze: x = jnp.expand_dims(x, 0) @@ -267,7 +267,7 @@ def __init__( meta_model: nn.Module, opt: Optional[optax.GradientTransformation ] = optax.adam(learning_rate=1e-3), # noqa: B008 - rng: Optional[jax.Array] = None, + rng: Optional[jnp.ndarray] = None, state: Optional[train_state.TrainState] = None ): self.geom = geom @@ -294,8 +294,8 @@ def __init__( self.update_impl = self._get_update_fn() def update( - self, state: train_state.TrainState, a: jax.Array, b: jax.Array - ) -> Tuple[jax.Array, jax.Array, train_state.TrainState]: + self, state: train_state.TrainState, a: jnp.ndarray, b: jnp.ndarray + ) -> Tuple[jnp.ndarray, jnp.ndarray, train_state.TrainState]: r"""Update the meta model with the dual objective. The goal is for the model to match the optimal duals, i.e., @@ -333,8 +333,8 @@ def init_dual_a( # noqa: D102 self, ot_prob: "linear_problem.LinearProblem", lse_mode: bool, - rng: Optional[jax.Array] = None, - ) -> jax.Array: + rng: Optional[jnp.ndarray] = None, + ) -> jnp.ndarray: del rng # Detect if the problem is batched. assert ot_prob.a.ndim in (1, 2) @@ -387,9 +387,9 @@ def update(state, a, b): return update def _compute_f( - self, a: jax.Array, b: jax.Array, - params: frozen_dict.FrozenDict[str, jax.Array] - ) -> jax.Array: + self, a: jnp.ndarray, b: jnp.ndarray, + params: frozen_dict.FrozenDict[str, jnp.ndarray] + ) -> jnp.ndarray: r"""Predict the optimal :math:`f` potential. 
Args: @@ -431,10 +431,10 @@ class NeuralVectorField(BaseNeuralVectorField): t_embed_dim: Optional[int] = None joint_hidden_dim: Optional[int] = None num_layers_per_block: int = 3 - act_fn: Callable[[jax.Array], jax.Array] = nn.silu + act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.silu n_frequencies: int = 128 - def time_encoder(self, t: jax.Array) -> jnp.array: + def time_encoder(self, t: jnp.ndarray) -> jnp.array: freq = 2 * jnp.arange(self.n_frequencies) * jnp.pi t = freq * t return jnp.concatenate((jnp.cos(t), jnp.sin(t)), axis=-1) @@ -464,11 +464,11 @@ def __post_init__(self): @nn.compact def __call__( self, - t: jax.Array, - x: jax.Array, - condition: Optional[jax.Array], - keys_model: Optional[jax.Array] = None, - ) -> jax.Array: + t: jnp.ndarray, + x: jnp.ndarray, + condition: Optional[jnp.ndarray], + keys_model: Optional[jnp.ndarray] = None, + ) -> jnp.ndarray: t = self.time_encoder(t) t = Block( @@ -537,12 +537,12 @@ class Rescaling_MLP(BaseRescalingNet): hidden_dim: int condition_dim: int num_layers_per_block: int = 3 - act_fn: Callable[[jax.Array], jax.Array] = nn.selu + act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.selu @nn.compact def __call__( - self, x: jax.Array, condition: Optional[jax.Array] - ) -> jax.Array: # noqa: D102 + self, x: jnp.ndarray, condition: Optional[jnp.ndarray] + ) -> jnp.ndarray: # noqa: D102 x = Block( dim=self.hidden_dim, out_dim=self.hidden_dim, diff --git a/src/ott/neural/solvers/base_solver.py b/src/ott/neural/solvers/base_solver.py index e7216da46..0ad159a8f 100644 --- a/src/ott/neural/solvers/base_solver.py +++ b/src/ott/neural/solvers/base_solver.py @@ -83,10 +83,10 @@ def __init__(*args, **kwargs): def _resample_data( self, key: jax.random.KeyArray, - tmat: jax.Array, - source_arrays: Tuple[jax.Array, ...], - target_arrays: Tuple[jax.Array, ...], - ) -> Tuple[jax.Array, ...]: + tmat: jnp.ndarray, + source_arrays: Tuple[jnp.ndarray, ...], + target_arrays: Tuple[jnp.ndarray, ...], + ) -> Tuple[jnp.ndarray, ...]: 
"""Resample a batch according to coupling `tmat`.""" tmat_flattened = tmat.flatten() indices = random.choice(key, len(tmat_flattened), shape=[tmat.shape[0]]) @@ -101,10 +101,10 @@ def _resample_data( def _sample_conditional_indices_from_tmap( self, key: jax.random.PRNGKeyArray, - tmat: jax.Array, - k_samples_per_x: Union[int, jax.Array], - source_arrays: Tuple[jax.Array, ...], - target_arrays: Tuple[jax.Array, ...], + tmat: jnp.ndarray, + k_samples_per_x: Union[int, jnp.ndarray], + source_arrays: Tuple[jnp.ndarray, ...], + target_arrays: Tuple[jnp.ndarray, ...], *, source_is_balanced: bool, ) -> Tuple[jnp.array, jnp.array]: @@ -161,8 +161,8 @@ def _get_sinkhorn_match_fn( ) -> Callable: def match_pairs( - x: jax.Array, y: jax.Array - ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]: + x: jnp.ndarray, y: jnp.ndarray + ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray, jnp.ndarray]: geom = pointcloud.PointCloud( x, y, epsilon=epsilon, scale_cost=scale_cost, cost_fn=cost_fn ) @@ -171,8 +171,9 @@ def match_pairs( ).matrix def match_pairs_filtered( - x_lin: jax.Array, x_quad: jax.Array, y_lin: jax.Array, y_quad: jax.Array - ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]: + x_lin: jnp.ndarray, x_quad: jnp.ndarray, y_lin: jnp.ndarray, + y_quad: jnp.ndarray + ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray, jnp.ndarray]: geom = pointcloud.PointCloud( x_lin, y_lin, epsilon=epsilon, scale_cost=scale_cost, cost_fn=cost_fn ) @@ -214,10 +215,10 @@ def _get_gromov_match_fn( x_scale_cost = y_scale_cost = xy_scale_cost = scale_cost def match_pairs( - x_lin: Optional[jax.Array], - x_quad: Tuple[jax.Array, jax.Array], - y_lin: Optional[jax.Array], - y_quad: Tuple[jax.Array, jax.Array], + x_lin: Optional[jnp.ndarray], + x_quad: Tuple[jnp.ndarray, jnp.ndarray], + y_lin: Optional[jnp.ndarray], + y_quad: Tuple[jnp.ndarray, jnp.ndarray], ) -> Tuple[jnp.array, jnp.array]: geom_xx = pointcloud.PointCloud( x=x_quad, y=x_quad, cost_fn=x_cost_fn, scale_cost=x_scale_cost @@ -250,7 
+251,7 @@ class UnbalancednessMixin: def __init__( self, - rng: jax.Array, + rng: jnp.ndarray, source_dim: int, target_dim: int, cond_dim: Optional[int], @@ -298,13 +299,13 @@ def _get_compute_unbalanced_marginals( scale_cost: Union[bool, int, float, Literal["mean", "max_cost", "median"]] = "mean", sinkhorn_kwargs: Dict[str, Any] = MappingProxyType({}), - ) -> Tuple[jax.Array, jax.Array]: + ) -> Tuple[jnp.ndarray, jnp.ndarray]: """Compute the unbalanced source and target marginals for a batch.""" @jax.jit def compute_unbalanced_marginals( - batch_source: jax.Array, batch_target: jax.Array - ) -> Tuple[jax.Array, jax.Array]: + batch_source: jnp.ndarray, batch_target: jnp.ndarray + ) -> Tuple[jnp.ndarray, jnp.ndarray]: geom = PointCloud( batch_source, batch_target, @@ -322,9 +323,9 @@ def compute_unbalanced_marginals( def _resample_unbalanced( self, key: jax.random.KeyArray, - batch: Tuple[jax.Array, ...], - marginals: jax.Array, - ) -> Tuple[jax.Array, ...]: + batch: Tuple[jnp.ndarray, ...], + marginals: jnp.ndarray, + ) -> Tuple[jnp.ndarray, ...]: """Resample a batch based upon marginals.""" indices = jax.random.choice( key, a=len(marginals), p=jnp.squeeze(marginals), shape=[len(marginals)] @@ -356,13 +357,14 @@ def _setup(self, source_dim: int, target_dim: int, cond_dim: int): def _get_rescaling_step_fn(self) -> Callable: # type:ignore[type-arg] def loss_a_fn( - params_eta: Optional[jax.Array], - apply_fn_eta: Callable[[Dict[str, jax.Array], jax.Array], jax.Array], - x: jax.Array, - condition: Optional[jax.Array], - a: jax.Array, + params_eta: Optional[jnp.ndarray], + apply_fn_eta: Callable[[Dict[str, jnp.ndarray], jnp.ndarray], + jnp.ndarray], + x: jnp.ndarray, + condition: Optional[jnp.ndarray], + a: jnp.ndarray, expectation_reweighting: float, - ) -> Tuple[float, jax.Array]: + ) -> Tuple[float, jnp.ndarray]: eta_predictions = apply_fn_eta({"params": params_eta}, x, condition) return ( optax.l2_loss(eta_predictions[:, 0], a).mean() + @@ -371,13 +373,14 @@ def 
loss_a_fn( ) def loss_b_fn( - params_xi: Optional[jax.Array], - apply_fn_xi: Callable[[Dict[str, jax.Array], jax.Array], jax.Array], - x: jax.Array, - condition: Optional[jax.Array], - b: jax.Array, + params_xi: Optional[jnp.ndarray], + apply_fn_xi: Callable[[Dict[str, jnp.ndarray], jnp.ndarray], + jnp.ndarray], + x: jnp.ndarray, + condition: Optional[jnp.ndarray], + b: jnp.ndarray, expectation_reweighting: float, - ) -> Tuple[float, jax.Array]: + ) -> Tuple[float, jnp.ndarray]: xi_predictions = apply_fn_xi({"params": params_xi}, x, condition) return ( optax.l2_loss(xi_predictions[:, 0], b).mean() + @@ -387,11 +390,11 @@ def loss_b_fn( @jax.jit def step_fn( - source: jax.Array, - target: jax.Array, - condition: Optional[jax.Array], - a: jax.Array, - b: jax.Array, + source: jnp.ndarray, + target: jnp.ndarray, + condition: Optional[jnp.ndarray], + a: jnp.ndarray, + b: jnp.ndarray, state_eta: Optional[train_state.TrainState] = None, state_xi: Optional[train_state.TrainState] = None, *, @@ -434,8 +437,8 @@ def step_fn( return step_fn def evaluate_eta( - self, source: jax.Array, condition: Optional[jax.Array] - ) -> jax.Array: + self, source: jnp.ndarray, condition: Optional[jnp.ndarray] + ) -> jnp.ndarray: """Evaluate the left learnt rescaling factor. Args: @@ -448,12 +451,12 @@ def evaluate_eta( if self.state_eta is None: raise ValueError("The left rescaling factor was not parameterized.") return self.state_eta.apply_fn({"params": self.state_eta.params}, - x=source, - condition=condition) + x=source, + condition=condition) def evaluate_xi( - self, target: jax.Array, condition: Optional[jax.Array] - ) -> jax.Array: + self, target: jnp.ndarray, condition: Optional[jnp.ndarray] + ) -> jnp.ndarray: """Evaluate the right learnt rescaling factor. 
Args: diff --git a/src/ott/neural/solvers/flows.py b/src/ott/neural/solvers/flows.py index 6450e2c1b..b02981fc9 100644 --- a/src/ott/neural/solvers/flows.py +++ b/src/ott/neural/solvers/flows.py @@ -33,7 +33,7 @@ def __init__(self, sigma: float) -> None: self.sigma = sigma @abc.abstractmethod - def compute_mu_t(self, t: jax.Array, x_0: jax.Array, x_1: jax.Array): + def compute_mu_t(self, t: jnp.ndarray, x_0: jnp.ndarray, x_1: jnp.ndarray): """Compute the mean of the probablitiy path between :math:`x` and :math:`y` at time :math:`t`. Args: @@ -44,7 +44,7 @@ def compute_mu_t(self, t: jax.Array, x_0: jax.Array, x_1: jax.Array): pass @abc.abstractmethod - def compute_sigma_t(self, t: jax.Array): + def compute_sigma_t(self, t: jnp.ndarray): """Compute the standard deviation of the probablity path at time :math:`t`. Args: @@ -54,8 +54,8 @@ def compute_sigma_t(self, t: jax.Array): @abc.abstractmethod def compute_ut( - self, t: jax.Array, x_0: jax.Array, x_1: jax.Array - ) -> jax.Array: + self, t: jnp.ndarray, x_0: jnp.ndarray, x_1: jnp.ndarray + ) -> jnp.ndarray: """Evaluate the conditional vector field defined between :math:`x_0` and :math:`x_1` at time :math:`t`. Args: @@ -66,8 +66,9 @@ def compute_ut( pass def compute_xt( - self, noise: jax.Array, t: jax.Array, x_0: jax.Array, x_1: jax.Array - ) -> jax.Array: + self, noise: jnp.ndarray, t: jnp.ndarray, x_0: jnp.ndarray, + x_1: jnp.ndarray + ) -> jnp.ndarray: """Sample from the probability path between :math:`x_0` and :math:`x_1` at time :math:`t`. Args: @@ -88,8 +89,8 @@ class StraightFlow(BaseFlow, abc.ABC): """Base class for flows with straight paths.""" def compute_mu_t( - self, t: jax.Array, x_0: jax.Array, x_1: jax.Array - ) -> jax.Array: + self, t: jnp.ndarray, x_0: jnp.ndarray, x_1: jnp.ndarray + ) -> jnp.ndarray: """Compute the mean of the probablitiy path between :math:`x` and :math:`y` at time :math:`t`. 
Args: @@ -100,8 +101,8 @@ def compute_mu_t( return t * x_0 + (1 - t) * x_1 def compute_ut( - self, t: jax.Array, x_0: jax.Array, x_1: jax.Array - ) -> jax.Array: + self, t: jnp.ndarray, x_0: jnp.ndarray, x_1: jnp.ndarray + ) -> jnp.ndarray: """Evaluate the conditional vector field defined between :math:`x_0` and :math:`x_1` at time :math:`t`. Args: @@ -118,7 +119,7 @@ def compute_ut( class ConstantNoiseFlow(StraightFlow): r"""Flow with straight paths and constant flow noise :math:`\sigma`.""" - def compute_sigma_t(self, t: jax.Array): + def compute_sigma_t(self, t: jnp.ndarray): r"""Compute noise of the flow at time :math:`t`. Args: @@ -133,7 +134,7 @@ def compute_sigma_t(self, t: jax.Array): class BrownianNoiseFlow(StraightFlow): r"""Sampler for sampling noise implicitly defined by a Schroedinger Bridge problem with parameter `\sigma` such that :math:`\sigma_t = \sigma * \sqrt(t * (1-t))`.""" - def compute_sigma_t(self, t: jax.Array): + def compute_sigma_t(self, t: jnp.ndarray): """Compute the standard deviation of the probablity path at time :math:`t`. Args: @@ -158,7 +159,7 @@ def __init__(self, low: float, high: float) -> None: self.high = high @abc.abstractmethod - def __call__(self, rng: jax.Array, num_samples: int) -> jax.Array: + def __call__(self, rng: jnp.ndarray, num_samples: int) -> jnp.ndarray: """Generate `num_samples` samples of the time `math`:t:. Args: @@ -179,7 +180,7 @@ class UniformSampler(BaseTimeSampler): def __init__(self, low: float = 0.0, high: float = 1.0) -> None: super().__init__(low=low, high=high) - def __call__(self, rng: jax.Array, num_samples: int) -> jax.Array: + def __call__(self, rng: jnp.ndarray, num_samples: int) -> jnp.ndarray: """Generate `num_samples` samples of the time `math`:t:. 
Args: @@ -209,7 +210,7 @@ def __init__( super().__init__(low=low, high=high) self.offset = offset - def __call__(self, rng: jax.Array, num_samples: int) -> jax.Array: + def __call__(self, rng: jnp.ndarray, num_samples: int) -> jnp.ndarray: """Generate `num_samples` samples of the time `math`:t:. Args: diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/solvers/genot.py index 377ef033d..7b0867e44 100644 --- a/src/ott/neural/solvers/genot.py +++ b/src/ott/neural/solvers/genot.py @@ -113,10 +113,10 @@ def __init__( fused_penalty: float = 0.0, tau_a: float = 1.0, tau_b: float = 1.0, - mlp_eta: Callable[[jax.Array], float] = None, - mlp_xi: Callable[[jax.Array], float] = None, + mlp_eta: Callable[[jnp.ndarray], float] = None, + mlp_xi: Callable[[jnp.ndarray], float] = None, unbalanced_kwargs: Dict[str, Any] = {}, - callback_fn: Optional[Callable[[jax.Array, jax.Array, jax.Array], + callback_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray, jnp.ndarray], Any]] = None, rng: random.PRNGKeyArray = random.PRNGKey(0), ) -> None: @@ -323,7 +323,7 @@ def step_fn( ): def loss_fn( - params: jax.Array, batch: Dict[str, jnp.array], + params: jnp.ndarray, batch: Dict[str, jnp.array], keys_model: random.PRNGKeyArray ): x_t = self.flow.compute_xt( @@ -356,12 +356,12 @@ def loss_fn( def transport( self, - source: jax.Array, - condition: Optional[jax.Array], + source: jnp.ndarray, + condition: Optional[jnp.ndarray], rng: random.PRNGKeyArray = random.PRNGKey(0), forward: bool = True, diffeqsolve_kwargs: Dict[str, Any] = types.MappingProxyType({}), - ) -> Union[jnp.array, diffrax.Solution, Optional[jax.Array]]: + ) -> Union[jnp.array, diffrax.Solution, Optional[jnp.ndarray]]: """Transport data with the learnt plan. 
This method pushes-forward the `source` to its conditional distribution by solving the neural ODE parameterized by the :attr:`~ott.neural.solvers.GENOTg.neural_vector_field` from @@ -390,7 +390,7 @@ def transport( axis=-1) t0, t1 = (0.0, 1.0) - def solve_ode(input: jax.Array, cond: jax.Array): + def solve_ode(input: jnp.ndarray, cond: jnp.ndarray): return diffrax.diffeqsolve( diffrax.ODETerm( lambda t, x, args: self.state_neural_vector_field. @@ -446,7 +446,7 @@ def training_logs(self) -> Dict[str, Any]: """Logs of the training.""" raise NotImplementedError - def sample_noise(self, key: random.PRNGKey, batch_size: int) -> jax.Array: + def sample_noise(self, key: random.PRNGKey, batch_size: int) -> jnp.ndarray: """Sample noise from a standard-normal distribution. Args: diff --git a/src/ott/neural/solvers/losses.py b/src/ott/neural/solvers/losses.py index fbf091b22..bec0f3916 100644 --- a/src/ott/neural/solvers/losses.py +++ b/src/ott/neural/solvers/losses.py @@ -25,8 +25,8 @@ def monge_gap( - map_fn: Callable[[jax.Array], jax.Array], - reference_points: jax.Array, + map_fn: Callable[[jnp.ndarray], jnp.ndarray], + reference_points: jnp.ndarray, cost_fn: Optional[costs.CostFn] = None, epsilon: Optional[float] = None, relative_epsilon: Optional[bool] = None, @@ -91,8 +91,8 @@ def monge_gap( def monge_gap_from_samples( - source: jax.Array, - target: jax.Array, + source: jnp.ndarray, + target: jnp.ndarray, cost_fn: Optional[costs.CostFn] = None, epsilon: Optional[float] = None, relative_epsilon: Optional[bool] = None, diff --git a/src/ott/neural/solvers/map_estimator.py b/src/ott/neural/solvers/map_estimator.py index 53bcdc7dd..65edb1d60 100644 --- a/src/ott/neural/solvers/map_estimator.py +++ b/src/ott/neural/solvers/map_estimator.py @@ -79,15 +79,15 @@ def __init__( dim_data: int, model: neuraldual.BaseW2NeuralDual, optimizer: Optional[optax.OptState] = None, - fitting_loss: Optional[Callable[[jax.Array, jax.Array], + fitting_loss: Optional[Callable[[jnp.ndarray, 
jnp.ndarray], Tuple[float, Optional[Any]]]] = None, - regularizer: Optional[Callable[[jax.Array, jax.Array], + regularizer: Optional[Callable[[jnp.ndarray, jnp.ndarray], Tuple[float, Optional[Any]]]] = None, regularizer_strength: Union[float, Sequence[float]] = 1., num_train_iters: int = 10_000, logging: bool = False, valid_freq: int = 500, - rng: Optional[jax.Array] = None, + rng: Optional[jnp.ndarray] = None, ): self._fitting_loss = fitting_loss self._regularizer = regularizer @@ -126,7 +126,7 @@ def setup( self.step_fn = self._get_step_fn() @property - def regularizer(self) -> Callable[[jax.Array, jax.Array], float]: + def regularizer(self) -> Callable[[jnp.ndarray, jnp.ndarray], float]: """Regularizer added to the fitting loss. Can be e.g. the :func:`~ott.solvers.nn.losses.monge_gap_from_samples`. @@ -139,7 +139,7 @@ def regularizer(self) -> Callable[[jax.Array, jax.Array], float]: return lambda *args, **kwargs: (0., None) @property - def fitting_loss(self) -> Callable[[jax.Array, jax.Array], float]: + def fitting_loss(self) -> Callable[[jnp.ndarray, jnp.ndarray], float]: """Fitting loss to fit the marginal constraint. Can be for instance the @@ -153,9 +153,9 @@ def fitting_loss(self) -> Callable[[jax.Array, jax.Array], float]: @staticmethod def _generate_batch( - loader_source: Iterator[jax.Array], - loader_target: Iterator[jax.Array], - ) -> Dict[str, jax.Array]: + loader_source: Iterator[jnp.ndarray], + loader_target: Iterator[jnp.ndarray], + ) -> Dict[str, jnp.ndarray]: """Generate batches a batch of samples. 
``loader_source`` and ``loader_target`` can be training or @@ -168,10 +168,10 @@ def _generate_batch( def train_map_estimator( self, - trainloader_source: Iterator[jax.Array], - trainloader_target: Iterator[jax.Array], - validloader_source: Iterator[jax.Array], - validloader_target: Iterator[jax.Array], + trainloader_source: Iterator[jnp.ndarray], + trainloader_target: Iterator[jnp.ndarray], + validloader_source: Iterator[jnp.ndarray], + validloader_target: Iterator[jnp.ndarray], ) -> Tuple[train_state.TrainState, Dict[str, Any]]: """Training loop.""" # define logs @@ -230,7 +230,7 @@ def _get_step_fn(self) -> Callable: def loss_fn( params: frozen_dict.FrozenDict, apply_fn: Callable, - batch: Dict[str, jax.Array], step: int + batch: Dict[str, jnp.ndarray], step: int ) -> Tuple[float, Dict[str, float]]: """Loss function.""" # map samples with the fitted map @@ -261,8 +261,8 @@ def loss_fn( @functools.partial(jax.jit, static_argnums=3) def step_fn( state_neural_net: train_state.TrainState, - train_batch: Dict[str, jax.Array], - valid_batch: Optional[Dict[str, jax.Array]] = None, + train_batch: Dict[str, jnp.ndarray], + valid_batch: Optional[Dict[str, jnp.ndarray]] = None, is_logging_step: bool = False, step: int = 0 ) -> Tuple[train_state.TrainState, Dict[str, float]]: diff --git a/src/ott/neural/solvers/neuraldual.py b/src/ott/neural/solvers/neuraldual.py index 7d4d5800f..23c63fa3f 100644 --- a/src/ott/neural/solvers/neuraldual.py +++ b/src/ott/neural/solvers/neuraldual.py @@ -44,8 +44,8 @@ Callback_t = Callable[[int, potentials.DualPotentials], None] Conj_t = Optional[conjugate_solvers.FenchelConjugateSolver] -PotentialValueFn_t = Callable[[jax.Array], jax.Array] -PotentialGradientFn_t = Callable[[jax.Array], jax.Array] +PotentialValueFn_t = Callable[[jnp.ndarray], jnp.ndarray] +PotentialGradientFn_t = Callable[[jnp.ndarray], jnp.ndarray] class W2NeuralTrainState(train_state.TrainState): @@ -60,9 +60,9 @@ class W2NeuralTrainState(train_state.TrainState): 
potential_gradient_fn: the potential's gradient function """ potential_value_fn: Callable[ - [frozen_dict.FrozenDict[str, jax.Array], Optional[PotentialValueFn_t]], + [frozen_dict.FrozenDict[str, jnp.ndarray], Optional[PotentialValueFn_t]], PotentialValueFn_t] = struct.field(pytree_node=False) - potential_gradient_fn: Callable[[frozen_dict.FrozenDict[str, jax.Array]], + potential_gradient_fn: Callable[[frozen_dict.FrozenDict[str, jnp.ndarray]], PotentialGradientFn_t] = struct.field( pytree_node=False ) @@ -87,7 +87,7 @@ def is_potential(self) -> bool: def potential_value_fn( self, - params: frozen_dict.FrozenDict[str, jax.Array], + params: frozen_dict.FrozenDict[str, jnp.ndarray], other_potential_value_fn: Optional[PotentialValueFn_t] = None, ) -> PotentialValueFn_t: r"""Return a function giving the value of the potential. @@ -119,7 +119,7 @@ def potential_value_fn( "The value of the gradient-based potential depends " \ "on the value of the other potential." - def value_fn(x: jax.Array) -> jax.Array: + def value_fn(x: jnp.ndarray) -> jnp.ndarray: squeeze = x.ndim == 1 if squeeze: x = jnp.expand_dims(x, 0) @@ -132,7 +132,7 @@ def value_fn(x: jax.Array) -> jax.Array: def potential_gradient_fn( self, - params: frozen_dict.FrozenDict[str, jax.Array], + params: frozen_dict.FrozenDict[str, jnp.ndarray], ) -> PotentialGradientFn_t: """Return a function returning a vector or the gradient of the potential. 
@@ -148,7 +148,7 @@ def potential_gradient_fn( def create_train_state( self, - rng: jax.Array, + rng: jnp.ndarray, optimizer: optax.OptState, input: Union[int, Tuple[int, ...]], **kwargs: Any, @@ -243,7 +243,7 @@ def __init__( valid_freq: int = 1000, log_freq: int = 1000, logging: bool = False, - rng: Optional[jax.Array] = None, + rng: Optional[jnp.ndarray] = None, pos_weights: bool = True, beta: float = 1.0, conjugate_solver: Conj_t = conjugate_solvers.DEFAULT_CONJUGATE_SOLVER, @@ -288,7 +288,7 @@ def __init__( def setup( self, - rng: jax.Array, + rng: jnp.ndarray, neural_f: BaseW2NeuralDual, neural_g: BaseW2NeuralDual, dim_data: int, @@ -358,10 +358,10 @@ def setup( def __call__( # noqa: D102 self, - trainloader_source: Iterator[jax.Array], - trainloader_target: Iterator[jax.Array], - validloader_source: Iterator[jax.Array], - validloader_target: Iterator[jax.Array], + trainloader_source: Iterator[jnp.ndarray], + trainloader_target: Iterator[jnp.ndarray], + validloader_source: Iterator[jnp.ndarray], + validloader_target: Iterator[jnp.ndarray], callback: Optional[Callback_t] = None, ) -> Union[potentials.DualPotentials, Tuple[potentials.DualPotentials, Train_t]]: @@ -378,10 +378,10 @@ def __call__( # noqa: D102 def train_neuraldual_parallel( self, - trainloader_source: Iterator[jax.Array], - trainloader_target: Iterator[jax.Array], - validloader_source: Iterator[jax.Array], - validloader_target: Iterator[jax.Array], + trainloader_source: Iterator[jnp.ndarray], + trainloader_target: Iterator[jnp.ndarray], + validloader_source: Iterator[jnp.ndarray], + validloader_target: Iterator[jnp.ndarray], callback: Optional[Callback_t] = None, ) -> Train_t: """Training and validation with parallel updates.""" @@ -453,10 +453,10 @@ def train_neuraldual_parallel( def train_neuraldual_alternating( self, - trainloader_source: Iterator[jax.Array], - trainloader_target: Iterator[jax.Array], - validloader_source: Iterator[jax.Array], - validloader_target: Iterator[jax.Array], + 
trainloader_source: Iterator[jnp.ndarray], + trainloader_target: Iterator[jnp.ndarray], + validloader_source: Iterator[jnp.ndarray], + validloader_target: Iterator[jnp.ndarray], callback: Optional[Callback_t] = None, ) -> Train_t: """Training and validation with alternating updates.""" @@ -533,7 +533,7 @@ def loss_fn(params_f, params_g, f_value, g_value, g_gradient, batch): init_source_hat = g_gradient(params_g)(target) - def g_value_partial(y: jax.Array) -> jax.Array: + def g_value_partial(y: jnp.ndarray) -> jnp.ndarray: """Lazy way of evaluating g if f's computation needs it.""" return g_value(params_g)(y) @@ -661,7 +661,7 @@ def to_dual_potentials( self.state_g.params, f_value ) - def g_value_finetuned(y: jax.Array) -> jax.Array: + def g_value_finetuned(y: jnp.ndarray) -> jnp.ndarray: x_hat = jax.grad(g_value_prediction)(y) grad_g_y = jax.lax.stop_gradient( self.conjugate_solver.solve(f_value, y, x_init=x_hat).grad @@ -686,7 +686,7 @@ def _clip_weights_icnn(params): return core.freeze(params) @staticmethod - def _penalize_weights_icnn(params: Dict[str, jax.Array]) -> float: + def _penalize_weights_icnn(params: Dict[str, jnp.ndarray]) -> float: penalty = 0.0 for k, param in params.items(): if k.startswith("w_z"): @@ -696,9 +696,9 @@ def _penalize_weights_icnn(params: Dict[str, jax.Array]) -> float: @staticmethod def _update_logs( logs: Dict[str, List[Union[float, str]]], - loss_f: jax.Array, - loss_g: jax.Array, - w_dist: jax.Array, + loss_f: jnp.ndarray, + loss_g: jnp.ndarray, + w_dist: jnp.ndarray, ) -> None: logs["loss_f"].append(float(loss_f)) logs["loss_g"].append(float(loss_g)) diff --git a/src/ott/neural/solvers/otfm.py b/src/ott/neural/solvers/otfm.py index 82a6b67aa..2afc94e6d 100644 --- a/src/ott/neural/solvers/otfm.py +++ b/src/ott/neural/solvers/otfm.py @@ -11,9 +11,9 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-from collections import defaultdict import functools import types +from collections import defaultdict from typing import Any, Callable, Dict, Mapping, Optional, Tuple, Type import diffrax @@ -36,7 +36,6 @@ BaseTimeSampler, ) from ott.solvers import was_solver -from ott.tools.sinkhorn_divergence import sinkhorn_divergence __all__ = ["OTFlowMatching"] @@ -88,10 +87,10 @@ def __init__( cost_fn: Type[costs.CostFn] = costs.SqEuclidean(), tau_a: float = 1.0, tau_b: float = 1.0, - mlp_eta: Callable[[jax.Array], float] = None, - mlp_xi: Callable[[jax.Array], float] = None, + mlp_eta: Callable[[jnp.ndarray], float] = None, + mlp_xi: Callable[[jnp.ndarray], float] = None, unbalanced_kwargs: Dict[str, Any] = {}, - callback_fn: Optional[Callable[[jax.Array, jax.Array, jax.Array], + callback_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray, jnp.ndarray], Any]] = None, logging_freq: int = 100, valid_freq: int = 5000, @@ -158,20 +157,25 @@ def _get_step_fn(self) -> Callable: def step_fn( key: random.PRNGKeyArray, state_neural_vector_field: train_state.TrainState, - batch: Dict[str, jax.Array], + batch: Dict[str, jnp.ndarray], ) -> Tuple[Any, Any]: def loss_fn( - params: jax.Array, t: jax.Array, noise: jax.Array, - batch: Dict[str, jax.Array], keys_model: random.PRNGKeyArray - ) -> jax.Array: + params: jnp.ndarray, t: jnp.ndarray, noise: jnp.ndarray, + batch: Dict[str, jnp.ndarray], keys_model: random.PRNGKeyArray + ) -> jnp.ndarray: - x_t = self.flow.compute_xt(noise, t, batch["source_lin"], batch["target_lin"]) + x_t = self.flow.compute_xt( + noise, t, batch["source_lin"], batch["target_lin"] + ) apply_fn = functools.partial( state_neural_vector_field.apply_fn, {"params": params} ) v_t = jax.vmap(apply_fn)( - t=t, x=x_t, condition=batch["source_conditions"], keys_model=keys_model + t=t, + x=x_t, + condition=batch["source_conditions"], + keys_model=keys_model ) u_t = self.flow.compute_ut(t, batch["source_lin"], batch["target_lin"]) return jnp.mean((v_t - u_t) ** 2) @@ -199,19 
+203,21 @@ def __call__(self, train_loader, valid_loader) -> None: Returns: None """ - batch: Mapping[str, jax.Array] = {} + batch: Mapping[str, jnp.ndarray] = {} curr_loss = 0.0 - + for iter in range(self.iterations): rng_resample, rng_step_fn, self.rng = random.split(self.rng, 3) batch = next(train_loader) if self.ot_solver is not None: tmat = self.match_fn(batch["source_lin"], batch["target_lin"]) - (batch["source_lin"], - batch["source_conditions"]), (batch["target_lin"], batch["target_conditions"]) = self._resample_data( - rng_resample, tmat, (batch["source_lin"], batch["source_conditions"]), - (batch["target_lin"], batch["target_conditions"]) - ) + (batch["source_lin"], batch["source_conditions"] + ), (batch["target_lin"], + batch["target_conditions"]) = self._resample_data( + rng_resample, tmat, + (batch["source_lin"], batch["source_conditions"]), + (batch["target_lin"], batch["target_conditions"]) + ) self.state_neural_vector_field, loss = self.step_fn( rng_step_fn, self.state_neural_vector_field, batch ) @@ -244,7 +250,7 @@ def __call__(self, train_loader, valid_loader) -> None: def transport( self, data: jnp.array, - condition: Optional[jax.Array], + condition: Optional[jnp.ndarray], forward: bool = True, diffeqsolve_kwargs: Dict[str, Any] = types.MappingProxyType({}) ) -> diffrax.Solution: @@ -270,7 +276,7 @@ def transport( ) if forward else (self.time_sampler.high, self.time_sampler.low) @jax.jit - def solve_ode(input: jax.Array, cond: jax.Array): + def solve_ode(input: jnp.ndarray, cond: jnp.ndarray): return diffrax.diffeqsolve( diffrax.ODETerm( lambda t, x, args: self.state_neural_vector_field. 
@@ -296,7 +302,6 @@ def solve_ode(input: jax.Array, cond: jax.Array): def _valid_step(self, valid_loader, iter) -> None: next(valid_loader) # TODO: add callback and logging - @property def learn_rescaling(self) -> bool: @@ -327,7 +332,7 @@ def training_logs(self) -> Dict[str, Any]: """Logs of the training.""" raise NotImplementedError - def sample_noise(self, key: random.PRNGKey, batch_size: int) -> jax.Array: + def sample_noise(self, key: random.PRNGKey, batch_size: int) -> jnp.ndarray: """Sample noise from a standard-normal distribution. Args: diff --git a/src/ott/problems/linear/barycenter_problem.py b/src/ott/problems/linear/barycenter_problem.py index c94cc578d..ca5333a8e 100644 --- a/src/ott/problems/linear/barycenter_problem.py +++ b/src/ott/problems/linear/barycenter_problem.py @@ -50,9 +50,9 @@ class FreeBarycenterProblem: def __init__( self, - y: jax.Array, - b: Optional[jax.Array] = None, - weights: Optional[jax.Array] = None, + y: jnp.ndarray, + b: Optional[jnp.ndarray] = None, + weights: Optional[jnp.ndarray] = None, cost_fn: Optional[costs.CostFn] = None, epsilon: Optional[float] = None, **kwargs: Any, @@ -76,7 +76,7 @@ def __init__( assert self._b is None or self._y.shape[0] == self._b.shape[0] @property - def segmented_y_b(self) -> Tuple[jax.Array, jax.Array]: + def segmented_y_b(self) -> Tuple[jnp.ndarray, jnp.ndarray]: """Tuple of arrays containing the segmented measures and weights. - Segmented measures of shape ``[num_measures, max_measure_size, ndim]``. 
@@ -94,14 +94,14 @@ def segmented_y_b(self) -> Tuple[jax.Array, jax.Array]: return y, b @property - def flattened_y(self) -> jax.Array: + def flattened_y(self) -> jnp.ndarray: """Array of shape ``[num_measures * (N_1 + N_2 + ...), ndim]``.""" if self._is_segmented: return self._y.reshape((-1, self._y.shape[-1])) return self._y @property - def flattened_b(self) -> Optional[jax.Array]: + def flattened_b(self) -> Optional[jnp.ndarray]: """Array of shape ``[num_measures * (N_1 + N_2 + ...),]``.""" return None if self._b is None else self._b.ravel() @@ -121,7 +121,7 @@ def ndim(self) -> int: return self._y.shape[-1] @property - def weights(self) -> jax.Array: + def weights(self) -> jnp.ndarray: """Barycenter weights of shape ``[num_measures,]`` that sum to 1.""" if self._weights is None: return jnp.ones((self.num_measures,)) / self.num_measures @@ -165,8 +165,8 @@ class FixedBarycenterProblem: def __init__( self, geom: geometry.Geometry, - a: jax.Array, - weights: Optional[jax.Array] = None, + a: jnp.ndarray, + weights: Optional[jnp.ndarray] = None, ): self.geom = geom self.a = a @@ -178,7 +178,7 @@ def num_measures(self) -> int: return self.a.shape[0] @property - def weights(self) -> jax.Array: + def weights(self) -> jnp.ndarray: """Barycenter weights of shape ``[num_measures,]`` that sum to :math`1`.""" if self._weights is None: return jnp.ones((self.num_measures,)) / self.num_measures diff --git a/src/ott/problems/linear/linear_problem.py b/src/ott/problems/linear/linear_problem.py index 3e09c0e59..7c206aa63 100644 --- a/src/ott/problems/linear/linear_problem.py +++ b/src/ott/problems/linear/linear_problem.py @@ -21,8 +21,9 @@ __all__ = ["LinearProblem"] # TODO(michalk8): move to typing.py when refactoring the types -MarginalFunc = Callable[[jax.Array, jax.Array], jax.Array] -TransportAppFunc = Callable[[jax.Array, jax.Array, jax.Array, int], jax.Array] +MarginalFunc = Callable[[jnp.ndarray, jnp.ndarray], jnp.ndarray] +TransportAppFunc = Callable[[jnp.ndarray, 
jnp.ndarray, jnp.ndarray, int], + jnp.ndarray] @jax.tree_util.register_pytree_node_class @@ -49,8 +50,8 @@ class LinearProblem: def __init__( self, geom: geometry.Geometry, - a: Optional[jax.Array] = None, - b: Optional[jax.Array] = None, + a: Optional[jnp.ndarray] = None, + b: Optional[jnp.ndarray] = None, tau_a: float = 1.0, tau_b: float = 1.0 ): @@ -61,13 +62,13 @@ def __init__( self.tau_b = tau_b @property - def a(self) -> jax.Array: + def a(self) -> jnp.ndarray: """First marginal.""" num_a = self.geom.shape[0] return jnp.ones((num_a,)) / num_a if self._a is None else self._a @property - def b(self) -> jax.Array: + def b(self) -> jnp.ndarray: """Second marginal.""" num_b = self.geom.shape[1] return jnp.ones((num_b,)) / num_b if self._b is None else self._b diff --git a/src/ott/problems/linear/potentials.py b/src/ott/problems/linear/potentials.py index 718aa22a1..7ab226072 100644 --- a/src/ott/problems/linear/potentials.py +++ b/src/ott/problems/linear/potentials.py @@ -37,7 +37,7 @@ mpl = plt = None __all__ = ["DualPotentials", "EntropicPotentials"] -Potential_t = Callable[[jax.Array], float] +Potential_t = Callable[[jnp.ndarray], float] @jtu.register_pytree_node_class @@ -72,7 +72,7 @@ def __init__( self.cost_fn = cost_fn self._corr = corr - def transport(self, vec: jax.Array, forward: bool = True) -> jax.Array: + def transport(self, vec: jnp.ndarray, forward: bool = True) -> jnp.ndarray: r"""Transport ``vec`` according to Brenier formula :cite:`brenier:91`. Uses Theorem 1.17 from :cite:`santambrogio:15` to compute an OT map when @@ -105,7 +105,7 @@ def transport(self, vec: jax.Array, forward: bool = True) -> jax.Array: return vec - self._grad_h_inv(self._grad_f(vec)) return vec - self._grad_h_inv(self._grad_g(vec)) - def distance(self, src: jax.Array, tgt: jax.Array) -> float: + def distance(self, src: jnp.ndarray, tgt: jnp.ndarray) -> float: r"""Evaluate Wasserstein distance between samples using dual potentials. 
This uses direct estimation of potentials against measures when dual @@ -146,17 +146,17 @@ def g(self) -> Potential_t: return self._g @property - def _grad_f(self) -> Callable[[jax.Array], jax.Array]: + def _grad_f(self) -> Callable[[jnp.ndarray], jnp.ndarray]: """Vectorized gradient of the potential function :attr:`f`.""" return jax.vmap(jax.grad(self.f, argnums=0)) @property - def _grad_g(self) -> Callable[[jax.Array], jax.Array]: + def _grad_g(self) -> Callable[[jnp.ndarray], jnp.ndarray]: """Vectorized gradient of the potential function :attr:`g`.""" return jax.vmap(jax.grad(self.g, argnums=0)) @property - def _grad_h_inv(self) -> Callable[[jax.Array], jax.Array]: + def _grad_h_inv(self) -> Callable[[jnp.ndarray], jnp.ndarray]: from ott.geometry import costs assert isinstance(self.cost_fn, costs.TICost), ( @@ -181,9 +181,9 @@ def tree_unflatten( # noqa: D102 def plot_ot_map( self, - source: jax.Array, - target: jax.Array, - samples: Optional[jax.Array] = None, + source: jnp.ndarray, + target: jnp.ndarray, + samples: Optional[jnp.ndarray] = None, forward: bool = True, ax: Optional["plt.Axes"] = None, legend_kwargs: Optional[Dict[str, Any]] = None, @@ -348,11 +348,11 @@ class EntropicPotentials(DualPotentials): def __init__( self, - f_xy: jax.Array, - g_xy: jax.Array, + f_xy: jnp.ndarray, + g_xy: jnp.ndarray, prob: linear_problem.LinearProblem, - f_xx: Optional[jax.Array] = None, - g_yy: Optional[jax.Array] = None, + f_xx: Optional[jnp.ndarray] = None, + g_yy: Optional[jnp.ndarray] = None, ): # we pass directly the arrays and override the properties # since only the properties need to be callable @@ -373,11 +373,11 @@ def _potential_fn(self, *, kind: Literal["f", "g"]) -> Potential_t: from ott.geometry import pointcloud def callback( - x: jax.Array, + x: jnp.ndarray, *, - potential: jax.Array, - y: jax.Array, - weights: jax.Array, + potential: jnp.ndarray, + y: jnp.ndarray, + weights: jnp.ndarray, epsilon: float, ) -> float: x = jnp.atleast_2d(x) diff --git 
a/src/ott/problems/quadratic/gw_barycenter.py b/src/ott/problems/quadratic/gw_barycenter.py index 7170f1064..dfe562d98 100644 --- a/src/ott/problems/quadratic/gw_barycenter.py +++ b/src/ott/problems/quadratic/gw_barycenter.py @@ -60,11 +60,11 @@ class GWBarycenterProblem(barycenter_problem.FreeBarycenterProblem): def __init__( self, - y: Optional[jax.Array] = None, - b: Optional[jax.Array] = None, - weights: Optional[jax.Array] = None, - costs: Optional[jax.Array] = None, - y_fused: Optional[jax.Array] = None, + y: Optional[jnp.ndarray] = None, + b: Optional[jnp.ndarray] = None, + weights: Optional[jnp.ndarray] = None, + costs: Optional[jnp.ndarray] = None, + y_fused: Optional[jnp.ndarray] = None, fused_penalty: float = 1.0, gw_loss: Literal["sqeucl", "kl"] = "sqeucl", scale_cost: Union[int, float, Literal["mean", "max_cost"]] = 1.0, @@ -98,7 +98,9 @@ def __init__( # TODO(michalk8): in the future, consider checking the other 2 cases # using `segmented_y` and `segmented_y_fused`? - def update_barycenter(self, transports: jax.Array, a: jax.Array) -> jax.Array: + def update_barycenter( + self, transports: jnp.ndarray, a: jnp.ndarray + ) -> jnp.ndarray: """Update the barycenter cost matrix. Uses the eq. 14 and 15 of :cite:`peyre:16`. @@ -114,11 +116,11 @@ def update_barycenter(self, transports: jax.Array, a: jax.Array) -> jax.Array: @functools.partial(jax.vmap, in_axes=[0, 0, 0, None]) def project( - y: jax.Array, - b: jax.Array, - transport: jax.Array, + y: jnp.ndarray, + b: jnp.ndarray, + transport: jnp.ndarray, fn: Optional[quadratic_costs.Loss], - ) -> jax.Array: + ) -> jnp.ndarray: geom = self._create_y_geometry(y, mask=b > 0.) 
fn, lin = (None, True) if fn is None else (fn.func, fn.is_linear) @@ -144,8 +146,8 @@ def project( return jnp.exp(barycenter) return barycenter - def update_features(self, transports: jax.Array, - a: jax.Array) -> Optional[jax.Array]: + def update_features(self, transports: jnp.ndarray, + a: jnp.ndarray) -> Optional[jnp.ndarray]: """Update the barycenter features in the fused case :cite:`vayer:19`. Uses :cite:`cuturi:14` eq. 8, and is implemented only @@ -179,8 +181,8 @@ def update_features(self, transports: jax.Array, def _create_bary_geometry( self, - cost_matrix: jax.Array, - mask: Optional[jax.Array] = None + cost_matrix: jnp.ndarray, + mask: Optional[jnp.ndarray] = None ) -> geometry.Geometry: return geometry.Geometry( cost_matrix=cost_matrix, @@ -192,8 +194,8 @@ def _create_bary_geometry( def _create_y_geometry( self, - y: jax.Array, - mask: Optional[jax.Array] = None + y: jnp.ndarray, + mask: Optional[jnp.ndarray] = None ) -> geometry.Geometry: if self._y_as_costs: assert y.shape[0] == y.shape[1], y.shape @@ -215,10 +217,10 @@ def _create_y_geometry( def _create_fused_geometry( self, - x: jax.Array, - y: jax.Array, - src_mask: Optional[jax.Array] = None, - tgt_mask: Optional[jax.Array] = None + x: jnp.ndarray, + y: jnp.ndarray, + src_mask: Optional[jnp.ndarray] = None, + tgt_mask: Optional[jnp.ndarray] = None ) -> pointcloud.PointCloud: return pointcloud.PointCloud( x, @@ -233,9 +235,9 @@ def _create_fused_geometry( def _create_problem( self, state: "GWBarycenterState", # noqa: F821 - y: jax.Array, - b: jax.Array, - f: Optional[jax.Array] = None + y: jnp.ndarray, + b: jnp.ndarray, + f: Optional[jnp.ndarray] = None ) -> quadratic_problem.QuadraticProblem: # TODO(michalk8): in future, mask in the problem for convenience? bary_mask = state.a > 0. 
@@ -267,7 +269,7 @@ def is_fused(self) -> bool: return self._y_fused is not None @property - def segmented_y_fused(self) -> Optional[jax.Array]: + def segmented_y_fused(self) -> Optional[jnp.ndarray]: """Feature array of shape used in the fused case.""" if not self.is_fused or self._y_fused.ndim == 3: return self._y_fused diff --git a/src/ott/problems/quadratic/quadratic_costs.py b/src/ott/problems/quadratic/quadratic_costs.py index 060c3c537..70f2bf5ad 100644 --- a/src/ott/problems/quadratic/quadratic_costs.py +++ b/src/ott/problems/quadratic/quadratic_costs.py @@ -13,7 +13,6 @@ # limitations under the License. from typing import Callable, NamedTuple -import jax import jax.numpy as jnp import jax.scipy as jsp @@ -21,7 +20,7 @@ class Loss(NamedTuple): # noqa: D101 - func: Callable[[jax.Array], jax.Array] + func: Callable[[jnp.ndarray], jnp.ndarray] is_linear: bool diff --git a/src/ott/problems/quadratic/quadratic_problem.py b/src/ott/problems/quadratic/quadratic_problem.py index cf5b804a2..a17aaf9fb 100644 --- a/src/ott/problems/quadratic/quadratic_problem.py +++ b/src/ott/problems/quadratic/quadratic_problem.py @@ -91,8 +91,8 @@ def __init__( geom_xy: Optional[geometry.Geometry] = None, fused_penalty: float = 1.0, scale_cost: Optional[Union[bool, float, str]] = False, - a: Optional[jax.Array] = None, - b: Optional[jax.Array] = None, + a: Optional[jnp.ndarray] = None, + b: Optional[jnp.ndarray] = None, loss: Union[Literal["sqeucl", "kl"], quadratic_costs.GWLoss] = "sqeucl", tau_a: float = 1.0, tau_b: float = 1.0, @@ -125,8 +125,8 @@ def __init__( def marginal_dependent_cost( self, - marginal_1: jax.Array, - marginal_2: jax.Array, + marginal_1: jnp.ndarray, + marginal_2: jnp.ndarray, ) -> low_rank.LRCGeometry: r"""Initialize cost term that depends on the marginals of the transport. 
@@ -169,9 +169,9 @@ def marginal_dependent_cost( def cost_unbalanced_correction( self, - transport_matrix: jax.Array, - marginal_1: jax.Array, - marginal_2: jax.Array, + transport_matrix: jnp.ndarray, + marginal_1: jnp.ndarray, + marginal_2: jnp.ndarray, epsilon: epsilon_scheduler.Epsilon, ) -> float: r"""Calculate cost term from the quadratic divergence when unbalanced. @@ -193,10 +193,10 @@ def cost_unbalanced_correction( :math:`+ epsilon * \sum(KL(P|ab'))` Args: - transport_matrix: jax.Array[num_a, num_b], transport matrix. - marginal_1: jax.Array[num_a,], marginal of the transport matrix + transport_matrix: jnp.ndarray[num_a, num_b], transport matrix. + marginal_1: jnp.ndarray[num_a,], marginal of the transport matrix for samples from :attr:`geom_xx`. - marginal_2: jax.Array[num_b,], marginal of the transport matrix + marginal_2: jnp.ndarray[num_b,], marginal of the transport matrix for samples from :attr:`geom_yy`. epsilon: entropy regularizer. @@ -353,7 +353,7 @@ def update_lr_linearization( ) @property - def _fused_cost_matrix(self) -> Union[float, jax.Array]: + def _fused_cost_matrix(self) -> Union[float, jnp.ndarray]: if not self.is_fused: return 0.0 geom_xy = self.geom_xy @@ -382,7 +382,7 @@ def convertible(geom: geometry.Geometry) -> bool: def to_low_rank( self, - rng: Optional[jax.Array] = None, + rng: Optional[jnp.ndarray] = None, ) -> "QuadraticProblem": """Convert geometries to low-rank. 
@@ -442,13 +442,13 @@ def geom_xy(self) -> Optional[geometry.Geometry]: return self._geom_xy @property - def a(self) -> jax.Array: + def a(self) -> jnp.ndarray: """First marginal.""" num_a = self.geom_xx.shape[0] return jnp.ones((num_a,)) / num_a if self._a is None else self._a @property - def b(self) -> jax.Array: + def b(self) -> jnp.ndarray: """Second marginal.""" num_b = self.geom_yy.shape[0] return jnp.ones((num_b,)) / num_b if self._b is None else self._b @@ -510,7 +510,7 @@ def update_epsilon_unbalanced( # noqa: D103 def apply_cost( # noqa: D103 - geom: geometry.Geometry, arr: jax.Array, *, axis: int, + geom: geometry.Geometry, arr: jnp.ndarray, *, axis: int, fn: quadratic_costs.Loss -) -> jax.Array: +) -> jnp.ndarray: return geom.apply_cost(arr, axis=axis, fn=fn.func, is_linear=fn.is_linear) diff --git a/src/ott/solvers/linear/_solve.py b/src/ott/solvers/linear/_solve.py index fad5a4e7d..2bca6a825 100644 --- a/src/ott/solvers/linear/_solve.py +++ b/src/ott/solvers/linear/_solve.py @@ -13,7 +13,7 @@ # limitations under the License. from typing import Any, Optional, Union -import jax +import jax.numpy as jnp from ott.geometry import geometry from ott.problems.linear import linear_problem @@ -24,8 +24,8 @@ def solve( geom: geometry.Geometry, - a: Optional[jax.Array] = None, - b: Optional[jax.Array] = None, + a: Optional[jnp.ndarray] = None, + b: Optional[jnp.ndarray] = None, tau_a: float = 1.0, tau_b: float = 1.0, rank: int = -1, diff --git a/src/ott/solvers/linear/acceleration.py b/src/ott/solvers/linear/acceleration.py index 7ce602194..4529e7f78 100644 --- a/src/ott/solvers/linear/acceleration.py +++ b/src/ott/solvers/linear/acceleration.py @@ -34,7 +34,7 @@ class AndersonAcceleration: refresh_every: int = 1 # Recompute interpolation periodically. ridge_identity: float = 1e-2 # Ridge used in the linear system. 
- def extrapolation(self, xs: jax.Array, fxs: jax.Array) -> jax.Array: + def extrapolation(self, xs: jnp.ndarray, fxs: jnp.ndarray) -> jnp.ndarray: """Compute Anderson extrapolation from past observations.""" # Remove -inf values to instantiate quadratic problem. All others # remain since they might be caused by a valid issue. @@ -161,10 +161,10 @@ def lehmann(self, state: "sinkhorn.SinkhornState") -> float: def __call__( # noqa: D102 self, weight: float, - value: jax.Array, - new_value: jax.Array, + value: jnp.ndarray, + new_value: jnp.ndarray, lse_mode: bool = True - ) -> jax.Array: + ) -> jnp.ndarray: if lse_mode: value = jnp.where(jnp.isfinite(value), value, 0.0) return (1.0 - weight) * value + weight * new_value diff --git a/src/ott/solvers/linear/continuous_barycenter.py b/src/ott/solvers/linear/continuous_barycenter.py index 0094c3a3c..b93c14032 100644 --- a/src/ott/solvers/linear/continuous_barycenter.py +++ b/src/ott/solvers/linear/continuous_barycenter.py @@ -41,11 +41,11 @@ class FreeBarycenterState(NamedTuple): a: barycenter weights. 
""" - costs: Optional[jax.Array] = None - linear_convergence: Optional[jax.Array] = None - errors: Optional[jax.Array] = None - x: Optional[jax.Array] = None - a: Optional[jax.Array] = None + costs: Optional[jnp.ndarray] = None + linear_convergence: Optional[jnp.ndarray] = None + errors: Optional[jnp.ndarray] = None + x: Optional[jnp.ndarray] = None + a: Optional[jnp.ndarray] = None def set(self, **kwargs: Any) -> "FreeBarycenterState": """Return a copy of self, possibly with overwrites.""" @@ -70,7 +70,7 @@ def update( @functools.partial(jax.vmap, in_axes=[None, None, 0, 0]) def solve_linear_ot( - a: Optional[jax.Array], x: jax.Array, b: jax.Array, y: jax.Array + a: Optional[jnp.ndarray], x: jnp.ndarray, b: jnp.ndarray, y: jnp.ndarray ): out = linear_ot_solver( linear_problem.LinearProblem( @@ -129,8 +129,8 @@ def __call__( # noqa: D102 self, bar_prob: barycenter_problem.FreeBarycenterProblem, bar_size: int = 100, - x_init: Optional[jax.Array] = None, - rng: Optional[jax.Array] = None, + x_init: Optional[jnp.ndarray] = None, + rng: Optional[jnp.ndarray] = None, ) -> FreeBarycenterState: # TODO(michalk8): no reason for iterations to be outside this class rng = utils.default_prng_key(rng) @@ -140,8 +140,8 @@ def init_state( self, bar_prob: barycenter_problem.FreeBarycenterProblem, bar_size: int, - x_init: Optional[jax.Array] = None, - rng: Optional[jax.Array] = None, + x_init: Optional[jnp.ndarray] = None, + rng: Optional[jnp.ndarray] = None, ) -> FreeBarycenterState: """Initialize the state of the Wasserstein barycenter iterations. 
@@ -195,8 +195,8 @@ def output_from_state( # noqa: D102 def iterations( solver: FreeWassersteinBarycenter, bar_size: int, - bar_prob: barycenter_problem.FreeBarycenterProblem, x_init: jax.Array, - rng: jax.Array + bar_prob: barycenter_problem.FreeBarycenterProblem, x_init: jnp.ndarray, + rng: jnp.ndarray ) -> FreeBarycenterState: """Jittable Wasserstein barycenter outer loop.""" diff --git a/src/ott/solvers/linear/discrete_barycenter.py b/src/ott/solvers/linear/discrete_barycenter.py index 85adaa795..dcfdc1470 100644 --- a/src/ott/solvers/linear/discrete_barycenter.py +++ b/src/ott/solvers/linear/discrete_barycenter.py @@ -26,10 +26,10 @@ class SinkhornBarycenterOutput(NamedTuple): # noqa: D101 - f: jax.Array - g: jax.Array - histogram: jax.Array - errors: jax.Array + f: jnp.ndarray + g: jnp.ndarray + histogram: jnp.ndarray + errors: jnp.ndarray @jax.tree_util.register_pytree_node_class @@ -79,7 +79,7 @@ def __init__( def __call__( self, fixed_bp: barycenter_problem.FixedBarycenterProblem, - dual_initialization: Optional[jax.Array] = None, + dual_initialization: Optional[jnp.ndarray] = None, ) -> SinkhornBarycenterOutput: """Solve barycenter problem, possibly using clever initialization. 
@@ -128,10 +128,10 @@ def tree_unflatten(cls, aux_data, children): # noqa: D102 @functools.partial(jax.jit, static_argnums=(5, 6, 7, 8, 9, 10, 11, 12)) def _discrete_barycenter( - geom: geometry.Geometry, a: jax.Array, weights: jax.Array, - dual_initialization: jax.Array, threshold: float, norm_error: Sequence[int], - inner_iterations: int, min_iterations: int, max_iterations: int, - lse_mode: bool, debiased: bool, num_a: int, num_b: int + geom: geometry.Geometry, a: jnp.ndarray, weights: jnp.ndarray, + dual_initialization: jnp.ndarray, threshold: float, + norm_error: Sequence[int], inner_iterations: int, min_iterations: int, + max_iterations: int, lse_mode: bool, debiased: bool, num_a: int, num_b: int ) -> SinkhornBarycenterOutput: """Jit'able function to compute discrete barycenters.""" if lse_mode: diff --git a/src/ott/solvers/linear/implicit_differentiation.py b/src/ott/solvers/linear/implicit_differentiation.py index c5e7cb0f3..fbf98ce81 100644 --- a/src/ott/solvers/linear/implicit_differentiation.py +++ b/src/ott/solvers/linear/implicit_differentiation.py @@ -23,8 +23,9 @@ if TYPE_CHECKING: from ott.problems.linear import linear_problem -LinOp_t = Callable[[jax.Array], jax.Array] -Solver_t = Callable[[LinOp_t, jax.Array, Optional[LinOp_t], bool], jax.Array] +LinOp_t = Callable[[jnp.ndarray], jnp.ndarray] +Solver_t = Callable[[LinOp_t, jnp.ndarray, Optional[LinOp_t], bool], + jnp.ndarray] __all__ = ["ImplicitDiff", "solve_jax_cg"] @@ -69,16 +70,16 @@ class ImplicitDiff: solver: Optional[Solver_t] = None solver_kwargs: Optional[Dict[str, Any]] = None symmetric: bool = False - precondition_fun: Optional[Callable[[jax.Array], jax.Array]] = None + precondition_fun: Optional[Callable[[jnp.ndarray], jnp.ndarray]] = None def solve( self, - gr: Tuple[jax.Array, jax.Array], + gr: Tuple[jnp.ndarray, jnp.ndarray], ot_prob: "linear_problem.LinearProblem", - f: jax.Array, - g: jax.Array, + f: jnp.ndarray, + g: jnp.ndarray, lse_mode: bool, - ) -> jax.Array: + ) -> 
jnp.ndarray: r"""Apply minus inverse of [hessian ``reg_ot_cost`` w.r.t. ``f``, ``g``]. This function is used to carry out implicit differentiation of ``sinkhorn`` @@ -223,7 +224,7 @@ def solve( return jnp.concatenate((-vjp_gr_f, -vjp_gr_g)) def first_order_conditions( - self, prob, f: jax.Array, g: jax.Array, lse_mode: bool + self, prob, f: jnp.ndarray, g: jnp.ndarray, lse_mode: bool ): r"""Compute vector of first order conditions for the reg-OT problem. @@ -237,12 +238,12 @@ def first_order_conditions( Args: prob: definition of the linear optimal transport problem. - f: jax.Array, first potential - g: jax.Array, second potential + f: jnp.ndarray, first potential + g: jnp.ndarray, second potential lse_mode: bool Returns: - a jax.Array of size (size of ``n + m``) quantifying deviation to + a jnp.ndarray of size (size of ``n + m``) quantifying deviation to optimality for variables ``f`` and ``g``. """ geom = prob.geom @@ -265,8 +266,8 @@ def first_order_conditions( return jnp.concatenate((result_a, result_b)) def gradient( - self, prob: "linear_problem.LinearProblem", f: jax.Array, g: jax.Array, - lse_mode: bool, gr: Tuple[jax.Array, jax.Array] + self, prob: "linear_problem.LinearProblem", f: jnp.ndarray, + g: jnp.ndarray, lse_mode: bool, gr: Tuple[jnp.ndarray, jnp.ndarray] ) -> "linear_problem.LinearProblem": """Apply VJP to recover gradient in reverse mode differentiation.""" # Applies first part of vjp to gr: inverse part of implicit function theorem @@ -286,13 +287,13 @@ def replace(self, **kwargs: Any) -> "ImplicitDiff": # noqa: D102 def solve_jax_cg( lin: LinOp_t, - b: jax.Array, + b: jnp.ndarray, lin_t: Optional[LinOp_t] = None, symmetric: bool = False, ridge_identity: float = 0.0, ridge_kernel: float = 0.0, **kwargs: Any -) -> jax.Array: +) -> jnp.ndarray: """Wrapper around JAX native linear solvers. 
Args: diff --git a/src/ott/solvers/linear/lineax_implicit.py b/src/ott/solvers/linear/lineax_implicit.py index ac3978462..79b9e7c95 100644 --- a/src/ott/solvers/linear/lineax_implicit.py +++ b/src/ott/solvers/linear/lineax_implicit.py @@ -46,14 +46,14 @@ def transpose(self): def solve_lineax( lin: Callable, - b: jax.Array, + b: jnp.ndarray, lin_t: Optional[Callable] = None, symmetric: bool = False, nonsym_solver: Optional[lx.AbstractLinearSolver] = None, ridge_identity: float = 0.0, ridge_kernel: float = 0.0, **kwargs: Any -) -> jax.Array: +) -> jnp.ndarray: """Wrapper around lineax solvers. Args: diff --git a/src/ott/solvers/linear/lr_utils.py b/src/ott/solvers/linear/lr_utils.py index 2eb4c32ed..8ade265c9 100644 --- a/src/ott/solvers/linear/lr_utils.py +++ b/src/ott/solvers/linear/lr_utils.py @@ -24,27 +24,27 @@ class State(NamedTuple): # noqa: D101 - v1: jax.Array - v2: jax.Array - u1: jax.Array - u2: jax.Array - g: jax.Array + v1: jnp.ndarray + v2: jnp.ndarray + u1: jnp.ndarray + u2: jnp.ndarray + g: jnp.ndarray err: float class Constants(NamedTuple): # noqa: D101 - a: jax.Array - b: jax.Array + a: jnp.ndarray + b: jnp.ndarray rho_a: float rho_b: float - supp_a: Optional[jax.Array] = None - supp_b: Optional[jax.Array] = None + supp_a: Optional[jnp.ndarray] = None + supp_b: Optional[jnp.ndarray] = None def unbalanced_dykstra_lse( - c_q: jax.Array, - c_r: jax.Array, - c_g: jax.Array, + c_q: jnp.ndarray, + c_r: jnp.ndarray, + c_g: jnp.ndarray, gamma: float, ot_prob: linear_problem.LinearProblem, translation_invariant: bool = True, @@ -52,7 +52,7 @@ def unbalanced_dykstra_lse( min_iter: int = 0, inner_iter: int = 10, max_iter: int = 10000 -) -> Tuple[jax.Array, jax.Array, jax.Array]: +) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]: """Dykstra's algorithm for the unbalanced :class:`~ott.solvers.linear.sinkhorn_lr.LRSinkhorn` in LSE mode. 
@@ -74,10 +74,10 @@ def unbalanced_dykstra_lse( """ # noqa: D205 def _softm( - v: jax.Array, - c: jax.Array, + v: jnp.ndarray, + c: jnp.ndarray, axis: int, - ) -> jax.Array: + ) -> jnp.ndarray: v = jnp.expand_dims(v, axis=1 - axis) return jsp.special.logsumexp(v + c, axis=axis) @@ -181,9 +181,9 @@ def body_fn( def unbalanced_dykstra_kernel( - k_q: jax.Array, - k_r: jax.Array, - k_g: jax.Array, + k_q: jnp.ndarray, + k_r: jnp.ndarray, + k_g: jnp.ndarray, gamma: float, ot_prob: linear_problem.LinearProblem, translation_invariant: bool = True, @@ -191,7 +191,7 @@ def unbalanced_dykstra_kernel( min_iter: int = 0, inner_iter: int = 10, max_iter: int = 10000 -) -> Tuple[jax.Array, jax.Array, jax.Array]: +) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]: """Dykstra's algorithm for the unbalanced :class:`~ott.solvers.linear.sinkhorn_lr.LRSinkhorn` in kernel mode. @@ -317,7 +317,7 @@ def body_fn( def compute_lambdas( - const: Constants, state: State, gamma: float, g: jax.Array, *, + const: Constants, state: State, gamma: float, g: jnp.ndarray, *, lse_mode: bool ) -> Tuple[float, float]: """TODO.""" diff --git a/src/ott/solvers/linear/sinkhorn.py b/src/ott/solvers/linear/sinkhorn.py index 44afe1833..058c2905b 100644 --- a/src/ott/solvers/linear/sinkhorn.py +++ b/src/ott/solvers/linear/sinkhorn.py @@ -52,11 +52,11 @@ class SinkhornState(NamedTuple): """Holds the state variables used to solve OT with Sinkhorn.""" - errors: Optional[jax.Array] = None - fu: Optional[jax.Array] = None - gv: Optional[jax.Array] = None - old_fus: Optional[jax.Array] = None - old_mapped_fus: Optional[jax.Array] = None + errors: Optional[jnp.ndarray] = None + fu: Optional[jnp.ndarray] = None + gv: Optional[jnp.ndarray] = None + old_fus: Optional[jnp.ndarray] = None + old_mapped_fus: Optional[jnp.ndarray] = None def set(self, **kwargs: Any) -> "SinkhornState": """Return a copy of self, with potential overwrites.""" @@ -70,7 +70,7 @@ def solution_error( lse_mode: bool, parallel_dual_updates: bool, 
recenter: bool, - ) -> jax.Array: + ) -> jnp.ndarray: """State dependent function to return error.""" fu, gv = self.fu, self.gv if recenter and lse_mode: @@ -92,10 +92,10 @@ def compute_kl_reg_cost( # noqa: D102 def recenter( self, - f: jax.Array, - g: jax.Array, + f: jnp.ndarray, + g: jnp.ndarray, ot_prob: linear_problem.LinearProblem, - ) -> Tuple[jax.Array, jax.Array]: + ) -> Tuple[jnp.ndarray, jnp.ndarray]: """Re-center dual potentials. If the ``ot_prob`` is balanced, the ``f`` potential is zero-centered. @@ -132,14 +132,14 @@ def recenter( def solution_error( - f_u: jax.Array, - g_v: jax.Array, + f_u: jnp.ndarray, + g_v: jnp.ndarray, ot_prob: linear_problem.LinearProblem, *, norm_error: Sequence[int], lse_mode: bool, parallel_dual_updates: bool, -) -> jax.Array: +) -> jnp.ndarray: """Given two potential/scaling solutions, computes deviation to optimality. When the ``ot_prob`` problem is balanced and the usual Sinkhorn updates are @@ -153,8 +153,8 @@ def solution_error( additional quantities to qualify optimality must be taken into account. Args: - f_u: jax.Array, potential or scaling - g_v: jax.Array, potential or scaling + f_u: jnp.ndarray, potential or scaling + g_v: jnp.ndarray, potential or scaling ot_prob: linear OT problem norm_error: int, p-norm used to compute error. lse_mode: True if log-sum-exp operations, False if kernel vector products. @@ -196,9 +196,9 @@ def solution_error( def marginal_error( - f_u: jax.Array, - g_v: jax.Array, - target: jax.Array, + f_u: jnp.ndarray, + g_v: jnp.ndarray, + target: jnp.ndarray, geom: geometry.Geometry, axis: int = 0, norm_error: Sequence[int] = (1,), @@ -229,7 +229,7 @@ def marginal_error( def compute_kl_reg_cost( - f: jax.Array, g: jax.Array, ot_prob: linear_problem.LinearProblem, + f: jnp.ndarray, g: jnp.ndarray, ot_prob: linear_problem.LinearProblem, lse_mode: bool ) -> float: r"""Compute objective of Sinkhorn for OT problem given dual solutions. 
@@ -243,8 +243,8 @@ def compute_kl_reg_cost( values, ``jnp.where`` is used to cancel these contributions. Args: - f: jax.Array, potential - g: jax.Array, potential + f: jnp.ndarray, potential + g: jnp.ndarray, potential ot_prob: linear optimal transport problem. lse_mode: bool, whether to compute total mass in lse or kernel mode. @@ -320,12 +320,12 @@ class SinkhornOutput(NamedTuple): computations of errors. """ - f: Optional[jax.Array] = None - g: Optional[jax.Array] = None - errors: Optional[jax.Array] = None + f: Optional[jnp.ndarray] = None + g: Optional[jnp.ndarray] = None + errors: Optional[jnp.ndarray] = None reg_ot_cost: Optional[float] = None ot_prob: Optional[linear_problem.LinearProblem] = None - threshold: Optional[jax.Array] = None + threshold: Optional[jnp.ndarray] = None converged: Optional[bool] = None inner_iterations: Optional[int] = None @@ -342,7 +342,7 @@ def set_cost( # noqa: D102 return self.set(reg_ot_cost=compute_kl_reg_cost(f, g, ot_prob, lse_mode)) @property - def dual_cost(self) -> jax.Array: + def dual_cost(self) -> jnp.ndarray: """Return dual transport cost, without considering regularizer.""" a, b = self.ot_prob.a, self.ot_prob.b dual_cost = jnp.sum(jnp.where(a > 0.0, a * self.f, 0)) @@ -399,7 +399,9 @@ def kl_reg_cost(self) -> float: """ return self.reg_ot_cost - def transport_cost_at_geom(self, other_geom: geometry.Geometry) -> jax.Array: + def transport_cost_at_geom( + self, other_geom: geometry.Geometry + ) -> jnp.ndarray: r"""Return bare transport cost of current solution at any geometry. 
In order to compute cost, we check first if the geometry can be converted @@ -426,11 +428,11 @@ def geom(self) -> geometry.Geometry: # noqa: D102 return self.ot_prob.geom @property - def a(self) -> jax.Array: # noqa: D102 + def a(self) -> jnp.ndarray: # noqa: D102 return self.ot_prob.a @property - def b(self) -> jax.Array: # noqa: D102 + def b(self) -> jnp.ndarray: # noqa: D102 return self.ot_prob.b @property @@ -439,13 +441,13 @@ def n_iters(self) -> int: # noqa: D102 return jnp.sum(self.errors != -1) * self.inner_iterations @property - def scalings(self) -> Tuple[jax.Array, jax.Array]: # noqa: D102 + def scalings(self) -> Tuple[jnp.ndarray, jnp.ndarray]: # noqa: D102 u = self.ot_prob.geom.scaling_from_potential(self.f) v = self.ot_prob.geom.scaling_from_potential(self.g) return u, v @property - def matrix(self) -> jax.Array: + def matrix(self) -> jnp.ndarray: """Transport matrix if it can be instantiated.""" try: return self.ot_prob.geom.transport_from_potentials(self.f, self.g) @@ -457,13 +459,13 @@ def transport_mass(self) -> float: """Sum of transport matrix.""" return self.marginal(0).sum() - def apply(self, inputs: jax.Array, axis: int = 0) -> jax.Array: + def apply(self, inputs: jnp.ndarray, axis: int = 0) -> jnp.ndarray: """Apply the transport to a ndarray; axis=1 for its transpose.""" return self.ot_prob.geom.apply_transport_from_potentials( self.f, self.g, inputs, axis=axis ) - def marginal(self, axis: int) -> jax.Array: # noqa: D102 + def marginal(self, axis: int) -> jnp.ndarray: # noqa: D102 return self.ot_prob.geom.marginal_from_potentials(self.f, self.g, axis=axis) def cost_at_geom(self, other_geom: geometry.Geometry) -> float: @@ -830,8 +832,8 @@ def __init__( def __call__( self, ot_prob: linear_problem.LinearProblem, - init: Tuple[Optional[jax.Array], Optional[jax.Array]] = (None, None), - rng: Optional[jax.Array] = None, + init: Tuple[Optional[jnp.ndarray], Optional[jnp.ndarray]] = (None, None), + rng: Optional[jnp.ndarray] = None, ) -> 
SinkhornOutput: """Run Sinkhorn algorithm. @@ -866,7 +868,9 @@ def xi(tau_i: float, tau_j: float) -> float: k_ij = k(tau_i, tau_j) return k_ij / (1. - k_ij) - def smin(potential: jax.Array, marginal: jax.Array, tau: float) -> float: + def smin( + potential: jnp.ndarray, marginal: jnp.ndarray, tau: float + ) -> float: rho = uf.rho(ot_prob.epsilon, tau) return -rho * mu.logsumexp(-potential / rho, b=marginal) @@ -1011,8 +1015,8 @@ def outer_iterations(self) -> int: return np.ceil(self.max_iterations / self.inner_iterations).astype(int) def init_state( - self, ot_prob: linear_problem.LinearProblem, init: Tuple[jax.Array, - jax.Array] + self, ot_prob: linear_problem.LinearProblem, init: Tuple[jnp.ndarray, + jnp.ndarray] ) -> SinkhornState: """Return the initial state of the loop.""" fu, gv = init @@ -1120,7 +1124,7 @@ def tree_unflatten(cls, aux_data, children): # noqa: D102 def run( ot_prob: linear_problem.LinearProblem, solver: Sinkhorn, - init: Tuple[jax.Array, ...] + init: Tuple[jnp.ndarray, ...] ) -> SinkhornOutput: """Run loop of the solver, outputting a state upgraded to an output.""" iter_fun = _iterations_implicit if solver.implicit_diff else iterations @@ -1133,7 +1137,7 @@ def run( def iterations( ot_prob: linear_problem.LinearProblem, solver: Sinkhorn, - init: Tuple[jax.Array, ...] + init: Tuple[jnp.ndarray, ...] ) -> SinkhornOutput: """Jittable Sinkhorn loop. args contain initialization variables.""" @@ -1170,8 +1174,8 @@ def body_fn( def _iterations_taped( ot_prob: linear_problem.LinearProblem, solver: Sinkhorn, - init: Tuple[jax.Array, ...] -) -> Tuple[SinkhornOutput, Tuple[jax.Array, jax.Array, + init: Tuple[jnp.ndarray, ...] +) -> Tuple[SinkhornOutput, Tuple[jnp.ndarray, jnp.ndarray, linear_problem.LinearProblem, Sinkhorn]]: """Run forward pass of the Sinkhorn algorithm storing side information.""" state = iterations(ot_prob, solver, init) @@ -1190,7 +1194,7 @@ def _iterations_implicit_bwd(res, gr): considered. 
Returns: - a tuple of gradients: PyTree for geom, one jax.Array for each of a and b. + a tuple of gradients: PyTree for geom, one jnp.ndarray for each of a and b. """ f, g, ot_prob, solver = res gr = gr[:2] diff --git a/src/ott/solvers/linear/sinkhorn_lr.py b/src/ott/solvers/linear/sinkhorn_lr.py index b6732f76f..ba83aeb99 100644 --- a/src/ott/solvers/linear/sinkhorn_lr.py +++ b/src/ott/solvers/linear/sinkhorn_lr.py @@ -43,12 +43,12 @@ class LRSinkhornState(NamedTuple): """State of the Low Rank Sinkhorn algorithm.""" - q: jax.Array - r: jax.Array - g: jax.Array + q: jnp.ndarray + r: jnp.ndarray + g: jnp.ndarray gamma: float - costs: jax.Array - errors: jax.Array + costs: jnp.ndarray + errors: jnp.ndarray crossed_threshold: bool def compute_error( # noqa: D102 @@ -79,7 +79,7 @@ def reg_ot_cost( # noqa: D102 def solution_error( # noqa: D102 self, ot_prob: linear_problem.LinearProblem, norm_error: Tuple[int, ...] - ) -> jax.Array: + ) -> jnp.ndarray: return solution_error(self.q, self.r, ot_prob, norm_error) def set(self, **kwargs: Any) -> "LRSinkhornState": @@ -88,9 +88,9 @@ def set(self, **kwargs: Any) -> "LRSinkhornState": def compute_reg_ot_cost( - q: jax.Array, - r: jax.Array, - g: jax.Array, + q: jnp.ndarray, + r: jnp.ndarray, + g: jnp.ndarray, ot_prob: linear_problem.LinearProblem, epsilon: float, use_danskin: bool = False @@ -110,7 +110,7 @@ def compute_reg_ot_cost( regularized OT cost, the (primal) transport cost of the low-rank solution. """ - def ent(x: jax.Array) -> float: + def ent(x: jnp.ndarray) -> float: # generalized entropy return jnp.sum(jsp.special.entr(x) + x) @@ -131,9 +131,9 @@ def ent(x: jax.Array) -> float: def solution_error( - q: jax.Array, r: jax.Array, ot_prob: linear_problem.LinearProblem, + q: jnp.ndarray, r: jnp.ndarray, ot_prob: linear_problem.LinearProblem, norm_error: Tuple[int, ...] -) -> jax.Array: +) -> jnp.ndarray: """Compute solution error. Since only balanced case is available for LR, this is marginal deviation. 
@@ -166,13 +166,13 @@ def solution_error( class LRSinkhornOutput(NamedTuple): """Transport interface for a low-rank Sinkhorn solution.""" - q: jax.Array - r: jax.Array - g: jax.Array - costs: jax.Array + q: jnp.ndarray + r: jnp.ndarray + g: jnp.ndarray + costs: jnp.ndarray # TODO(michalk8): must be called `errors`, because of `store_inner_errors` # in future, enforce via class hierarchy - errors: jax.Array + errors: jnp.ndarray ot_prob: linear_problem.LinearProblem epsilon: float inner_iterations: int @@ -211,11 +211,11 @@ def geom(self) -> geometry.Geometry: # noqa: D102 return self.ot_prob.geom @property - def a(self) -> jax.Array: # noqa: D102 + def a(self) -> jnp.ndarray: # noqa: D102 return self.ot_prob.a @property - def b(self) -> jax.Array: # noqa: D102 + def b(self) -> jnp.ndarray: # noqa: D102 return self.ot_prob.b @property @@ -229,17 +229,17 @@ def converged(self) -> bool: # noqa: D102 ) @property - def matrix(self) -> jax.Array: + def matrix(self) -> jnp.ndarray: """Transport matrix if it can be instantiated.""" return (self.q * self._inv_g) @ self.r.T - def apply(self, inputs: jax.Array, axis: int = 0) -> jax.Array: + def apply(self, inputs: jnp.ndarray, axis: int = 0) -> jnp.ndarray: """Apply the transport to a array; axis=1 for its transpose.""" q, r = (self.q, self.r) if axis == 1 else (self.r, self.q) # for `axis=0`: (batch, m), (m, r), (r,), (r, n) return ((inputs @ r) * self._inv_g) @ q.T - def marginal(self, axis: int) -> jax.Array: # noqa: D102 + def marginal(self, axis: int) -> jnp.ndarray: # noqa: D102 length = self.q.shape[0] if axis == 0 else self.r.shape[0] return self.apply(jnp.ones(length,), axis=axis) @@ -262,7 +262,7 @@ def transport_mass(self) -> float: return self.marginal(0).sum() @property - def _inv_g(self) -> jax.Array: + def _inv_g(self) -> jnp.ndarray: return 1. 
/ self.g @@ -341,9 +341,9 @@ def __init__( def __call__( self, ot_prob: linear_problem.LinearProblem, - init: Tuple[Optional[jax.Array], Optional[jax.Array], - Optional[jax.Array]] = (None, None, None), - rng: Optional[jax.Array] = None, + init: Tuple[Optional[jnp.ndarray], Optional[jnp.ndarray], + Optional[jnp.ndarray]] = (None, None, None), + rng: Optional[jnp.ndarray] = None, **kwargs: Any, ) -> LRSinkhornOutput: """Run low-rank Sinkhorn. @@ -371,7 +371,7 @@ def _get_costs( self, ot_prob: linear_problem.LinearProblem, state: LRSinkhornState, - ) -> Tuple[jax.Array, jax.Array, jax.Array, float]: + ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray, float]: log_q, log_r, log_g = ( mu.safe_log(state.q), mu.safe_log(state.r), mu.safe_log(state.g) ) @@ -407,9 +407,9 @@ def _get_costs( # TODO(michalk8): move to `lr_utils` when refactoring this def dykstra_update_lse( self, - c_q: jax.Array, - c_r: jax.Array, - h: jax.Array, + c_q: jnp.ndarray, + c_r: jnp.ndarray, + h: jnp.ndarray, gamma: float, ot_prob: linear_problem.LinearProblem, min_entry_value: float = 1e-6, @@ -417,7 +417,7 @@ def dykstra_update_lse( min_iter: int = 0, inner_iter: int = 10, max_iter: int = 10000 - ) -> Tuple[jax.Array, jax.Array, jax.Array]: + ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]: """Run Dykstra's algorithm.""" # shortcuts for problem's definition. r = self.rank @@ -435,24 +435,24 @@ def dykstra_update_lse( constants = c_q, c_r, loga, logb def cond_fn( - iteration: int, constants: Tuple[jax.Array, ...], - state_inner: Tuple[jax.Array, ...] + iteration: int, constants: Tuple[jnp.ndarray, ...], + state_inner: Tuple[jnp.ndarray, ...] 
) -> bool: del iteration, constants *_, err = state_inner return err > tolerance def _softm( - f: jax.Array, g: jax.Array, c: jax.Array, axis: int - ) -> jax.Array: + f: jnp.ndarray, g: jnp.ndarray, c: jnp.ndarray, axis: int + ) -> jnp.ndarray: return jsp.special.logsumexp( gamma * (f[:, None] + g[None, :] - c), axis=axis ) def body_fn( - iteration: int, constants: Tuple[jax.Array, ...], - state_inner: Tuple[jax.Array, ...], compute_error: bool - ) -> Tuple[jax.Array, ...]: + iteration: int, constants: Tuple[jnp.ndarray, ...], + state_inner: Tuple[jnp.ndarray, ...], compute_error: bool + ) -> Tuple[jnp.ndarray, ...]: # TODO(michalk8): in the future, use `NamedTuple` f1, f2, g1_old, g2_old, h_old, w_gi, w_gp, w_q, w_r, err = state_inner c_q, c_r, loga, logb = constants @@ -501,15 +501,15 @@ def body_fn( return f1, f2, g1_old, g2_old, h_old, w_gi, w_gp, w_q, w_r, err def recompute_couplings( - f1: jax.Array, - g1: jax.Array, - c_q: jax.Array, - f2: jax.Array, - g2: jax.Array, - c_r: jax.Array, - h: jax.Array, + f1: jnp.ndarray, + g1: jnp.ndarray, + c_q: jnp.ndarray, + f2: jnp.ndarray, + g2: jnp.ndarray, + c_r: jnp.ndarray, + h: jnp.ndarray, gamma: float, - ) -> Tuple[jax.Array, jax.Array, jax.Array]: + ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]: q = jnp.exp(gamma * (f1[:, None] + g1[None, :] - c_q)) r = jnp.exp(gamma * (f2[:, None] + g2[None, :] - c_r)) g = jnp.exp(gamma * h) @@ -524,9 +524,9 @@ def recompute_couplings( def dykstra_update_kernel( self, - k_q: jax.Array, - k_r: jax.Array, - k_g: jax.Array, + k_q: jnp.ndarray, + k_r: jnp.ndarray, + k_g: jnp.ndarray, gamma: float, ot_prob: linear_problem.LinearProblem, min_entry_value: float = 1e-6, @@ -534,7 +534,7 @@ def dykstra_update_kernel( min_iter: int = 0, inner_iter: int = 10, max_iter: int = 10000 - ) -> Tuple[jax.Array, jax.Array, jax.Array]: + ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]: """Run Dykstra's algorithm.""" # shortcuts for problem's definition. 
rank = self.rank @@ -553,17 +553,17 @@ def dykstra_update_kernel( constants = k_q, k_r, k_g, a, b def cond_fn( - iteration: int, constants: Tuple[jax.Array, ...], - state_inner: Tuple[jax.Array, ...] + iteration: int, constants: Tuple[jnp.ndarray, ...], + state_inner: Tuple[jnp.ndarray, ...] ) -> bool: del iteration, constants *_, err = state_inner return err > tolerance def body_fn( - iteration: int, constants: Tuple[jax.Array, ...], - state_inner: Tuple[jax.Array, ...], compute_error: bool - ) -> Tuple[jax.Array, ...]: + iteration: int, constants: Tuple[jnp.ndarray, ...], + state_inner: Tuple[jnp.ndarray, ...], compute_error: bool + ) -> Tuple[jnp.ndarray, ...]: # TODO(michalk8): in the future, use `NamedTuple` u1, u2, v1_old, v2_old, g_old, q_gi, q_gp, q_q, q_r, err = state_inner k_q, k_r, k_g, a, b = constants @@ -600,14 +600,14 @@ def body_fn( return u1, u2, v1_old, v2_old, g_old, q_gi, q_gp, q_q, q_r, err def recompute_couplings( - u1: jax.Array, - v1: jax.Array, - k_q: jax.Array, - u2: jax.Array, - v2: jax.Array, - k_r: jax.Array, - g: jax.Array, - ) -> Tuple[jax.Array, jax.Array, jax.Array]: + u1: jnp.ndarray, + v1: jnp.ndarray, + k_q: jnp.ndarray, + u2: jnp.ndarray, + v2: jnp.ndarray, + k_r: jnp.ndarray, + g: jnp.ndarray, + ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]: q = u1.reshape((-1, 1)) * k_q * v1.reshape((1, -1)) r = u2.reshape((-1, 1)) * k_r * v2.reshape((1, -1)) return q, r, g @@ -736,7 +736,7 @@ def create_initializer( def init_state( self, ot_prob: linear_problem.LinearProblem, - init: Tuple[jax.Array, jax.Array, jax.Array] + init: Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray] ) -> LRSinkhornState: """Return the initial state of the loop.""" q, r, g = init @@ -811,7 +811,8 @@ def _diverged(self, state: LRSinkhornState, iteration: int) -> bool: def run( ot_prob: linear_problem.LinearProblem, solver: LRSinkhorn, - init: Tuple[Optional[jax.Array], Optional[jax.Array], Optional[jax.Array]], + init: Tuple[Optional[jnp.ndarray], 
Optional[jnp.ndarray], + Optional[jnp.ndarray]], ) -> LRSinkhornOutput: """Run loop of the solver, outputting a state upgraded to an output.""" out = sinkhorn.iterations(ot_prob, solver, init) diff --git a/src/ott/solvers/linear/univariate.py b/src/ott/solvers/linear/univariate.py index 1f2a47b6f..2b6392227 100644 --- a/src/ott/solvers/linear/univariate.py +++ b/src/ott/solvers/linear/univariate.py @@ -53,7 +53,7 @@ class UnivariateSolver: def __init__( self, - sort_fn: Optional[Callable[[jax.Array], jax.Array]] = None, + sort_fn: Optional[Callable[[jnp.ndarray], jnp.ndarray]] = None, cost_fn: Optional[costs.CostFn] = None, method: Literal["subsample", "quantile", "wasserstein", "equal"] = "subsample", @@ -66,10 +66,10 @@ def __init__( def __call__( self, - x: jax.Array, - y: jax.Array, - a: Optional[jax.Array] = None, - b: Optional[jax.Array] = None + x: jnp.ndarray, + y: jnp.ndarray, + a: Optional[jnp.ndarray] = None, + b: Optional[jnp.ndarray] = None ) -> float: """Computes the Univariate OT Distance between `x` and `y`. @@ -113,8 +113,8 @@ def __call__( return self.cost_fn.pairwise(xx, yy) * (n / xx.shape[0]) def _cdf_distance( - self, x: jax.Array, y: jax.Array, a: Optional[jax.Array], - b: Optional[jax.Array] + self, x: jnp.ndarray, y: jnp.ndarray, a: Optional[jnp.ndarray], + b: Optional[jnp.ndarray] ): # Implementation based on `scipy` implementation for # :func: diff --git a/src/ott/solvers/quadratic/_solve.py b/src/ott/solvers/quadratic/_solve.py index 986680637..9cdefec93 100644 --- a/src/ott/solvers/quadratic/_solve.py +++ b/src/ott/solvers/quadratic/_solve.py @@ -13,7 +13,7 @@ # limitations under the License. 
from typing import Any, Literal, Optional, Union -import jax +import jax.numpy as jnp from ott.geometry import geometry from ott.problems.quadratic import quadratic_costs, quadratic_problem @@ -28,8 +28,8 @@ def solve( geom_yy: geometry.Geometry, geom_xy: Optional[geometry.Geometry] = None, fused_penalty: float = 1.0, - a: Optional[jax.Array] = None, - b: Optional[jax.Array] = None, + a: Optional[jnp.ndarray] = None, + b: Optional[jnp.ndarray] = None, tau_a: float = 1.0, tau_b: float = 1.0, loss: Union[Literal["sqeucl", "kl"], quadratic_costs.GWLoss] = "sqeucl", diff --git a/src/ott/solvers/quadratic/gromov_wasserstein.py b/src/ott/solvers/quadratic/gromov_wasserstein.py index 554cdaaed..862b91999 100644 --- a/src/ott/solvers/quadratic/gromov_wasserstein.py +++ b/src/ott/solvers/quadratic/gromov_wasserstein.py @@ -63,10 +63,10 @@ class GWOutput(NamedTuple): old_transport_mass: Holds total mass of transport at previous iteration. """ - costs: Optional[jax.Array] = None - linear_convergence: Optional[jax.Array] = None + costs: Optional[jnp.ndarray] = None + linear_convergence: Optional[jnp.ndarray] = None converged: bool = False - errors: Optional[jax.Array] = None + errors: Optional[jnp.ndarray] = None linear_state: Optional[LinearOutput] = None geom: Optional[geometry.Geometry] = None # Intermediate values. @@ -77,11 +77,11 @@ def set(self, **kwargs: Any) -> "GWOutput": return self._replace(**kwargs) @property - def matrix(self) -> jax.Array: + def matrix(self) -> jnp.ndarray: """Transport matrix.""" return self._rescale_factor * self.linear_state.matrix - def apply(self, inputs: jax.Array, axis: int = 0) -> jax.Array: + def apply(self, inputs: jnp.ndarray, axis: int = 0) -> jnp.ndarray: """Apply the transport to an array; axis=1 for its transpose.""" return self._rescale_factor * self.linear_state.apply(inputs, axis=axis) @@ -124,13 +124,13 @@ class GWState(NamedTuple): at each iteration. 
""" - costs: jax.Array - linear_convergence: jax.Array + costs: jnp.ndarray + linear_convergence: jnp.ndarray linear_state: LinearOutput linear_pb: linear_problem.LinearProblem old_transport_mass: float - rngs: Optional[jax.Array] = None - errors: Optional[jax.Array] = None + rngs: Optional[jnp.ndarray] = None + errors: Optional[jnp.ndarray] = None def set(self, **kwargs: Any) -> "GWState": """Return a copy of self, possibly with overwrites.""" @@ -213,7 +213,7 @@ def __call__( self, prob: quadratic_problem.QuadraticProblem, init: Optional[linear_problem.LinearProblem] = None, - rng: Optional[jax.Array] = None, + rng: Optional[jnp.ndarray] = None, **kwargs: Any, ) -> GWOutput: """Run the Gromov-Wasserstein solver. @@ -272,7 +272,7 @@ def init_state( self, prob: quadratic_problem.QuadraticProblem, init: linear_problem.LinearProblem, - rng: jax.Array, + rng: jnp.ndarray, ) -> GWState: """Initialize the state of the Gromov-Wasserstein iterations. @@ -361,7 +361,7 @@ def iterations( solver: GromovWasserstein, prob: quadratic_problem.QuadraticProblem, init: linear_problem.LinearProblem, - rng: jax.Array, + rng: jnp.ndarray, ) -> GWOutput: """Jittable Gromov-Wasserstein outer loop.""" diff --git a/src/ott/solvers/quadratic/gromov_wasserstein_lr.py b/src/ott/solvers/quadratic/gromov_wasserstein_lr.py index 62a5592bc..710d8f617 100644 --- a/src/ott/solvers/quadratic/gromov_wasserstein_lr.py +++ b/src/ott/solvers/quadratic/gromov_wasserstein_lr.py @@ -46,12 +46,12 @@ class LRGWState(NamedTuple): """State of the low-rank GW algorithm.""" - q: jax.Array - r: jax.Array - g: jax.Array + q: jnp.ndarray + r: jnp.ndarray + g: jnp.ndarray gamma: float - costs: jax.Array - errors: jax.Array + costs: jnp.ndarray + errors: jnp.ndarray crossed_threshold: bool def compute_error( # noqa: D102 @@ -85,9 +85,9 @@ def set(self, **kwargs: Any) -> "LRGWState": def compute_reg_gw_cost( - q: jax.Array, - r: jax.Array, - g: jax.Array, + q: jnp.ndarray, + r: jnp.ndarray, + g: jnp.ndarray, ot_prob: 
quadratic_problem.QuadraticProblem, epsilon: float, use_danskin: bool = False @@ -107,7 +107,7 @@ def compute_reg_gw_cost( regularized OT cost, the (primal) transport cost of the low-rank solution. """ - def ent(x: jax.Array) -> float: + def ent(x: jnp.ndarray) -> float: # generalized entropy return jnp.sum(jsp.special.entr(x) + x) @@ -139,13 +139,13 @@ def ent(x: jax.Array) -> float: class LRGWOutput(NamedTuple): """Transport interface for a low-rank GW solution.""" - q: jax.Array - r: jax.Array - g: jax.Array - costs: jax.Array + q: jnp.ndarray + r: jnp.ndarray + g: jnp.ndarray + costs: jnp.ndarray # TODO(michalk8): must be called `errors`, because of `store_inner_errors` # in future, enforce via class hierarchy - errors: jax.Array + errors: jnp.ndarray ot_prob: quadratic_problem.QuadraticProblem epsilon: float inner_iterations: int @@ -184,11 +184,11 @@ def geom(self) -> geometry.Geometry: # noqa: D102 return _linearized_geometry(self.ot_prob, q=self.q, r=self.r, g=self.g) @property - def a(self) -> jax.Array: # noqa: D102 + def a(self) -> jnp.ndarray: # noqa: D102 return self.ot_prob.a @property - def b(self) -> jax.Array: # noqa: D102 + def b(self) -> jnp.ndarray: # noqa: D102 return self.ot_prob.b @property @@ -202,17 +202,17 @@ def converged(self) -> bool: # noqa: D102 ) @property - def matrix(self) -> jax.Array: + def matrix(self) -> jnp.ndarray: """Transport matrix if it can be instantiated.""" return (self.q * self._inv_g) @ self.r.T - def apply(self, inputs: jax.Array, axis: int = 0) -> jax.Array: + def apply(self, inputs: jnp.ndarray, axis: int = 0) -> jnp.ndarray: """Apply the transport to a array; axis=1 for its transpose.""" q, r = (self.q, self.r) if axis == 1 else (self.r, self.q) # for `axis=0`: (batch, m), (m, r), (r,), (r, n) return ((inputs @ r) * self._inv_g) @ q.T - def marginal(self, axis: int) -> jax.Array: # noqa: D102 + def marginal(self, axis: int) -> jnp.ndarray: # noqa: D102 length = self.q.shape[0] if axis == 0 else self.r.shape[0] 
return self.apply(jnp.ones(length,), axis=axis) @@ -250,7 +250,7 @@ def transport_mass(self) -> float: return self.marginal(0).sum() @property - def _inv_g(self) -> jax.Array: + def _inv_g(self) -> jnp.ndarray: return 1.0 / self.g @@ -334,9 +334,9 @@ def __init__( def __call__( self, ot_prob: quadratic_problem.QuadraticProblem, - init: Tuple[Optional[jax.Array], Optional[jax.Array], - Optional[jax.Array]] = (None, None, None), - rng: Optional[jax.Array] = None, + init: Tuple[Optional[jnp.ndarray], Optional[jnp.ndarray], + Optional[jnp.ndarray]] = (None, None, None), + rng: Optional[jnp.ndarray] = None, **kwargs: Any, ) -> LRGWOutput: """Run low-rank Gromov-Wasserstein solver. @@ -370,7 +370,7 @@ def _get_costs( self, ot_prob: quadratic_problem.QuadraticProblem, state: LRGWState, - ) -> Tuple[jax.Array, jax.Array, jax.Array, float]: + ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray, float]: q, r, g = state.q, state.r, state.g log_q, log_r, log_g = mu.safe_log(q), mu.safe_log(r), mu.safe_log(g) inv_g = 1.0 / g[None, :] @@ -427,9 +427,9 @@ def _get_costs( # TODO(michalk8): move to `lr_utils` when refactoring this the future def dykstra_update_lse( self, - c_q: jax.Array, - c_r: jax.Array, - h: jax.Array, + c_q: jnp.ndarray, + c_r: jnp.ndarray, + h: jnp.ndarray, gamma: float, ot_prob: quadratic_problem.QuadraticProblem, min_entry_value: float = 1e-6, @@ -437,7 +437,7 @@ def dykstra_update_lse( min_iter: int = 0, inner_iter: int = 10, max_iter: int = 10000 - ) -> Tuple[jax.Array, jax.Array, jax.Array]: + ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]: """Run Dykstra's algorithm.""" # shortcuts for problem's definition. r = self.rank @@ -455,24 +455,24 @@ def dykstra_update_lse( constants = c_q, c_r, loga, logb def cond_fn( - iteration: int, constants: Tuple[jax.Array, ...], - state_inner: Tuple[jax.Array, ...] + iteration: int, constants: Tuple[jnp.ndarray, ...], + state_inner: Tuple[jnp.ndarray, ...] 
) -> bool: del iteration, constants *_, err = state_inner return err > tolerance def _softm( - f: jax.Array, g: jax.Array, c: jax.Array, axis: int - ) -> jax.Array: + f: jnp.ndarray, g: jnp.ndarray, c: jnp.ndarray, axis: int + ) -> jnp.ndarray: return jsp.special.logsumexp( gamma * (f[:, None] + g[None, :] - c), axis=axis ) def body_fn( - iteration: int, constants: Tuple[jax.Array, ...], - state_inner: Tuple[jax.Array, ...], compute_error: bool - ) -> Tuple[jax.Array, ...]: + iteration: int, constants: Tuple[jnp.ndarray, ...], + state_inner: Tuple[jnp.ndarray, ...], compute_error: bool + ) -> Tuple[jnp.ndarray, ...]: # TODO(michalk8): in the future, use `NamedTuple` f1, f2, g1_old, g2_old, h_old, w_gi, w_gp, w_q, w_r, err = state_inner c_q, c_r, loga, logb = constants @@ -522,15 +522,15 @@ def body_fn( return f1, f2, g1_old, g2_old, h_old, w_gi, w_gp, w_q, w_r, err def recompute_couplings( - f1: jax.Array, - g1: jax.Array, - c_q: jax.Array, - f2: jax.Array, - g2: jax.Array, - c_r: jax.Array, - h: jax.Array, + f1: jnp.ndarray, + g1: jnp.ndarray, + c_q: jnp.ndarray, + f2: jnp.ndarray, + g2: jnp.ndarray, + c_r: jnp.ndarray, + h: jnp.ndarray, gamma: float, - ) -> Tuple[jax.Array, jax.Array, jax.Array]: + ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]: q = jnp.exp(gamma * (f1[:, None] + g1[None, :] - c_q)) r = jnp.exp(gamma * (f2[:, None] + g2[None, :] - c_r)) g = jnp.exp(gamma * h) @@ -545,9 +545,9 @@ def recompute_couplings( def dykstra_update_kernel( self, - k_q: jax.Array, - k_r: jax.Array, - k_g: jax.Array, + k_q: jnp.ndarray, + k_r: jnp.ndarray, + k_g: jnp.ndarray, gamma: float, ot_prob: quadratic_problem.QuadraticProblem, min_entry_value: float = 1e-6, @@ -555,7 +555,7 @@ def dykstra_update_kernel( min_iter: int = 0, inner_iter: int = 10, max_iter: int = 10000 - ) -> Tuple[jax.Array, jax.Array, jax.Array]: + ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]: """Run Dykstra's algorithm.""" # shortcuts for problem's definition. 
del gamma @@ -575,17 +575,17 @@ def dykstra_update_kernel( constants = k_q, k_r, k_g, a, b def cond_fn( - iteration: int, constants: Tuple[jax.Array, ...], - state_inner: Tuple[jax.Array, ...] + iteration: int, constants: Tuple[jnp.ndarray, ...], + state_inner: Tuple[jnp.ndarray, ...] ) -> bool: del iteration, constants *_, err = state_inner return err > tolerance def body_fn( - iteration: int, constants: Tuple[jax.Array, ...], - state_inner: Tuple[jax.Array, ...], compute_error: bool - ) -> Tuple[jax.Array, ...]: + iteration: int, constants: Tuple[jnp.ndarray, ...], + state_inner: Tuple[jnp.ndarray, ...], compute_error: bool + ) -> Tuple[jnp.ndarray, ...]: # TODO(michalk8): in the future, use `NamedTuple` u1, u2, v1_old, v2_old, g_old, q_gi, q_gp, q_q, q_r, err = state_inner k_q, k_r, k_g, a, b = constants @@ -623,14 +623,14 @@ def body_fn( return u1, u2, v1_old, v2_old, g_old, q_gi, q_gp, q_q, q_r, err def recompute_couplings( - u1: jax.Array, - v1: jax.Array, - k_q: jax.Array, - u2: jax.Array, - v2: jax.Array, - k_r: jax.Array, - g: jax.Array, - ) -> Tuple[jax.Array, jax.Array, jax.Array]: + u1: jnp.ndarray, + v1: jnp.ndarray, + k_q: jnp.ndarray, + u2: jnp.ndarray, + v2: jnp.ndarray, + k_r: jnp.ndarray, + g: jnp.ndarray, + ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]: q = u1.reshape((-1, 1)) * k_q * v1.reshape((1, -1)) r = u2.reshape((-1, 1)) * k_r * v2.reshape((1, -1)) return q, r, g @@ -762,7 +762,7 @@ def create_initializer( def init_state( self, ot_prob: quadratic_problem.QuadraticProblem, - init: Tuple[jax.Array, jax.Array, jax.Array] + init: Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray] ) -> LRGWState: """Return the initial state of the loop.""" q, r, g = init @@ -837,7 +837,8 @@ def _diverged(self, state: LRGWState, iteration: int) -> bool: def run( ot_prob: quadratic_problem.QuadraticProblem, solver: LRGromovWasserstein, - init: Tuple[Optional[jax.Array], Optional[jax.Array], Optional[jax.Array]], + init: Tuple[Optional[jnp.ndarray], 
Optional[jnp.ndarray], + Optional[jnp.ndarray]], ) -> LRGWOutput: """Run loop of the solver, outputting a state upgraded to an output.""" out = sinkhorn.iterations(ot_prob, solver, init) @@ -848,9 +849,9 @@ def run( def dykstra_solution_error( - q: jax.Array, r: jax.Array, ot_prob: quadratic_problem.QuadraticProblem, + q: jnp.ndarray, r: jnp.ndarray, ot_prob: quadratic_problem.QuadraticProblem, norm_error: Tuple[int, ...] -) -> jax.Array: +) -> jnp.ndarray: """Compute solution error. Since only balanced case is available for LR, this is marginal deviation. @@ -883,9 +884,9 @@ def dykstra_solution_error( def _linearized_geometry( prob: quadratic_problem.QuadraticProblem, *, - q: jax.Array, - r: jax.Array, - g: jax.Array, + q: jnp.ndarray, + r: jnp.ndarray, + g: jnp.ndarray, ) -> low_rank.LRCGeometry: inv_sqrt_g = 1.0 / jnp.sqrt(g[None, :]) diff --git a/src/ott/solvers/quadratic/gw_barycenter.py b/src/ott/solvers/quadratic/gw_barycenter.py index 0f753793e..8816c5ada 100644 --- a/src/ott/solvers/quadratic/gw_barycenter.py +++ b/src/ott/solvers/quadratic/gw_barycenter.py @@ -45,13 +45,13 @@ class GWBarycenterState(NamedTuple): gw_convergence: Array of shape ``[max_iter,]`` containing the convergence of all GW problems at each iteration. 
""" - cost: Optional[jax.Array] = None - x: Optional[jax.Array] = None - a: Optional[jax.Array] = None - errors: Optional[jax.Array] = None - costs: Optional[jax.Array] = None - costs_bary: Optional[jax.Array] = None - gw_convergence: Optional[jax.Array] = None + cost: Optional[jnp.ndarray] = None + x: Optional[jnp.ndarray] = None + a: Optional[jnp.ndarray] = None + errors: Optional[jnp.ndarray] = None + costs: Optional[jnp.ndarray] = None + costs_bary: Optional[jnp.ndarray] = None + gw_convergence: Optional[jnp.ndarray] = None def set(self, **kwargs: Any) -> "GWBarycenterState": """Return a copy of self, possibly with overwrites.""" @@ -133,9 +133,10 @@ def init_state( self, problem: gw_barycenter.GWBarycenterProblem, bar_size: int, - bar_init: Optional[Union[jax.Array, Tuple[jax.Array, jax.Array]]] = None, - a: Optional[jax.Array] = None, - rng: Optional[jax.Array] = None, + bar_init: Optional[Union[jnp.ndarray, Tuple[jnp.ndarray, + jnp.ndarray]]] = None, + a: Optional[jnp.ndarray] = None, + rng: Optional[jnp.ndarray] = None, ) -> GWBarycenterState: """Initialize the (fused) Gromov-Wasserstein barycenter state. 
@@ -209,13 +210,13 @@ def update_state( iteration: int, problem: gw_barycenter.GWBarycenterProblem, store_errors: bool = True, - ) -> Tuple[float, bool, jax.Array, Optional[jax.Array]]: + ) -> Tuple[float, bool, jnp.ndarray, Optional[jnp.ndarray]]: """Solve the (fused) Gromov-Wasserstein barycenter problem.""" def solve_gw( - state: GWBarycenterState, b: jax.Array, y: jax.Array, - f: Optional[jax.Array] - ) -> Tuple[float, bool, jax.Array, Optional[jax.Array]]: + state: GWBarycenterState, b: jnp.ndarray, y: jnp.ndarray, + f: Optional[jnp.ndarray] + ) -> Tuple[float, bool, jnp.ndarray, Optional[jnp.ndarray]]: quad_problem = problem._create_problem(state, y=y, b=b, f=f) out = self._quad_solver(quad_problem) return ( @@ -281,8 +282,9 @@ def tree_unflatten( # noqa: D102 @partial(jax.vmap, in_axes=[None, 0, None, 0, None]) def init_transports( - solver, rng: jax.Array, a: jax.Array, b: jax.Array, epsilon: Optional[float] -) -> jax.Array: + solver, rng: jnp.ndarray, a: jnp.ndarray, b: jnp.ndarray, + epsilon: Optional[float] +) -> jnp.ndarray: """Initialize random 2D point cloud and solve the linear OT problem. Args: diff --git a/src/ott/tools/gaussian_mixture/fit_gmm.py b/src/ott/tools/gaussian_mixture/fit_gmm.py index 45d8e0935..0e3fbc4e8 100644 --- a/src/ott/tools/gaussian_mixture/fit_gmm.py +++ b/src/ott/tools/gaussian_mixture/fit_gmm.py @@ -62,8 +62,8 @@ def get_assignment_probs( - gmm: gaussian_mixture.GaussianMixture, points: jax.Array -) -> jax.Array: + gmm: gaussian_mixture.GaussianMixture, points: jnp.ndarray +) -> jnp.ndarray: r"""Get component assignment probabilities used in the E step of EM. 
Here we compute the component assignment probabilities p(Z|X, \Theta^{(t)}) @@ -81,9 +81,9 @@ def get_assignment_probs( def get_q( gmm: gaussian_mixture.GaussianMixture, - assignment_probs: jax.Array, - points: jax.Array, - point_weights: Optional[jax.Array] = None, + assignment_probs: jnp.ndarray, + points: jnp.ndarray, + point_weights: Optional[jnp.ndarray] = None, ) -> float: r"""Get Q(\Theta|\Theta^{(t)}). @@ -109,8 +109,8 @@ def get_q( def log_prob_loss( gmm: gaussian_mixture.GaussianMixture, - points: jax.Array, - point_weights: Optional[jax.Array] = None, + points: jnp.ndarray, + point_weights: Optional[jnp.ndarray] = None, ) -> float: """Loss function: weighted mean of (-log prob of observations). @@ -130,8 +130,8 @@ def log_prob_loss( def fit_model_em( gmm: gaussian_mixture.GaussianMixture, - points: jax.Array, - point_weights: Optional[jax.Array], + points: jnp.ndarray, + point_weights: Optional[jnp.ndarray], steps: int, jit: bool = True, verbose: bool = False, @@ -184,10 +184,10 @@ def fit_model_em( # See https://en.wikipedia.org/wiki/K-means%2B%2B for details -def _get_dist_sq(points: jax.Array, loc: jax.Array) -> jax.Array: +def _get_dist_sq(points: jnp.ndarray, loc: jnp.ndarray) -> jnp.ndarray: """Get the squared distance from each point to each loc.""" - def _dist_sq_one_loc(points: jax.Array, loc: jax.Array) -> jax.Array: + def _dist_sq_one_loc(points: jnp.ndarray, loc: jnp.ndarray) -> jnp.ndarray: return jnp.sum((points - loc[None]) ** 2., axis=-1) dist_sq_fn = jax.vmap(_dist_sq_one_loc, in_axes=(None, 0), out_axes=1) @@ -195,8 +195,8 @@ def _dist_sq_one_loc(points: jax.Array, loc: jax.Array) -> jax.Array: def _get_locs( - rng: jax.Array, points: jax.Array, n_components: int -) -> jax.Array: + rng: jnp.ndarray, points: jnp.ndarray, n_components: int +) -> jnp.ndarray: """Get the initial component means. 
Args: @@ -229,9 +229,9 @@ def _get_locs( def from_kmeans_plusplus( - rng: jax.Array, - points: jax.Array, - point_weights: Optional[jax.Array], + rng: jnp.ndarray, + points: jnp.ndarray, + point_weights: Optional[jnp.ndarray], n_components: int, ) -> gaussian_mixture.GaussianMixture: """Initialize a GMM via a single pass of K-means++. @@ -265,9 +265,9 @@ def from_kmeans_plusplus( def initialize( - rng: jax.Array, - points: jax.Array, - point_weights: Optional[jax.Array], + rng: jnp.ndarray, + points: jnp.ndarray, + point_weights: Optional[jnp.ndarray], n_components: int, n_attempts: int = 50, verbose: bool = False diff --git a/src/ott/tools/gaussian_mixture/fit_gmm_pair.py b/src/ott/tools/gaussian_mixture/fit_gmm_pair.py index 35222caf9..7ecde263c 100644 --- a/src/ott/tools/gaussian_mixture/fit_gmm_pair.py +++ b/src/ott/tools/gaussian_mixture/fit_gmm_pair.py @@ -98,9 +98,9 @@ class Observations(NamedTuple): """Weighted observations and their E-step assignment probabilities.""" - points: jax.Array - point_weights: jax.Array - assignment_probs: jax.Array + points: jnp.ndarray + point_weights: jnp.ndarray + assignment_probs: jnp.ndarray # Model fit @@ -108,7 +108,7 @@ class Observations(NamedTuple): def get_q( gmm: gaussian_mixture.GaussianMixture, obs: Observations -) -> jax.Array: +) -> jnp.ndarray: r"""Get Q(\Theta|\Theta^{(t)}). Here Q is the log likelihood for our observations based on the current @@ -159,7 +159,7 @@ def _objective_fn( pair: gaussian_mixture_pair.GaussianMixturePair, obs0: Observations, obs1: Observations, - ) -> jax.Array: + ) -> jnp.ndarray: """Compute the objective function for a pair of GMMs. 
Args: @@ -204,11 +204,11 @@ def print_losses( def do_e_step( # noqa: D103 - e_step_fn: Callable[[gaussian_mixture.GaussianMixture, jax.Array], - jax.Array], + e_step_fn: Callable[[gaussian_mixture.GaussianMixture, jnp.ndarray], + jnp.ndarray], gmm: gaussian_mixture.GaussianMixture, - points: jax.Array, - point_weights: jax.Array, + points: jnp.ndarray, + point_weights: jnp.ndarray, ) -> Observations: assignment_probs = e_step_fn(gmm, points) return Observations( @@ -307,10 +307,10 @@ def get_fit_model_em_fn( def _fit_model_em( pair: gaussian_mixture_pair.GaussianMixturePair, - points0: jax.Array, - points1: jax.Array, - point_weights0: Optional[jax.Array], - point_weights1: Optional[jax.Array], + points0: jnp.ndarray, + points1: jnp.ndarray, + point_weights0: Optional[jnp.ndarray], + point_weights1: Optional[jnp.ndarray], em_steps: int, m_steps: int = 50, verbose: bool = False, diff --git a/src/ott/tools/gaussian_mixture/gaussian.py b/src/ott/tools/gaussian_mixture/gaussian.py index b8c8e227b..70ac505f2 100644 --- a/src/ott/tools/gaussian_mixture/gaussian.py +++ b/src/ott/tools/gaussian_mixture/gaussian.py @@ -28,15 +28,15 @@ class Gaussian: """Normal distribution.""" - def __init__(self, loc: jax.Array, scale: scale_tril.ScaleTriL): + def __init__(self, loc: jnp.ndarray, scale: scale_tril.ScaleTriL): self._loc = loc self._scale = scale @classmethod def from_samples( cls, - points: jax.Array, - weights: Optional[jax.Array] = None + points: jnp.ndarray, + weights: Optional[jnp.ndarray] = None ) -> "Gaussian": """Construct a Gaussian from weighted samples. @@ -63,11 +63,11 @@ def from_samples( @classmethod def from_random( cls, - rng: jax.Array, + rng: jnp.ndarray, n_dimensions: int, stdev_mean: float = 0.1, stdev_cov: float = 0.1, - ridge: Union[float, jax.Array] = 0, + ridge: Union[float, jnp.ndarray] = 0, dtype: Optional[jnp.dtype] = None ) -> "Gaussian": """Construct a random Gaussian. 
@@ -94,13 +94,13 @@ def from_random( return cls(loc=loc, scale=scale) @classmethod - def from_mean_and_cov(cls, mean: jax.Array, cov: jax.Array) -> "Gaussian": + def from_mean_and_cov(cls, mean: jnp.ndarray, cov: jnp.ndarray) -> "Gaussian": """Construct a Gaussian from a mean and covariance.""" scale = scale_tril.ScaleTriL.from_covariance(cov) return cls(loc=mean, scale=scale) @property - def loc(self) -> jax.Array: + def loc(self) -> jnp.ndarray: """Mean of the Gaussian.""" return self._loc @@ -114,22 +114,22 @@ def n_dimensions(self) -> int: """Dimensionality of the Gaussian.""" return self.loc.shape[-1] - def covariance(self) -> jax.Array: + def covariance(self) -> jnp.ndarray: """Covariance of the Gaussian.""" return self.scale.covariance() - def to_z(self, x: jax.Array) -> jax.Array: + def to_z(self, x: jnp.ndarray) -> jnp.ndarray: r"""Transform :math:`x` to :math:`z = \frac{x - loc}{scale}`.""" return self.scale.centered_to_z(x_centered=x - self.loc) - def from_z(self, z: jax.Array) -> jax.Array: + def from_z(self, z: jnp.ndarray) -> jnp.ndarray: r"""Transform :math:`z` to :math:`x = loc + scale \cdot z`.""" return self.scale.z_to_centered(z=z) + self.loc def log_prob( self, - x: jax.Array, # (?, d) - ) -> jax.Array: # (?, d) + x: jnp.ndarray, # (?, d) + ) -> jnp.ndarray: # (?, d) """Log probability for a Gaussian with a diagonal covariance.""" d = x.shape[-1] z = self.to_z(x) @@ -138,7 +138,7 @@ def log_prob( -0.5 * (d * LOG2PI + log_det[None] + jnp.sum(z ** 2., axis=-1)) ) # (?, k) - def sample(self, rng: jax.Array, size: int) -> jax.Array: + def sample(self, rng: jnp.ndarray, size: int) -> jnp.ndarray: """Generate samples from the distribution.""" std_samples_t = jax.random.normal(key=rng, shape=(self.n_dimensions, size)) return self.loc[None] + ( @@ -149,7 +149,7 @@ def sample(self, rng: jax.Array, size: int) -> jax.Array: ) ) - def w2_dist(self, other: "Gaussian") -> jax.Array: + def w2_dist(self, other: "Gaussian") -> jnp.ndarray: r"""Wasserstein 
distance :math:`W_2^2` to another Gaussian. .. math:: @@ -167,7 +167,7 @@ def w2_dist(self, other: "Gaussian") -> jax.Array: delta_sigma = self.scale.w2_dist(other.scale) return delta_mean + delta_sigma - def f_potential(self, dest: "Gaussian", points: jax.Array) -> jax.Array: + def f_potential(self, dest: "Gaussian", points: jnp.ndarray) -> jnp.ndarray: """Optimal potential for W2 distance between Gaussians. Evaluated on points. Args: @@ -191,7 +191,7 @@ def batch_inner_product(x, y): points.dot(dest.loc) ) - def transport(self, dest: "Gaussian", points: jax.Array) -> jax.Array: + def transport(self, dest: "Gaussian", points: jnp.ndarray) -> jnp.ndarray: """Transport points according to map between two Gaussian measures. Args: diff --git a/src/ott/tools/gaussian_mixture/gaussian_mixture.py b/src/ott/tools/gaussian_mixture/gaussian_mixture.py index a9cb2b326..5d40a870d 100644 --- a/src/ott/tools/gaussian_mixture/gaussian_mixture.py +++ b/src/ott/tools/gaussian_mixture/gaussian_mixture.py @@ -27,8 +27,9 @@ def get_summary_stats_from_points_and_assignment_probs( - points: jax.Array, point_weights: jax.Array, assignment_probs: jax.Array -) -> Tuple[jax.Array, jax.Array, jax.Array]: + points: jnp.ndarray, point_weights: jnp.ndarray, + assignment_probs: jnp.ndarray +) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]: """Get component summary stats from points and component probabilities. 
Args: @@ -67,7 +68,7 @@ class GaussianMixture: """Gaussian Mixture model.""" def __init__( - self, loc: jax.Array, scale_params: jax.Array, + self, loc: jnp.ndarray, scale_params: jnp.ndarray, component_weight_ob: probabilities.Probabilities ): self._loc = loc @@ -77,7 +78,7 @@ def __init__( @classmethod def from_random( cls, - rng: jax.Array, + rng: jnp.ndarray, n_components: int, n_dimensions: int, stdev_mean: float = 0.1, @@ -112,7 +113,7 @@ def from_random( @classmethod def from_mean_cov_component_weights( - cls, mean: jax.Array, cov: jax.Array, component_weights: jax.Array + cls, mean: jnp.ndarray, cov: jnp.ndarray, component_weights: jnp.ndarray ): """Construct a GMM from means, covariances, and component weights.""" scale_params = [] @@ -127,9 +128,9 @@ def from_mean_cov_component_weights( @classmethod def from_points_and_assignment_probs( cls, - points: jax.Array, - point_weights: jax.Array, - assignment_probs: jax.Array, + points: jnp.ndarray, + point_weights: jnp.ndarray, + assignment_probs: jnp.ndarray, ) -> "GaussianMixture": """Estimate a GMM from points and a set of component probabilities.""" mean, cov, wts = get_summary_stats_from_points_and_assignment_probs( @@ -157,17 +158,17 @@ def n_components(self): return self._loc.shape[-2] @property - def loc(self) -> jax.Array: + def loc(self) -> jnp.ndarray: """Location parameters of the GMM.""" return self._loc @property - def scale_params(self) -> jax.Array: + def scale_params(self) -> jnp.ndarray: """Scale parameters of the GMM.""" return self._scale_params @property - def cholesky(self) -> jax.Array: + def cholesky(self) -> jnp.ndarray: """Cholesky decomposition of the GMM covariance matrices.""" size = self.n_dimensions @@ -177,7 +178,7 @@ def _get_cholesky(scale_params): return jax.vmap(_get_cholesky, in_axes=0, out_axes=0)(self.scale_params) @property - def covariance(self) -> jax.Array: + def covariance(self) -> jnp.ndarray: """Covariance matrices of the GMM.""" size = self.n_dimensions @@ -192,16 
+193,16 @@ def component_weight_ob(self) -> probabilities.Probabilities: return self._component_weight_ob @property - def component_weights(self) -> jax.Array: + def component_weights(self) -> jnp.ndarray: """Component weights probabilities.""" return self._component_weight_ob.probs() - def log_component_weights(self) -> jax.Array: + def log_component_weights(self) -> jnp.ndarray: """Log component weights probabilities.""" return self._component_weight_ob.log_probs() def _get_normal( - self, loc: jax.Array, scale_params: jax.Array + self, loc: jnp.ndarray, scale_params: jnp.ndarray ) -> gaussian.Gaussian: size = loc.shape[-1] return gaussian.Gaussian( @@ -218,7 +219,7 @@ def components(self) -> List[gaussian.Gaussian]: """List of all GMM components.""" return [self.get_component(i) for i in range(self.n_components)] - def sample(self, rng: jax.Array, size: int) -> jax.Array: + def sample(self, rng: jnp.ndarray, size: int) -> jnp.ndarray: """Generate samples from the distribution.""" subrng0, subrng1 = jax.random.split(rng) component = self.component_weight_ob.sample(rng=subrng0, size=size) @@ -243,7 +244,7 @@ def _transform_single_value(single_component, single_x): axis=0 ) - def conditional_log_prob(self, x: jax.Array) -> jax.Array: + def conditional_log_prob(self, x: jnp.ndarray) -> jnp.ndarray: """Compute the component-conditional log probability of x. Args: @@ -255,7 +256,7 @@ def conditional_log_prob(self, x: jax.Array) -> jax.Array: """ def _log_prob_single_component( - loc: jax.Array, scale_params: jax.Array, x: jax.Array + loc: jnp.ndarray, scale_params: jnp.ndarray, x: jnp.ndarray ): norm = self._get_normal(loc=loc, scale_params=scale_params) return norm.log_prob(x) @@ -265,7 +266,7 @@ def _log_prob_single_component( ) return conditional_log_prob_fn(self._loc, self._scale_params, x) - def log_prob(self, x: jax.Array) -> jax.Array: + def log_prob(self, x: jnp.ndarray) -> jnp.ndarray: """Compute the log probability of the observations x. 
Args: @@ -281,7 +282,7 @@ def log_prob(self, x: jax.Array) -> jax.Array: log_prob_conditional + log_component_weight[None, :], axis=-1 ) - def get_log_component_posterior(self, x: jax.Array) -> jax.Array: + def get_log_component_posterior(self, x: jnp.ndarray) -> jnp.ndarray: """Compute the posterior probability that x came from each component. Args: diff --git a/src/ott/tools/gaussian_mixture/gaussian_mixture_pair.py b/src/ott/tools/gaussian_mixture/gaussian_mixture_pair.py index 21d4dbaf1..b24506fcc 100644 --- a/src/ott/tools/gaussian_mixture/gaussian_mixture_pair.py +++ b/src/ott/tools/gaussian_mixture/gaussian_mixture_pair.py @@ -128,12 +128,12 @@ def get_bures_geometry(self) -> pointcloud.PointCloud: epsilon=self.epsilon ) - def get_cost_matrix(self) -> jax.Array: + def get_cost_matrix(self) -> jnp.ndarray: """Get matrix of :math:`W_2^2` costs between all pairs of components.""" return self.get_bures_geometry().cost_matrix def get_sinkhorn( - self, cost_matrix: jax.Array, **kwargs: Any + self, cost_matrix: jnp.ndarray, **kwargs: Any ) -> sinkhorn.SinkhornOutput: """Get the output of Sinkhorn's method for a given cost matrix.""" # We use a Geometry here rather than the PointCloud created in @@ -152,7 +152,7 @@ def get_sinkhorn( def get_normalized_sinkhorn_coupling( self, sinkhorn_output: sinkhorn.SinkhornOutput, - ) -> jax.Array: + ) -> jnp.ndarray: """Get the normalized coupling matrix for the specified Sinkhorn output. 
Args: diff --git a/src/ott/tools/gaussian_mixture/linalg.py b/src/ott/tools/gaussian_mixture/linalg.py index 2a5114d69..9c88df0cc 100644 --- a/src/ott/tools/gaussian_mixture/linalg.py +++ b/src/ott/tools/gaussian_mixture/linalg.py @@ -18,9 +18,9 @@ def get_mean_and_var( - points: jax.Array, # (n, d) - weights: jax.Array, # (n,) -) -> Tuple[jax.Array, jax.Array]: + points: jnp.ndarray, # (n, d) + weights: jnp.ndarray, # (n,) +) -> Tuple[jnp.ndarray, jnp.ndarray]: """Get the mean and variance of a weighted set of points.""" weights_sum = jnp.sum(weights, axis=-1) # (1,) mean = ( @@ -37,9 +37,9 @@ def get_mean_and_var( def get_mean_and_cov( - points: jax.Array, # (n, d) - weights: jax.Array, # (n,) -) -> Tuple[jax.Array, jax.Array]: + points: jnp.ndarray, # (n, d) + weights: jnp.ndarray, # (n,) +) -> Tuple[jnp.ndarray, jnp.ndarray]: """Get the mean and covariance of a weighted set of points.""" weights_sum = jnp.sum(weights, axis=-1, keepdims=True) # (1,) mean = ( @@ -59,7 +59,7 @@ def get_mean_and_cov( return mean, cov -def flat_to_tril(x: jax.Array, size: int) -> jax.Array: +def flat_to_tril(x: jnp.ndarray, size: int) -> jnp.ndarray: """Map flat values to lower triangular matrices. Args: @@ -76,7 +76,7 @@ def flat_to_tril(x: jax.Array, size: int) -> jax.Array: return m.at[..., tril[0], tril[1]].set(x) -def tril_to_flat(m: jax.Array) -> jax.Array: +def tril_to_flat(m: jnp.ndarray) -> jnp.ndarray: """Flatten lower triangular matrices. 
Args: @@ -91,8 +91,8 @@ def tril_to_flat(m: jax.Array) -> jax.Array: def apply_to_diag( - m: jax.Array, fn: Callable[[jax.Array], jax.Array] -) -> jax.Array: + m: jnp.ndarray, fn: Callable[[jnp.ndarray], jnp.ndarray] +) -> jnp.ndarray: """Apply a function to the diagonal of a matrix.""" size = m.shape[-1] diag = jnp.diagonal(m, axis1=-2, axis2=-1) @@ -101,9 +101,9 @@ def apply_to_diag( def matrix_powers( - m: jax.Array, + m: jnp.ndarray, powers: Iterable[float], -) -> List[jax.Array]: +) -> List[jnp.ndarray]: """Raise a real, symmetric matrix to multiple powers.""" eigs, q = jnp.linalg.eigh(m) qt = jnp.swapaxes(q, axis1=-2, axis2=-1) @@ -113,7 +113,9 @@ def matrix_powers( return ret -def invmatvectril(m: jax.Array, x: jax.Array, lower: bool = True) -> jax.Array: +def invmatvectril( + m: jnp.ndarray, x: jnp.ndarray, lower: bool = True +) -> jnp.ndarray: """Multiply x by the inverse of a triangular matrix. Args: @@ -130,8 +132,10 @@ def invmatvectril(m: jax.Array, x: jax.Array, lower: bool = True) -> jax.Array: def get_random_orthogonal( - rng: jax.Array, dim: int, dtype: Optional[jnp.dtype] = None -) -> jax.Array: + rng: jnp.ndarray, + dim: int, + dtype: Optional[jnp.dtype] = None +) -> jnp.ndarray: """Get a random orthogonal matrix with the specified dimension.""" m = jax.random.normal(key=rng, shape=[dim, dim], dtype=dtype) q, _ = jnp.linalg.qr(m) diff --git a/src/ott/tools/gaussian_mixture/probabilities.py b/src/ott/tools/gaussian_mixture/probabilities.py index c3bb253a5..66a90c1a7 100644 --- a/src/ott/tools/gaussian_mixture/probabilities.py +++ b/src/ott/tools/gaussian_mixture/probabilities.py @@ -27,7 +27,7 @@ class Probabilities: to a length n simplex by appending a 0 and taking a softmax. 
""" - _params: jax.Array + _params: jnp.ndarray def __init__(self, params): self._params = params @@ -35,7 +35,7 @@ def __init__(self, params): @classmethod def from_random( cls, - rng: jax.Array, + rng: jnp.ndarray, n_dimensions: int, stdev: Optional[float] = 0.1, dtype: Optional[jnp.dtype] = None @@ -47,7 +47,7 @@ def from_random( ) @classmethod - def from_probs(cls, probs: jax.Array) -> "Probabilities": + def from_probs(cls, probs: jnp.ndarray) -> "Probabilities": """Construct Probabilities from a vector of probabilities.""" log_probs = jnp.log(probs) log_probs_normalized, norm = log_probs[:-1], log_probs[-1] @@ -62,21 +62,21 @@ def params(self): # noqa: D102 def dtype(self): # noqa: D102 return self._params.dtype - def unnormalized_log_probs(self) -> jax.Array: + def unnormalized_log_probs(self) -> jnp.ndarray: """Get the unnormalized log probabilities.""" return jnp.concatenate([self._params, jnp.zeros((1,), dtype=self.dtype)], axis=-1) - def log_probs(self) -> jax.Array: + def log_probs(self) -> jnp.ndarray: """Get the log probabilities.""" return jax.nn.log_softmax(self.unnormalized_log_probs()) - def probs(self) -> jax.Array: + def probs(self) -> jnp.ndarray: """Get the probabilities.""" return jax.nn.softmax(self.unnormalized_log_probs()) - def sample(self, rng: jax.Array, size: int) -> jax.Array: + def sample(self, rng: jnp.ndarray, size: int) -> jnp.ndarray: """Sample from the distribution.""" return jax.random.categorical( key=rng, logits=self.unnormalized_log_probs(), shape=(size,) diff --git a/src/ott/tools/gaussian_mixture/scale_tril.py b/src/ott/tools/gaussian_mixture/scale_tril.py index ee708d5ac..95b812d99 100644 --- a/src/ott/tools/gaussian_mixture/scale_tril.py +++ b/src/ott/tools/gaussian_mixture/scale_tril.py @@ -27,16 +27,16 @@ class ScaleTriL: """Pytree for a lower triangular Cholesky-factored covariance matrix.""" - def __init__(self, params: jax.Array, size: int): + def __init__(self, params: jnp.ndarray, size: int): self._params = params 
self._size = size @classmethod def from_points_and_weights( cls, - points: jax.Array, - weights: jax.Array, - ) -> Tuple[jax.Array, "ScaleTriL"]: + points: jnp.ndarray, + weights: jnp.ndarray, + ) -> Tuple[jnp.ndarray, "ScaleTriL"]: """Get a mean and a ScaleTriL from a set of points and weights.""" mean, cov = linalg.get_mean_and_cov(points=points, weights=weights) return mean, cls.from_covariance(cov) @@ -44,7 +44,7 @@ def from_points_and_weights( @classmethod def from_random( cls, - rng: jax.Array, + rng: jnp.ndarray, n_dimensions: int, stdev: Optional[float] = 0.1, dtype: jnp.dtype = jnp.float32, @@ -80,7 +80,7 @@ def from_random( return cls(params=flat, size=n_dimensions) @classmethod - def from_cholesky(cls, cholesky: jax.Array) -> "ScaleTriL": + def from_cholesky(cls, cholesky: jnp.ndarray) -> "ScaleTriL": """Construct ScaleTriL from a Cholesky factor of a covariance matrix.""" m = linalg.apply_to_diag(cholesky, jnp.log) flat = linalg.tril_to_flat(m) @@ -89,14 +89,14 @@ def from_cholesky(cls, cholesky: jax.Array) -> "ScaleTriL": @classmethod def from_covariance( cls, - covariance: jax.Array, + covariance: jnp.ndarray, ) -> "ScaleTriL": """Construct ScaleTriL from a covariance matrix.""" cholesky = jnp.linalg.cholesky(covariance) return cls.from_cholesky(cholesky) @property - def params(self) -> jax.Array: + def params(self) -> jnp.ndarray: """Internal representation.""" return self._params @@ -110,34 +110,34 @@ def dtype(self): """Data type of the covariance matrix.""" return self._params.dtype - def cholesky(self) -> jax.Array: + def cholesky(self) -> jnp.ndarray: """Get a lower triangular Cholesky factor for the covariance matrix.""" m = linalg.flat_to_tril(self._params, size=self._size) return linalg.apply_to_diag(m, jnp.exp) - def covariance(self) -> jax.Array: + def covariance(self) -> jnp.ndarray: """Get the covariance matrix.""" cholesky = self.cholesky() return cholesky @ cholesky.T - def covariance_sqrt(self) -> jax.Array: + def covariance_sqrt(self) 
-> jnp.ndarray: """Get the square root of the covariance matrix.""" return linalg.matrix_powers(self.covariance(), (0.5,))[0] - def log_det_covariance(self) -> jax.Array: + def log_det_covariance(self) -> jnp.ndarray: """Get the log of the determinant of the covariance matrix.""" diag = jnp.diagonal(self.cholesky(), axis1=-2, axis2=-1) return 2. * jnp.sum(jnp.log(diag), axis=-1) - def centered_to_z(self, x_centered: jax.Array) -> jax.Array: + def centered_to_z(self, x_centered: jnp.ndarray) -> jnp.ndarray: """Map centered points to standardized centered points (i.e. cov(z) = I).""" return linalg.invmatvectril(m=self.cholesky(), x=x_centered, lower=True) - def z_to_centered(self, z: jax.Array) -> jax.Array: + def z_to_centered(self, z: jnp.ndarray) -> jnp.ndarray: """Scale standardized points to points with the specified covariance.""" return (self.cholesky() @ z.T).T - def w2_dist(self, other: "ScaleTriL") -> jax.Array: + def w2_dist(self, other: "ScaleTriL") -> jnp.ndarray: r"""Wasserstein distance W_2^2 to another Gaussian with same mean. Args: @@ -148,7 +148,7 @@ def w2_dist(self, other: "ScaleTriL") -> jax.Array: """ dimension = self.size - def _flatten_cov(cov: jax.Array) -> jax.Array: + def _flatten_cov(cov: jnp.ndarray) -> jnp.ndarray: cov = cov.reshape(cov.shape[:-2] + (dimension * dimension,)) return jnp.concatenate([jnp.zeros(dimension), cov], axis=-1) @@ -159,7 +159,7 @@ def _flatten_cov(cov: jax.Array) -> jax.Array: ..., ] - def gaussian_map(self, dest_scale: "ScaleTriL") -> jax.Array: + def gaussian_map(self, dest_scale: "ScaleTriL") -> jnp.ndarray: """Scaling matrix used in transport between 0-mean Gaussians. 
Sigma_mu^{-1/2} @ @@ -179,7 +179,9 @@ def gaussian_map(self, dest_scale: "ScaleTriL") -> jax.Array: ) return jnp.matmul(sqrt0_inv, jnp.matmul(m, sqrt0_inv)) - def transport(self, dest_scale: "ScaleTriL", points: jax.Array) -> jax.Array: + def transport( + self, dest_scale: "ScaleTriL", points: jnp.ndarray + ) -> jnp.ndarray: """Apply Monge map, computed between two 0-mean Gaussians, to points. Args: diff --git a/src/ott/tools/k_means.py b/src/ott/tools/k_means.py index c8fc8189d..9175abe2c 100644 --- a/src/ott/tools/k_means.py +++ b/src/ott/tools/k_means.py @@ -25,29 +25,29 @@ __all__ = ["k_means", "KMeansOutput"] Init_t = Union[Literal["k-means++", "random"], - Callable[[pointcloud.PointCloud, int, jax.Array], jax.Array]] + Callable[[pointcloud.PointCloud, int, jnp.ndarray], jnp.ndarray]] class KPPState(NamedTuple): # noqa: D101 - rng: jax.Array - centroids: jax.Array - centroid_dists: jax.Array + rng: jnp.ndarray + centroids: jnp.ndarray + centroid_dists: jnp.ndarray class KMeansState(NamedTuple): # noqa: D101 - centroids: jax.Array - prev_assignment: jax.Array - assignment: jax.Array - errors: jax.Array + centroids: jnp.ndarray + prev_assignment: jnp.ndarray + assignment: jnp.ndarray + errors: jnp.ndarray center_shift: float class KMeansConst(NamedTuple): # noqa: D101 geom: pointcloud.PointCloud - x_weights: jax.Array + x_weights: jnp.ndarray @property - def x(self) -> jax.Array: + def x(self) -> jnp.ndarray: """Array of shape ``[n, ndim]`` containing the unweighted point cloud.""" return self.geom.x @@ -57,7 +57,7 @@ def weighted_x(self): return self.x_weights[:, :-1] @property - def weights(self) -> jax.Array: + def weights(self) -> jnp.ndarray: """Array of shape ``[n, 1]`` containing weights for each point.""" return self.x_weights[:, -1:] @@ -75,12 +75,12 @@ class KMeansOutput(NamedTuple): inner_errors: Array of shape ``[max_iterations,]`` containing the ``error`` at every iteration. 
""" - centroids: jax.Array - assignment: jax.Array + centroids: jnp.ndarray + assignment: jnp.ndarray converged: bool iteration: int error: float - inner_errors: Optional[jax.Array] + inner_errors: Optional[jnp.ndarray] @classmethod def _from_state( @@ -109,8 +109,8 @@ def _from_state( def _random_init( - geom: pointcloud.PointCloud, k: int, rng: jax.Array -) -> jax.Array: + geom: pointcloud.PointCloud, k: int, rng: jnp.ndarray +) -> jnp.ndarray: ixs = jnp.arange(geom.shape[0]) ixs = jax.random.choice(rng, ixs, shape=(k,), replace=False) return geom.subset(ixs, None).x @@ -119,11 +119,11 @@ def _random_init( def _k_means_plus_plus( geom: pointcloud.PointCloud, k: int, - rng: jax.Array, + rng: jnp.ndarray, n_local_trials: Optional[int] = None, -) -> jax.Array: +) -> jnp.ndarray: - def init_fn(geom: pointcloud.PointCloud, rng: jax.Array) -> KPPState: + def init_fn(geom: pointcloud.PointCloud, rng: jnp.ndarray) -> KPPState: rng, next_rng = jax.random.split(rng, 2) ix = jax.random.choice(rng, jnp.arange(geom.shape[0]), shape=()) centroids = jnp.full((k, geom.cost_rank), jnp.inf).at[0].set(geom.x[ix]) @@ -131,7 +131,7 @@ def init_fn(geom: pointcloud.PointCloud, rng: jax.Array) -> KPPState: return KPPState(rng=next_rng, centroids=centroids, centroid_dists=dists) def body_fn( - iteration: int, const: Tuple[pointcloud.PointCloud, jax.Array], + iteration: int, const: Tuple[pointcloud.PointCloud, jnp.ndarray], state: KPPState, compute_error: bool ) -> KPPState: del compute_error @@ -177,10 +177,10 @@ def body_fn( @functools.partial(jax.vmap, in_axes=[None, 0, 0, 0], out_axes=0) def _reallocate_centroids( const: KMeansConst, - ix: jax.Array, - centroid: jax.Array, - weight: jax.Array, -) -> Tuple[jax.Array, jax.Array]: + ix: jnp.ndarray, + centroid: jnp.ndarray, + weight: jnp.ndarray, +) -> Tuple[jnp.ndarray, jnp.ndarray]: is_empty = weight <= 0. 
new_centroid = (1 - is_empty) * centroid + is_empty * const.x[ix] # (ndim,) centroid_to_remove = is_empty * const.weighted_x[ix] # (ndim,) @@ -190,8 +190,8 @@ def _reallocate_centroids( def _update_assignment( const: KMeansConst, - centroids: jax.Array, -) -> Tuple[jax.Array, jax.Array]: + centroids: jnp.ndarray, +) -> Tuple[jnp.ndarray, jnp.ndarray]: (x, _, *args), aux_data = const.geom.tree_flatten() cost_matrix = type( const.geom @@ -203,9 +203,9 @@ def _update_assignment( def _update_centroids( - const: KMeansConst, k: int, assignment: jax.Array, - dist_to_centers: jax.Array -) -> jax.Array: + const: KMeansConst, k: int, assignment: jnp.ndarray, + dist_to_centers: jnp.ndarray +) -> jnp.ndarray: # TODO(michalk8): # cannot put `k` into `const`, see https://github.com/ott-jax/ott/issues/129 x_weights = jax.ops.segment_sum(const.x_weights, assignment, num_segments=k) @@ -224,10 +224,10 @@ def _update_centroids( @functools.partial(jax.vmap, in_axes=[0] + [None] * 9) def _k_means( - rng: jax.Array, + rng: jnp.ndarray, geom: pointcloud.PointCloud, k: int, - weights: Optional[jax.Array] = None, + weights: Optional[jnp.ndarray] = None, init: Init_t = "k-means++", n_local_trials: Optional[int] = None, tol: float = 1e-4, @@ -342,9 +342,9 @@ def finalize_fn(const: KMeansConst, state: KMeansState) -> KMeansState: def k_means( - geom: Union[jax.Array, pointcloud.PointCloud], + geom: Union[jnp.ndarray, pointcloud.PointCloud], k: int, - weights: Optional[jax.Array] = None, + weights: Optional[jnp.ndarray] = None, init: Init_t = "k-means++", n_init: int = 10, n_local_trials: Optional[int] = None, @@ -352,7 +352,7 @@ def k_means( min_iterations: int = 0, max_iterations: int = 300, store_inner_errors: bool = False, - rng: Optional[jax.Array] = None, + rng: Optional[jnp.ndarray] = None, ) -> KMeansOutput: r"""K-means clustering using Lloyd's algorithm :cite:`lloyd:82`. 
@@ -386,7 +386,7 @@ def k_means( """ assert geom.shape[ 0] >= k, f"Cannot cluster `{geom.shape[0]}` points into `{k}` clusters." - if isinstance(geom, jax.Array): + if isinstance(geom, jnp.ndarray): geom = pointcloud.PointCloud(geom) if isinstance(geom.cost_fn, costs.Cosine): geom = geom._cosine_to_sqeucl() diff --git a/src/ott/tools/plot.py b/src/ott/tools/plot.py index d83868fd5..bd1f42e91 100644 --- a/src/ott/tools/plot.py +++ b/src/ott/tools/plot.py @@ -13,7 +13,6 @@ # limitations under the License. from typing import List, Optional, Sequence, Tuple, Union -import jax import jax.numpy as jnp import numpy as np import scipy @@ -33,7 +32,8 @@ gromov_wasserstein.GWOutput] -def bidimensional(x: jax.Array, y: jax.Array) -> Tuple[jax.Array, jax.Array]: +def bidimensional(x: jnp.ndarray, + y: jnp.ndarray) -> Tuple[jnp.ndarray, jnp.ndarray]: """Apply PCA to reduce to bi-dimensional data.""" if x.shape[1] < 3: return x, y @@ -121,7 +121,7 @@ def _scatter(self, ot: Transport): scales_y = b * self._scale * b.shape[0] return x, y, scales_x, scales_y - def _mapping(self, x: jax.Array, y: jax.Array, matrix: jax.Array): + def _mapping(self, x: jnp.ndarray, y: jnp.ndarray, matrix: jnp.ndarray): """Compute the lines representing the mapping between the 2 point clouds.""" # Only plot the lines with a cost above the threshold. 
u, v = jnp.where(matrix > self._threshold) diff --git a/src/ott/tools/segment_sinkhorn.py b/src/ott/tools/segment_sinkhorn.py index ca5e5c228..223f2a30f 100644 --- a/src/ott/tools/segment_sinkhorn.py +++ b/src/ott/tools/segment_sinkhorn.py @@ -14,7 +14,7 @@ from types import MappingProxyType from typing import Any, Mapping, Optional, Tuple -import jax +import jax.numpy as jnp from ott.geometry import costs, pointcloud, segment from ott.problems.linear import linear_problem @@ -22,21 +22,21 @@ def segment_sinkhorn( - x: jax.Array, - y: jax.Array, + x: jnp.ndarray, + y: jnp.ndarray, num_segments: Optional[int] = None, max_measure_size: Optional[int] = None, cost_fn: Optional[costs.CostFn] = None, - segment_ids_x: Optional[jax.Array] = None, - segment_ids_y: Optional[jax.Array] = None, + segment_ids_x: Optional[jnp.ndarray] = None, + segment_ids_y: Optional[jnp.ndarray] = None, indices_are_sorted: bool = False, num_per_segment_x: Optional[Tuple[int, ...]] = None, num_per_segment_y: Optional[Tuple[int, ...]] = None, - weights_x: Optional[jax.Array] = None, - weights_y: Optional[jax.Array] = None, + weights_x: Optional[jnp.ndarray] = None, + weights_y: Optional[jnp.ndarray] = None, sinkhorn_kwargs: Mapping[str, Any] = MappingProxyType({}), **kwargs: Any -) -> jax.Array: +) -> jnp.ndarray: """Compute regularized OT cost between subsets of vectors in `x` and `y`. Helper function designed to compute Sinkhorn regularized OT cost between @@ -104,10 +104,10 @@ def segment_sinkhorn( padding_vector = cost_fn._padder(dim=dim) def eval_fn( - padded_x: jax.Array, - padded_y: jax.Array, - padded_weight_x: jax.Array, - padded_weight_y: jax.Array, + padded_x: jnp.ndarray, + padded_y: jnp.ndarray, + padded_weight_x: jnp.ndarray, + padded_weight_y: jnp.ndarray, ) -> float: mask_x = padded_weight_x > 0. mask_y = padded_weight_y > 0. 
diff --git a/src/ott/tools/sinkhorn_divergence.py b/src/ott/tools/sinkhorn_divergence.py index 2ff1cbc4e..51de97613 100644 --- a/src/ott/tools/sinkhorn_divergence.py +++ b/src/ott/tools/sinkhorn_divergence.py @@ -14,7 +14,6 @@ from types import MappingProxyType from typing import Any, Mapping, Optional, Tuple, Type -import jax import jax.numpy as jnp from ott import utils @@ -28,7 +27,7 @@ "SinkhornDivergenceOutput" ] -Potentials_t = Tuple[jax.Array, jax.Array] +Potentials_t = Tuple[jnp.ndarray, jnp.ndarray] @utils.register_pytree_node @@ -36,10 +35,11 @@ class SinkhornDivergenceOutput: # noqa: D101 divergence: float potentials: Tuple[Potentials_t, Potentials_t, Potentials_t] geoms: Tuple[geometry.Geometry, geometry.Geometry, geometry.Geometry] - errors: Tuple[Optional[jax.Array], Optional[jax.Array], Optional[jax.Array]] + errors: Tuple[Optional[jnp.ndarray], Optional[jnp.ndarray], + Optional[jnp.ndarray]] converged: Tuple[bool, bool, bool] - a: jax.Array - b: jax.Array + a: jnp.ndarray + b: jnp.ndarray n_iters: Tuple[int, int, int] def to_dual_potentials(self) -> "potentials.EntropicPotentials": @@ -73,8 +73,8 @@ def tree_unflatten_foo(cls, aux_data, children): # noqa: D102 def sinkhorn_divergence( geom: Type[geometry.Geometry], *args: Any, - a: Optional[jax.Array] = None, - b: Optional[jax.Array] = None, + a: Optional[jnp.ndarray] = None, + b: Optional[jnp.ndarray] = None, sinkhorn_kwargs: Mapping[str, Any] = MappingProxyType({}), static_b: bool = False, share_epsilon: bool = True, @@ -138,8 +138,8 @@ def _sinkhorn_divergence( geometry_xy: geometry.Geometry, geometry_xx: geometry.Geometry, geometry_yy: Optional[geometry.Geometry], - a: jax.Array, - b: jax.Array, + a: jnp.ndarray, + b: jnp.ndarray, symmetric_sinkhorn: bool, **kwargs: Any, ) -> SinkhornDivergenceOutput: @@ -155,9 +155,9 @@ def _sinkhorn_divergence( between elements of the view X. geometry_yy: a Cost object able to apply kernels with a certain epsilon, between elements of the view Y. 
- a: jax.Array[n]: the weight of each input point. The sum of + a: jnp.ndarray[n]: the weight of each input point. The sum of all elements of ``b`` must match that of ``a`` to converge. - b: jax.Array[m]: the weight of each target point. The sum of + b: jnp.ndarray[m]: the weight of each target point. The sum of all elements of ``b`` must match that of ``a`` to converge. symmetric_sinkhorn: Use Sinkhorn updates in Eq. 25 of :cite:`feydy:19` for symmetric terms comparing x/x and y/y. @@ -219,24 +219,24 @@ def _sinkhorn_divergence( def segment_sinkhorn_divergence( - x: jax.Array, - y: jax.Array, + x: jnp.ndarray, + y: jnp.ndarray, num_segments: Optional[int] = None, max_measure_size: Optional[int] = None, cost_fn: Optional[costs.CostFn] = None, - segment_ids_x: Optional[jax.Array] = None, - segment_ids_y: Optional[jax.Array] = None, + segment_ids_x: Optional[jnp.ndarray] = None, + segment_ids_y: Optional[jnp.ndarray] = None, indices_are_sorted: bool = False, num_per_segment_x: Optional[Tuple[int, ...]] = None, num_per_segment_y: Optional[Tuple[int, ...]] = None, - weights_x: Optional[jax.Array] = None, - weights_y: Optional[jax.Array] = None, + weights_x: Optional[jnp.ndarray] = None, + weights_y: Optional[jnp.ndarray] = None, sinkhorn_kwargs: Mapping[str, Any] = MappingProxyType({}), static_b: bool = False, share_epsilon: bool = True, symmetric_sinkhorn: bool = False, **kwargs: Any -) -> jax.Array: +) -> jnp.ndarray: """Compute Sinkhorn divergence between subsets of vectors in `x` and `y`. Helper function designed to compute Sinkhorn divergences between several point @@ -313,10 +313,10 @@ def segment_sinkhorn_divergence( padding_vector = cost_fn._padder(dim=dim) def eval_fn( - padded_x: jax.Array, - padded_y: jax.Array, - padded_weight_x: jax.Array, - padded_weight_y: jax.Array, + padded_x: jnp.ndarray, + padded_y: jnp.ndarray, + padded_weight_x: jnp.ndarray, + padded_weight_y: jnp.ndarray, ) -> float: mask_x = padded_weight_x > 0. mask_y = padded_weight_y > 0. 
diff --git a/src/ott/tools/soft_sort.py b/src/ott/tools/soft_sort.py index b5b33e183..beb88365f 100644 --- a/src/ott/tools/soft_sort.py +++ b/src/ott/tools/soft_sort.py @@ -30,14 +30,14 @@ "quantize", "topk_mask", "multivariate_cdf_quantile_maps" ] -Func_t = Callable[[jax.Array], jax.Array] +Func_t = Callable[[jnp.ndarray], jnp.ndarray] def transport_for_sort( - inputs: jax.Array, - weights: Optional[jax.Array] = None, - target_weights: Optional[jax.Array] = None, - squashing_fun: Optional[Callable[[jax.Array], jax.Array]] = None, + inputs: jnp.ndarray, + weights: Optional[jnp.ndarray] = None, + target_weights: Optional[jnp.ndarray] = None, + squashing_fun: Optional[Callable[[jnp.ndarray], jnp.ndarray]] = None, epsilon: float = 1e-2, **kwargs: Any, ) -> sinkhorn.SinkhornOutput: @@ -83,7 +83,7 @@ def transport_for_sort( return solver(prob) -def apply_on_axis(op, inputs, axis, *args, **kwargs: Any) -> jax.Array: +def apply_on_axis(op, inputs, axis, *args, **kwargs: Any) -> jnp.ndarray: """Apply a differentiable operator on a given axis of the input. Args: @@ -120,8 +120,8 @@ def apply_on_axis(op, inputs, axis, *args, **kwargs: Any) -> jax.Array: def _sort( - inputs: jax.Array, topk: int, num_targets: Optional[int], **kwargs: Any -) -> jax.Array: + inputs: jnp.ndarray, topk: int, num_targets: Optional[int], **kwargs: Any +) -> jnp.ndarray: """Apply the soft sort operator on a one dimensional array.""" num_points = inputs.shape[0] a = jnp.ones((num_points,)) / num_points @@ -145,12 +145,12 @@ def _sort( def sort( - inputs: jax.Array, + inputs: jnp.ndarray, axis: int = -1, topk: int = -1, num_targets: Optional[int] = None, **kwargs: Any, -) -> jax.Array: +) -> jnp.ndarray: r"""Apply the soft sort operator on a given axis of the input. 
For instance: @@ -203,8 +203,8 @@ def sort( def _ranks( - inputs: jax.Array, num_targets, target_weights, **kwargs: Any -) -> jax.Array: + inputs: jnp.ndarray, num_targets, target_weights, **kwargs: Any +) -> jnp.ndarray: """Apply the soft ranks operator on a one dimensional array.""" num_points = inputs.shape[0] if target_weights is None: @@ -220,12 +220,12 @@ def _ranks( def ranks( - inputs: jax.Array, + inputs: jnp.ndarray, axis: int = -1, num_targets: Optional[int] = None, - target_weights: Optional[jax.Array] = None, + target_weights: Optional[jnp.ndarray] = None, **kwargs: Any, -) -> jax.Array: +) -> jnp.ndarray: r"""Apply the soft rank operator on input tensor. For instance: @@ -278,11 +278,11 @@ def ranks( def topk_mask( - inputs: jax.Array, + inputs: jnp.ndarray, axis: int = -1, k: int = 1, **kwargs: Any, -) -> jax.Array: +) -> jnp.ndarray: r"""Soft :math:`\text{top-}k` selection mask. For instance: @@ -337,12 +337,12 @@ def topk_mask( def quantile( - inputs: jax.Array, - q: Optional[Union[float, jax.Array]], + inputs: jnp.ndarray, + q: Optional[Union[float, jnp.ndarray]], axis: Union[int, Tuple[int, ...]] = -1, - weight: Optional[Union[float, jax.Array]] = None, + weight: Optional[Union[float, jnp.ndarray]] = None, **kwargs: Any, -) -> jax.Array: +) -> jnp.ndarray: r"""Apply the soft quantiles operator on the input tensor. 
For instance: @@ -395,8 +395,8 @@ def quantile( """ def _quantile( - inputs: jax.Array, q: float, weight: float, **kwargs - ) -> jax.Array: + inputs: jnp.ndarray, q: float, weight: float, **kwargs + ) -> jnp.ndarray: num_points = inputs.shape[0] q = jnp.array([0.2, 0.5, 0.8]) if q is None else jnp.atleast_1d(q) num_quantiles = q.shape[0] @@ -456,15 +456,15 @@ def _quantile( def multivariate_cdf_quantile_maps( - inputs: jax.Array, - target_sampler: Optional[Callable[[jax.Array, Tuple[int, int]], - jax.Array]] = None, - rng: Optional[jax.Array] = None, + inputs: jnp.ndarray, + target_sampler: Optional[Callable[[jnp.ndarray, Tuple[int, int]], + jnp.ndarray]] = None, + rng: Optional[jnp.ndarray] = None, num_target_samples: Optional[int] = None, cost_fn: Optional[costs.CostFn] = None, epsilon: Optional[float] = None, - input_weights: Optional[jax.Array] = None, - target_weights: Optional[jax.Array] = None, + input_weights: Optional[jnp.ndarray] = None, + target_weights: Optional[jnp.ndarray] = None, **kwargs: Any ) -> Tuple[Func_t, Func_t]: r"""Returns multivariate CDF and quantile maps, given input samples. @@ -534,8 +534,8 @@ def multivariate_cdf_quantile_maps( def _quantile_normalization( - inputs: jax.Array, targets: jax.Array, weights: float, **kwargs: Any -) -> jax.Array: + inputs: jnp.ndarray, targets: jnp.ndarray, weights: float, **kwargs: Any +) -> jnp.ndarray: """Apply soft quantile normalization on a one dimensional array.""" num_points = inputs.shape[0] a = jnp.ones((num_points,)) / num_points @@ -544,12 +544,12 @@ def _quantile_normalization( def quantile_normalization( - inputs: jax.Array, - targets: jax.Array, - weights: Optional[jax.Array] = None, + inputs: jnp.ndarray, + targets: jnp.ndarray, + weights: Optional[jnp.ndarray] = None, axis: int = -1, **kwargs: Any, -) -> jax.Array: +) -> jnp.ndarray: r"""Re-normalize inputs so that its quantiles match those of targets/weights. 
Quantile normalization rearranges the values in inputs to values that match @@ -600,11 +600,11 @@ def quantile_normalization( def sort_with( - inputs: jax.Array, - criterion: jax.Array, + inputs: jnp.ndarray, + criterion: jnp.ndarray, topk: int = -1, **kwargs: Any, -) -> jax.Array: +) -> jnp.ndarray: r"""Sort a multidimensional array according to a real valued criterion. Given ``batch`` vectors of dimension `dim`, to which, for each, a real value @@ -655,7 +655,7 @@ def sort_with( return sort_fn(inputs) -def _quantize(inputs: jax.Array, num_q: int, **kwargs: Any) -> jax.Array: +def _quantize(inputs: jnp.ndarray, num_q: int, **kwargs: Any) -> jnp.ndarray: """Apply the soft quantization operator on a one dimensional array.""" num_points = inputs.shape[0] a = jnp.ones((num_points,)) / num_points @@ -665,11 +665,11 @@ def _quantize(inputs: jax.Array, num_q: int, **kwargs: Any) -> jax.Array: def quantize( - inputs: jax.Array, + inputs: jnp.ndarray, num_levels: int = 10, axis: int = -1, **kwargs: Any, -) -> jax.Array: +) -> jnp.ndarray: r"""Soft quantizes an input according using ``num_levels`` values along axis. The quantization operator consists in concentrating several values around diff --git a/src/ott/types.py b/src/ott/types.py index 5c4609ec2..7a4c88716 100644 --- a/src/ott/types.py +++ b/src/ott/types.py @@ -13,7 +13,7 @@ # limitations under the License. from typing import Protocol -import jax +import jax.numpy as jnp __all__ = ["Transport"] @@ -28,11 +28,11 @@ class can however be used in type hints to support duck typing. """ @property - def matrix(self) -> jax.Array: + def matrix(self) -> jnp.ndarray: ... - def apply(self, inputs: jax.Array, axis: int) -> jax.Array: + def apply(self, inputs: jnp.ndarray, axis: int) -> jnp.ndarray: ... - def marginal(self, axis: int = 0) -> jax.Array: + def marginal(self, axis: int = 0) -> jnp.ndarray: ... 
diff --git a/src/ott/utils.py b/src/ott/utils.py index 558f4ba1c..2acfd8420 100644 --- a/src/ott/utils.py +++ b/src/ott/utils.py @@ -18,6 +18,7 @@ from typing import Any, Callable, NamedTuple, Optional, Tuple import jax +import jax.numpy as jnp import numpy as np try: @@ -68,7 +69,7 @@ def wrapper(*args: Any, **kwargs: Any) -> Any: return functools.wraps(func)(wrapper) -def default_prng_key(rng: Optional[jax.Array] = None) -> jax.Array: +def default_prng_key(rng: Optional[jnp.ndarray] = None) -> jnp.ndarray: """Get the default PRNG key. Args: diff --git a/tests/conftest.py b/tests/conftest.py index a8118845c..bc4570343 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -17,6 +17,7 @@ import jax import jax.experimental +import jax.numpy as jnp import pytest from _pytest.python import Metafunc @@ -68,7 +69,7 @@ def pytest_generate_tests(metafunc: Metafunc) -> None: @pytest.fixture(scope="session") -def rng() -> jax.Array: +def rng() -> jnp.ndarray: return jax.random.PRNGKey(0) diff --git a/tests/geometry/costs_test.py b/tests/geometry/costs_test.py index 47446a4fd..b23e79071 100644 --- a/tests/geometry/costs_test.py +++ b/tests/geometry/costs_test.py @@ -27,7 +27,7 @@ ts_metrics = None -def _proj(matrix: jax.Array) -> jax.Array: +def _proj(matrix: jnp.ndarray) -> jnp.ndarray: u, _, v_h = jnp.linalg.svd(matrix, full_matrices=False) return u.dot(v_h) @@ -35,7 +35,7 @@ def _proj(matrix: jax.Array) -> jax.Array: @pytest.mark.fast() class TestCostFn: - def test_cosine(self, rng: jax.Array): + def test_cosine(self, rng: jnp.ndarray): """Test the cosine cost function.""" x = jnp.array([0, 0]) y = jnp.array([0, 0]) @@ -84,7 +84,7 @@ def test_cosine(self, rng: jax.Array): @pytest.mark.fast() class TestBuresBarycenter: - def test_bures(self, rng: jax.Array): + def test_bures(self, rng: jnp.ndarray): d = 3 r = jnp.array([1.2036, 0.2825, 0.013]) Sigma1 = r * jnp.eye(d) @@ -141,7 +141,7 @@ class TestRegTICost: ) def test_reg_cost_legendre( self, - rng: jax.Array, + rng: 
jnp.ndarray, scaling_reg: float, cost_fn_t: Type[costs.RegTICost], use_mat: bool, @@ -163,7 +163,7 @@ def test_reg_cost_legendre( @pytest.mark.parametrize("k", [1, 3, 10]) @pytest.mark.parametrize("d", [10, 50]) - def test_elastic_sq_k_overlap(self, rng: jax.Array, k: int, d: int): + def test_elastic_sq_k_overlap(self, rng: jnp.ndarray, k: int, d: int): expected = jax.random.normal(rng, (d,)) cost_fn = costs.ElasticSqKOverlap(k=k, scaling_reg=1e-2) @@ -178,7 +178,9 @@ def test_elastic_sq_k_overlap(self, rng: jax.Array, k: int, d: int): costs.ElasticSqKOverlap(k=3, scaling_reg=17) ] ) - def test_sparse_displacement(self, rng: jax.Array, cost_fn: costs.RegTICost): + def test_sparse_displacement( + self, rng: jnp.ndarray, cost_fn: costs.RegTICost + ): frac_sparse = 0.7 rng1, rng2 = jax.random.split(rng, 2) d = 17 @@ -194,7 +196,7 @@ def test_sparse_displacement(self, rng: jax.Array, cost_fn: costs.RegTICost): @pytest.mark.parametrize("cost_type_t", [costs.ElasticL1, costs.ElasticSTVS]) def test_stronger_regularization_increases_sparsity( - self, rng: jax.Array, cost_type_t: Type[costs.RegTICost] + self, rng: jnp.ndarray, cost_type_t: Type[costs.RegTICost] ): d, rngs = 17, jax.random.split(rng, 4) x = jax.random.normal(rngs[0], (50, d)) @@ -223,7 +225,7 @@ class TestSoftDTW: @pytest.mark.parametrize("n", [7, 10]) @pytest.mark.parametrize("m", [9, 10]) @pytest.mark.parametrize("gamma", [1e-3, 5]) - def test_soft_dtw(self, rng: jax.Array, n: int, m: int, gamma: float): + def test_soft_dtw(self, rng: jnp.ndarray, n: int, m: int, gamma: float): rng1, rng2 = jax.random.split(rng, 2) t1 = jax.random.normal(rng1, (n,)) t2 = jax.random.normal(rng2, (m,)) @@ -236,7 +238,7 @@ def test_soft_dtw(self, rng: jax.Array, n: int, m: int, gamma: float): @pytest.mark.parametrize(("debiased", "jit"), [(False, True), (True, False)]) def test_soft_dtw_debiased( self, - rng: jax.Array, + rng: jnp.ndarray, debiased: bool, jit: bool, ): @@ -263,7 +265,7 @@ def test_soft_dtw_debiased( 
@pytest.mark.parametrize(("debiased", "jit"), [(False, False), (True, True)]) @pytest.mark.parametrize("gamma", [1e-2, 1]) def test_soft_dtw_grad( - self, rng: jax.Array, debiased: bool, jit: bool, gamma: float + self, rng: jnp.ndarray, debiased: bool, jit: bool, gamma: float ): rngs = jax.random.split(rng, 4) eps, tol = 1e-3, 1e-5 diff --git a/tests/geometry/graph_test.py b/tests/geometry/graph_test.py index cda2900a8..b0c194c23 100644 --- a/tests/geometry/graph_test.py +++ b/tests/geometry/graph_test.py @@ -35,7 +35,7 @@ def random_graph( *, return_laplacian: bool = False, directed: bool = False, -) -> jax.Array: +) -> jnp.ndarray: G = random_graphs.fast_gnp_random_graph(n, p, seed=seed, directed=directed) if not directed: assert nx.is_connected(G), "Generated graph is not connected." @@ -51,7 +51,7 @@ def random_graph( return jnp.asarray(G.toarray()) -def gt_geometry(G: jax.Array, *, epsilon: float = 1e-2) -> geometry.Geometry: +def gt_geometry(G: jnp.ndarray, *, epsilon: float = 1e-2) -> geometry.Geometry: if not isinstance(G, nx.Graph): G = nx.from_numpy_array(np.asarray(G)) @@ -72,7 +72,7 @@ def gt_geometry(G: jax.Array, *, epsilon: float = 1e-2) -> geometry.Geometry: class TestGraph: - def test_kernel_is_symmetric_positive_definite(self, rng: jax.Array): + def test_kernel_is_symmetric_positive_definite(self, rng: jnp.ndarray): n, tol = 65, 0.02 x = jax.random.normal(rng, (n,)) geom = graph.Graph.from_graph(random_graph(n), t=1e-3) @@ -109,7 +109,7 @@ def test_automatic_t(self): ) def test_approximates_ground_truth( self, - rng: jax.Array, + rng: jnp.ndarray, numerical_scheme: Literal["backward_euler", "crank_nicolson"], ): eps, n_steps = 1e-5, 20 @@ -160,7 +160,7 @@ def test_crank_nicolson_more_stable(self, t: Optional[float], n_steps: int): @pytest.mark.parametrize(("jit", "normalize"), [(False, True), (True, False)]) def test_directed_graph(self, jit: bool, normalize: bool): - def create_graph(G: jax.Array) -> graph.Graph: + def create_graph(G: 
jnp.ndarray) -> graph.Graph: return graph.Graph.from_graph(G, directed=True, normalize=normalize) G = random_graph(16, p=0.25, directed=True) @@ -181,7 +181,7 @@ def create_graph(G: jax.Array) -> graph.Graph: @pytest.mark.parametrize("normalize", [False, True]) def test_normalize_laplacian(self, directed: bool, normalize: bool): - def laplacian(G: jax.Array) -> jax.Array: + def laplacian(G: jnp.ndarray) -> jnp.ndarray: if directed: G = G + G.T @@ -203,7 +203,7 @@ def laplacian(G: jax.Array) -> jax.Array: np.testing.assert_allclose(actual, expected, rtol=1e-6, atol=1e-6) @pytest.mark.fast.with_args(jit=[False, True], only_fast=0) - def test_graph_sinkhorn(self, rng: jax.Array, jit: bool): + def test_graph_sinkhorn(self, rng: jnp.ndarray, jit: bool): def callback(geom: geometry.Geometry) -> sinkhorn.SinkhornOutput: solver = sinkhorn.Sinkhorn(lse_mode=False) @@ -246,12 +246,12 @@ def callback(geom: geometry.Geometry) -> sinkhorn.SinkhornOutput: ids=["not-implicit", "implicit"], ) def test_dense_graph_differentiability( - self, rng: jax.Array, implicit_diff: bool + self, rng: jnp.ndarray, implicit_diff: bool ): def callback( - data: jax.Array, rows: jax.Array, cols: jax.Array, shape: Tuple[int, - int] + data: jnp.ndarray, rows: jnp.ndarray, cols: jnp.ndarray, + shape: Tuple[int, int] ) -> float: G = sparse.BCOO((data, jnp.c_[rows, cols]), shape=shape).todense() @@ -281,7 +281,7 @@ def callback( actual = 2 * jnp.vdot(v_w, grad_w) np.testing.assert_allclose(actual, expected, rtol=1e-4, atol=1e-4) - def test_tolerance_hilbert_metric(self, rng: jax.Array): + def test_tolerance_hilbert_metric(self, rng: jnp.ndarray): n, n_steps, t, tol = 256, 1000, 1e-4, 3e-4 G = random_graph(n, p=0.15) x = jnp.abs(jax.random.normal(rng, (n,))) diff --git a/tests/geometry/low_rank_test.py b/tests/geometry/low_rank_test.py index 6b3c36edd..b3cda89cf 100644 --- a/tests/geometry/low_rank_test.py +++ b/tests/geometry/low_rank_test.py @@ -24,7 +24,7 @@ @pytest.mark.fast() class TestLRGeometry: - 
def test_apply(self, rng: jax.Array): + def test_apply(self, rng: jnp.ndarray): """Test application of cost to vec or matrix.""" n, m, r = 17, 11, 7 rngs = jax.random.split(rng, 5) @@ -45,7 +45,7 @@ def test_apply(self, rng: jax.Array): @pytest.mark.parametrize("scale_cost", ["mean", "max_cost", "max_bound", 42.]) def test_conversion_pointcloud( - self, rng: jax.Array, scale_cost: Union[str, float] + self, rng: jnp.ndarray, scale_cost: Union[str, float] ): """Test conversion from PointCloud to LRCGeometry.""" n, m, d = 17, 11, 3 @@ -69,7 +69,7 @@ def test_conversion_pointcloud( rtol=1e-4 ) - def test_apply_squared(self, rng: jax.Array): + def test_apply_squared(self, rng: jnp.ndarray): """Test application of squared cost to vec or matrix.""" n, m = 27, 25 rngs = jax.random.split(rng, 5) @@ -94,7 +94,7 @@ def test_apply_squared(self, rng: jax.Array): @pytest.mark.parametrize("bias", [(0, 0), (4, 5)]) @pytest.mark.parametrize("scale_factor", [(1, 1), (2, 3)]) def test_add_lr_geoms( - self, rng: jax.Array, bias: Tuple[float, float], + self, rng: jnp.ndarray, bias: Tuple[float, float], scale_factor: Tuple[float, float] ): """Test application of cost to vec or matrix.""" @@ -133,7 +133,7 @@ def test_add_lr_geoms( @pytest.mark.parametrize(("scale", "scale_cost", "epsilon"), [(0.1, "mean", None), (0.9, "max_cost", 1e-2)]) def test_add_lr_geoms_scale_factor( - self, rng: jax.Array, scale: float, scale_cost: str, + self, rng: jnp.ndarray, scale: float, scale_cost: str, epsilon: Optional[float] ): n, d = 71, 2 @@ -160,7 +160,8 @@ def test_add_lr_geoms_scale_factor( @pytest.mark.parametrize("axis", [0, 1]) @pytest.mark.parametrize("fn", [lambda x: x + 10, lambda x: x * 2]) def test_apply_affine_function_efficient( - self, rng: jax.Array, fn: Callable[[jax.Array], jax.Array], axis: int + self, rng: jnp.ndarray, fn: Callable[[jnp.ndarray], jnp.ndarray], + axis: int ): n, m, d = 21, 13, 3 rngs = jax.random.split(rng, 3) @@ -180,7 +181,7 @@ def 
test_apply_affine_function_efficient( np.testing.assert_allclose(res_ineff, res_eff, rtol=1e-4, atol=1e-4) @pytest.mark.parametrize("rank", [5, 1000]) - def test_point_cloud_to_lr(self, rng: jax.Array, rank: int): + def test_point_cloud_to_lr(self, rng: jnp.ndarray, rank: int): n, m = 1500, 1000 scale = 2.0 rngs = jax.random.split(rng, 2) @@ -220,7 +221,7 @@ def assert_upper_bound( assert lhs <= rhs @pytest.mark.fast.with_args(rank=[2, 3], tol=[5e-1, 1e-2], only_fast=0) - def test_geometry_to_lr(self, rng: jax.Array, rank: int, tol: float): + def test_geometry_to_lr(self, rng: jnp.ndarray, rank: int, tol: float): rng1, rng2 = jax.random.split(rng, 2) x = jax.random.normal(rng1, shape=(370, 3)) y = jax.random.normal(rng2, shape=(460, 3)) @@ -241,7 +242,8 @@ def test_geometry_to_lr(self, rng: jax.Array, rank: int, tol: float): only_fast=1 ) def test_point_cloud_to_lr( - self, rng: jax.Array, batch_size: Optional[int], scale_cost: Optional[str] + self, rng: jnp.ndarray, batch_size: Optional[int], + scale_cost: Optional[str] ): rank, tol = 7, 1e-1 rng1, rng2 = jax.random.split(rng, 2) @@ -265,7 +267,7 @@ def test_point_cloud_to_lr( assert geom_lr.cost_rank == rank self.assert_upper_bound(geom, geom_lr, rank=rank, tol=tol) - def test_to_lrc_geometry_noop(self, rng: jax.Array): + def test_to_lrc_geometry_noop(self, rng: jnp.ndarray): rng1, rng2 = jax.random.split(rng, 2) cost1 = jax.random.normal(rng1, shape=(32, 2)) cost2 = jax.random.normal(rng2, shape=(23, 2)) @@ -287,7 +289,7 @@ def test_apply_transport_from_potentials(self): np.testing.assert_allclose(res, 1.1253539e-07, rtol=1e-6, atol=1e-6) @pytest.mark.limit_memory("190 MB") - def test_large_scale_factorization(self, rng: jax.Array): + def test_large_scale_factorization(self, rng: jnp.ndarray): rank, tol = 4, 1e-2 rng1, rng2 = jax.random.split(rng, 2) x = jax.random.normal(rng1, shape=(10_000, 7)) @@ -318,7 +320,7 @@ def test_conversion_grid(self): cost_matrix, cost_matrix_lrc, rtol=1e-5, atol=1e-5 ) - def 
test_full_to_lrc_geometry(self, rng: jax.Array): + def test_full_to_lrc_geometry(self, rng: jnp.ndarray): rng1, rng2 = jax.random.split(rng, 2) x = jax.random.normal(rng1, shape=(13, 7)) y = jax.random.normal(rng2, shape=(29, 7)) diff --git a/tests/geometry/pointcloud_test.py b/tests/geometry/pointcloud_test.py index 5f75ddb8e..ff32789fe 100644 --- a/tests/geometry/pointcloud_test.py +++ b/tests/geometry/pointcloud_test.py @@ -24,7 +24,7 @@ @pytest.mark.fast() class TestPointCloudApply: - def test_apply_cost_and_kernel(self, rng: jax.Array): + def test_apply_cost_and_kernel(self, rng: jnp.ndarray): """Test consistency of cost/kernel apply to vec.""" n, m, p, b = 5, 8, 10, 7 rngs = jax.random.split(rng, 5) @@ -68,7 +68,7 @@ def test_apply_cost_and_kernel(self, rng: jax.Array): np.testing.assert_allclose(prod0_online, prod0, rtol=1e-03, atol=1e-02) np.testing.assert_allclose(prod1_online, prod1, rtol=1e-03, atol=1e-02) - def test_general_cost_fn(self, rng: jax.Array): + def test_general_cost_fn(self, rng: jnp.ndarray): """Test non-vec cost apply to vec.""" n, m, p, b = 5, 8, 10, 7 rngs = jax.random.split(rng, 5) @@ -97,7 +97,7 @@ def test_correct_shape(self): np.testing.assert_array_equal(pc.shape, (n, m)) @pytest.mark.parametrize("axis", [0, 1]) - def test_apply_cost_without_norm(self, rng: jax.Array, axis: 1): + def test_apply_cost_without_norm(self, rng: jnp.ndarray, axis: 1): rng1, rng2 = jax.random.split(rng, 2) x = jax.random.normal(rng1, shape=(17, 3)) y = jax.random.normal(rng2, shape=(12, 3)) @@ -122,7 +122,7 @@ class TestPointCloudCosineConversion: "scale_cost", ["mean", "median", "max_cost", "max_norm", 41] ) def test_cosine_to_sqeucl_conversion( - self, rng: jax.Array, scale_cost: Union[str, float] + self, rng: jnp.ndarray, scale_cost: Union[str, float] ): rng1, rng2 = jax.random.split(rng, 2) x = jax.random.normal(rng1, shape=(101, 4)) @@ -155,7 +155,7 @@ def test_cosine_to_sqeucl_conversion( ) @pytest.mark.parametrize("axis", [0, 1]) def 
test_apply_cost_cosine_to_sqeucl( - self, rng: jax.Array, axis: int, scale_cost: Union[str, float] + self, rng: jnp.ndarray, axis: int, scale_cost: Union[str, float] ): rng1, rng2 = jax.random.split(rng, 2) x = jax.random.normal(rng1, shape=(17, 5)) diff --git a/tests/geometry/scaling_cost_test.py b/tests/geometry/scaling_cost_test.py index b60805d34..ce3f616ce 100644 --- a/tests/geometry/scaling_cost_test.py +++ b/tests/geometry/scaling_cost_test.py @@ -26,7 +26,7 @@ class TestScaleCost: @pytest.fixture(autouse=True) - def initialize(self, rng: jax.Array): + def initialize(self, rng: jnp.ndarray): self.dim = 4 self.n = 7 self.m = 9 @@ -53,7 +53,7 @@ def test_scale_cost_pointcloud( """Test various scale cost options for pointcloud.""" def apply_sinkhorn( - x: jax.Array, y: jax.Array, a: jax.Array, b: jax.Array, + x: jnp.ndarray, y: jnp.ndarray, a: jnp.ndarray, b: jnp.ndarray, scale_cost: Union[str, float] ): geom = pointcloud.PointCloud( @@ -120,8 +120,8 @@ def test_scale_cost_geometry(self, scale: Union[str, float]): """Test various scale cost options for geometry.""" def apply_sinkhorn( - cost: jax.Array, a: jax.Array, b: jax.Array, scale_cost: Union[str, - float] + cost: jnp.ndarray, a: jnp.ndarray, b: jnp.ndarray, + scale_cost: Union[str, float] ): geom = geometry.Geometry(cost, epsilon=self.eps, scale_cost=scale_cost) prob = linear_problem.LinearProblem(geom, a, b) diff --git a/tests/geometry/subsetting_test.py b/tests/geometry/subsetting_test.py index c07929436..5d7306682 100644 --- a/tests/geometry/subsetting_test.py +++ b/tests/geometry/subsetting_test.py @@ -25,7 +25,7 @@ @pytest.fixture() def pc_masked( - rng: jax.Array + rng: jnp.ndarray ) -> Tuple[pointcloud.PointCloud, pointcloud.PointCloud]: n, m = 20, 30 rng1, rng2 = jax.random.split(rng, 2) @@ -66,7 +66,7 @@ class TestMaskPointCloud: "clazz", [geometry.Geometry, pointcloud.PointCloud, low_rank.LRCGeometry] ) def test_mask( - self, rng: jax.Array, clazz: Type[geometry.Geometry], + self, rng: 
jnp.ndarray, clazz: Type[geometry.Geometry], src_ixs: Optional[Union[int, Sequence[int]]], tgt_ixs: Optional[Union[int, Sequence[int]]] ): @@ -140,7 +140,7 @@ def test_masked_summary( ) def test_mask_permutation( - self, geom_masked: Tuple[Geom_t, pointcloud.PointCloud], rng: jax.Array + self, geom_masked: Tuple[Geom_t, pointcloud.PointCloud], rng: jnp.ndarray ): rng1, rng2 = jax.random.split(rng) geom, _ = geom_masked @@ -162,7 +162,7 @@ def test_mask_permutation( ) def test_boolean_mask( - self, geom_masked: Tuple[Geom_t, pointcloud.PointCloud], rng: jax.Array + self, geom_masked: Tuple[Geom_t, pointcloud.PointCloud], rng: jnp.ndarray ): rng1, rng2 = jax.random.split(rng) p = jnp.array([0.5, 0.5]) diff --git a/tests/initializers/linear/sinkhorn_init_test.py b/tests/initializers/linear/sinkhorn_init_test.py index 73c0ddaaa..7686ddfa9 100644 --- a/tests/initializers/linear/sinkhorn_init_test.py +++ b/tests/initializers/linear/sinkhorn_init_test.py @@ -25,7 +25,7 @@ def create_sorting_problem( - rng: jax.Array, + rng: jnp.ndarray, n: int, epsilon: float = 1e-2, batch_size: Optional[int] = None @@ -55,7 +55,7 @@ def create_sorting_problem( def create_ot_problem( - rng: jax.Array, + rng: jnp.ndarray, n: int, m: int, d: int, @@ -80,12 +80,12 @@ def create_ot_problem( def run_sinkhorn( - x: jax.Array, - y: jax.Array, + x: jnp.ndarray, + y: jnp.ndarray, *, initializer: linear_init.SinkhornInitializer, - a: Optional[jax.Array] = None, - b: Optional[jax.Array] = None, + a: Optional[jnp.ndarray] = None, + b: Optional[jnp.ndarray] = None, epsilon: float = 1e-2, lse_mode: bool = True, ) -> sinkhorn.SinkhornOutput: @@ -132,7 +132,9 @@ def test_create_initializer(self, init: str): @pytest.mark.parametrize(("vector_min", "lse_mode"), [(True, True), (True, False), (False, True)]) - def test_sorting_init(self, vector_min: bool, lse_mode: bool, rng: jax.Array): + def test_sorting_init( + self, vector_min: bool, lse_mode: bool, rng: jnp.ndarray + ): """Tests sorting dual 
initializer.""" n = 50 epsilon = 1e-2 @@ -166,7 +168,7 @@ def test_sorting_init(self, vector_min: bool, lse_mode: bool, rng: jax.Array): assert sink_out_init.converged assert sink_out_base.n_iters > sink_out_init.n_iters - def test_sorting_init_online(self, rng: jax.Array): + def test_sorting_init_online(self, rng: jnp.ndarray): n = 10 epsilon = 1e-2 @@ -177,7 +179,7 @@ def test_sorting_init_online(self, rng: jax.Array): with pytest.raises(AssertionError, match=r"online"): sort_init.init_dual_a(ot_problem, lse_mode=True) - def test_sorting_init_square_cost(self, rng: jax.Array): + def test_sorting_init_square_cost(self, rng: jnp.ndarray): n, m, d = 10, 15, 1 epsilon = 1e-2 @@ -186,7 +188,7 @@ def test_sorting_init_square_cost(self, rng: jax.Array): with pytest.raises(AssertionError, match=r"square"): sort_init.init_dual_a(ot_problem, lse_mode=True) - def test_default_initializer(self, rng: jax.Array): + def test_default_initializer(self, rng: jnp.ndarray): """Tests default initializer""" n, m, d = 20, 20, 2 epsilon = 1e-2 @@ -204,7 +206,7 @@ def test_default_initializer(self, rng: jax.Array): np.testing.assert_array_equal(0., default_potential_a) np.testing.assert_array_equal(0., default_potential_b) - def test_gauss_pointcloud_geom(self, rng: jax.Array): + def test_gauss_pointcloud_geom(self, rng: jnp.ndarray): n, m, d = 20, 20, 2 epsilon = 1e-2 @@ -225,7 +227,7 @@ def test_gauss_pointcloud_geom(self, rng: jax.Array): @pytest.mark.parametrize("jit", [False, True]) @pytest.mark.parametrize("initializer", ["sorting", "gaussian", "subsample"]) def test_initializer_n_iter( - self, rng: jax.Array, lse_mode: bool, jit: bool, + self, rng: jnp.ndarray, lse_mode: bool, jit: bool, initializer: Literal["sorting", "gaussian", "subsample"] ): """Tests Gaussian initializer""" diff --git a/tests/initializers/linear/sinkhorn_lr_init_test.py b/tests/initializers/linear/sinkhorn_lr_init_test.py index f3fe7acd1..e954fec76 100644 --- 
a/tests/initializers/linear/sinkhorn_lr_init_test.py +++ b/tests/initializers/linear/sinkhorn_lr_init_test.py @@ -36,7 +36,7 @@ def test_explicit_initializer(self): ) @pytest.mark.parametrize("partial_init", ["q", "r", "g"]) def test_partial_initialization( - self, rng: jax.Array, initializer: str, partial_init: str + self, rng: jnp.ndarray, initializer: str, partial_init: str ): n, d, rank = 27, 5, 6 rng1, rng2, rng3, rng4 = jax.random.split(rng, 4) @@ -64,7 +64,7 @@ def test_partial_initialization( @pytest.mark.fast.with_args("rank", [2, 4, 10, 13], only_fast=True) def test_generalized_k_means_has_correct_rank( - self, rng: jax.Array, rank: int + self, rng: jnp.ndarray, rank: int ): n, d = 27, 5 x = jax.random.normal(rng, (n, d)) @@ -81,7 +81,7 @@ def test_generalized_k_means_has_correct_rank( assert jnp.linalg.matrix_rank(q) == rank assert jnp.linalg.matrix_rank(r) == rank - def test_generalized_k_means_matches_k_means(self, rng: jax.Array): + def test_generalized_k_means_matches_k_means(self, rng: jnp.ndarray): n, d, rank = 27, 7, 5 eps = 1e-1 rng1, rng2 = jax.random.split(rng, 2) @@ -111,7 +111,7 @@ def test_generalized_k_means_matches_k_means(self, rng: jax.Array): ) @pytest.mark.parametrize("epsilon", [0., 1e-1]) - def test_better_initialization_helps(self, rng: jax.Array, epsilon: float): + def test_better_initialization_helps(self, rng: jnp.ndarray, epsilon: float): n, d, rank = 81, 13, 3 rng1, rng2 = jax.random.split(rng, 2) x = jax.random.normal(rng1, (n, d)) diff --git a/tests/initializers/quadratic/gw_init_test.py b/tests/initializers/quadratic/gw_init_test.py index 4c39bafb4..e680e9c01 100644 --- a/tests/initializers/quadratic/gw_init_test.py +++ b/tests/initializers/quadratic/gw_init_test.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import jax +import jax.numpy as jnp import numpy as np import pytest @@ -49,7 +50,7 @@ def test_explicit_initializer_lr(self): assert solver.initializer.rank == rank @pytest.mark.parametrize("eps", [0., 1e-2]) - def test_gw_better_initialization_helps(self, rng: jax.Array, eps: float): + def test_gw_better_initialization_helps(self, rng: jnp.ndarray, eps: float): n, m, d1, d2, rank = 83, 84, 8, 6, 4 rng1, rng2, rng3, rng4 = jax.random.split(rng, 4) diff --git a/tests/math/lse_test.py b/tests/math/lse_test.py index b842afe21..36e7eba7f 100644 --- a/tests/math/lse_test.py +++ b/tests/math/lse_test.py @@ -22,7 +22,7 @@ @pytest.mark.fast() class TestGeometryLse: - def test_lse(self, rng: jax.Array): + def test_lse(self, rng: jnp.ndarray): """Test consistency of custom lse's jvp.""" n, m = 12, 8 rngs = jax.random.split(rng, 5) diff --git a/tests/math/math_utils_test.py b/tests/math/math_utils_test.py index b8451355b..7848bc2a9 100644 --- a/tests/math/math_utils_test.py +++ b/tests/math/math_utils_test.py @@ -26,7 +26,7 @@ class TestNorm: @pytest.mark.parametrize("ord", [1.1, 2.0, jnp.inf]) def test_norm( self, - rng: jax.Array, + rng: jnp.ndarray, ord, ): d = 5 diff --git a/tests/math/matrix_square_root_test.py b/tests/math/matrix_square_root_test.py index 2263ea8b9..7c8a1e7d5 100644 --- a/tests/math/matrix_square_root_test.py +++ b/tests/math/matrix_square_root_test.py @@ -21,7 +21,7 @@ from ott.math import matrix_square_root -def _get_random_spd_matrix(dim: int, rng: jax.Array): +def _get_random_spd_matrix(dim: int, rng: jnp.ndarray): # Get a random symmetric, positive definite matrix of a specified size. 
rng, subrng0, subrng1 = jax.random.split(rng, num=3) @@ -37,9 +37,9 @@ def _get_random_spd_matrix(dim: int, rng: jax.Array): def _get_test_fn( - fn: Callable[[jax.Array], jax.Array], dim: int, rng: jax.Array, + fn: Callable[[jnp.ndarray], jnp.ndarray], dim: int, rng: jnp.ndarray, **kwargs: Any -) -> Callable[[jax.Array], jax.Array]: +) -> Callable[[jnp.ndarray], jnp.ndarray]: # We want to test gradients of a function fn that maps positive definite # matrices to positive definite matrices by comparing them to finite # difference approximations. We'll do so via a test function that @@ -54,7 +54,7 @@ def _get_test_fn( unit = jax.random.normal(key=subrng3, shape=(dim, dim)) unit /= jnp.sqrt(jnp.sum(unit ** 2.)) - def _test_fn(x: jax.Array, **kwargs: Any) -> jax.Array: + def _test_fn(x: jnp.ndarray, **kwargs: Any) -> jnp.ndarray: # m is the product of 2 symmetric, positive definite matrices # so it will be positive definite but not necessarily symmetric m = jnp.matmul(m0, m1 + x * dx) @@ -63,7 +63,7 @@ def _test_fn(x: jax.Array, **kwargs: Any) -> jax.Array: return _test_fn -def _sqrt_plus_inv_sqrt(x: jax.Array) -> jax.Array: +def _sqrt_plus_inv_sqrt(x: jnp.ndarray) -> jnp.ndarray: sqrtm = matrix_square_root.sqrtm(x) return sqrtm[0] + sqrtm[1] @@ -71,7 +71,7 @@ def _sqrt_plus_inv_sqrt(x: jax.Array) -> jax.Array: class TestMatrixSquareRoot: @pytest.fixture(autouse=True) - def initialize(self, rng: jax.Array): + def initialize(self, rng: jnp.ndarray): self.dim = 13 self.batch = 3 # Values for testing the Sylvester solver diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index 008036790..0dd65ba57 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -23,12 +23,8 @@ def __init__( self.rng = np.random.default_rng(seed=0) def __next__(self) -> Mapping[str, np.ndarray]: - inds_source = self.rng.choice( - len(self.source_data), size=[self.batch_size] - ) - inds_target = self.rng.choice( - len(self.target_data), size=[self.batch_size] - ) + 
inds_source = self.rng.choice(len(self.source_data), size=[self.batch_size]) + inds_target = self.rng.choice(len(self.target_data), size=[self.batch_size]) return { "source_lin": self.source_data[inds_source, :], @@ -79,8 +75,12 @@ def data_loader_gaussian_conditional(): source_1 = rng.normal(size=(100, 2)) target_1 = rng.normal(size=(100, 2)) - 2.0 - dl0 = DataLoader(source_0, target_0, 16, source_conditions=np.zeros_like(source_0) * 0.0) - dl1 = DataLoader(source_1, target_1, 16, source_conditions=np.ones_like(source_1) * 1.0) + dl0 = DataLoader( + source_0, target_0, 16, source_conditions=np.zeros_like(source_0) * 0.0 + ) + dl1 = DataLoader( + source_1, target_1, 16, source_conditions=np.ones_like(source_1) * 1.0 + ) return ConditionalDataLoader({"0": dl0, "1": dl1}, np.array([0.5, 0.5])) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 0c4abb55e..ed65fc657 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -13,7 +13,6 @@ # limitations under the License. 
from typing import Iterator, Optional -import jax import jax.numpy as jnp import optax import pytest @@ -76,7 +75,7 @@ def test_genot_linear_unconditional( result_forward = genot.transport( source_lin, condition=condition, forward=True ) - assert isinstance(result_forward, jax.Array) + assert isinstance(result_forward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_forward)) == 0 @pytest.mark.parametrize("k_samples_per_x", [1, 2]) @@ -120,7 +119,7 @@ def test_genot_quad_unconditional( result_forward = genot.transport( source_quad, condition=condition, forward=True ) - assert isinstance(result_forward, jax.Array) + assert isinstance(result_forward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_forward)) == 0 @pytest.mark.parametrize("k_samples_per_x", [1, 2]) @@ -166,7 +165,7 @@ def test_genot_fused_unconditional( condition=condition, forward=True ) - assert isinstance(result_forward, jax.Array) + assert isinstance(result_forward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_forward)) == 0 @pytest.mark.parametrize("k_samples_per_x", [1, 2]) @@ -217,7 +216,7 @@ def test_genot_linear_conditional( result_forward = genot.transport( source_lin, condition=condition, forward=True ) - assert isinstance(result_forward, jax.Array) + assert isinstance(result_forward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_forward)) == 0 @pytest.mark.parametrize("k_samples_per_x", [1, 2]) @@ -263,7 +262,7 @@ def test_genot_quad_conditional( result_forward = genot.transport( source_quad, condition=condition, forward=True ) - assert isinstance(result_forward, jax.Array) + assert isinstance(result_forward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_forward)) == 0 @pytest.mark.parametrize("k_samples_per_x", [1, 2]) @@ -311,7 +310,7 @@ def test_genot_fused_conditional( condition=condition, forward=True ) - assert isinstance(result_forward, jax.Array) + assert isinstance(result_forward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_forward)) == 0 @pytest.mark.parametrize("conditional", [False, 
True]) @@ -365,9 +364,9 @@ def test_genot_linear_learn_rescaling( genot(data_loader, data_loader) result_eta = genot.evaluate_eta(source_lin, condition=condition) - assert isinstance(result_eta, jax.Array) + assert isinstance(result_eta, jnp.ndarray) assert jnp.sum(jnp.isnan(result_eta)) == 0 result_xi = genot.evaluate_xi(target_lin, condition=condition) - assert isinstance(result_xi, jax.Array) + assert isinstance(result_xi, jnp.ndarray) assert jnp.sum(jnp.isnan(result_xi)) == 0 diff --git a/tests/neural/icnn_test.py b/tests/neural/icnn_test.py index fd6c07f2b..4d760557f 100644 --- a/tests/neural/icnn_test.py +++ b/tests/neural/icnn_test.py @@ -22,7 +22,7 @@ @pytest.mark.fast() class TestICNN: - def test_icnn_convexity(self, rng: jax.Array): + def test_icnn_convexity(self, rng: jnp.ndarray): """Tests convexity of ICNN.""" n_samples, n_features = 10, 2 dim_hidden = (64, 64) @@ -48,7 +48,7 @@ def test_icnn_convexity(self, rng: jax.Array): np.testing.assert_array_equal(jnp.asarray(out) >= 0, True) - def test_icnn_hessian(self, rng: jax.Array): + def test_icnn_hessian(self, rng: jnp.ndarray): """Tests if Hessian of ICNN is positive-semidefinite.""" # define icnn model diff --git a/tests/neural/losses_test.py b/tests/neural/losses_test.py index 8cff7bd64..6ad2c0b3e 100644 --- a/tests/neural/losses_test.py +++ b/tests/neural/losses_test.py @@ -13,6 +13,7 @@ # limitations under the License. 
import jax +import jax.numpy as jnp import numpy as np import pytest @@ -27,7 +28,7 @@ class TestMongeGap: @pytest.mark.parametrize("n_samples", [5, 25]) @pytest.mark.parametrize("n_features", [10, 50, 100]) def test_monge_gap_non_negativity( - self, rng: jax.Array, n_samples: int, n_features: int + self, rng: jnp.ndarray, n_samples: int, n_features: int ): # generate data @@ -53,7 +54,7 @@ def test_monge_gap_non_negativity( np.testing.assert_array_equal(monge_gap_value, monge_gap_from_samples_value) - def test_monge_gap_jit(self, rng: jax.Array): + def test_monge_gap_jit(self, rng: jnp.ndarray): n_samples, n_features = 31, 17 # generate data rng1, rng2 = jax.random.split(rng, 2) @@ -85,7 +86,7 @@ def test_monge_gap_jit(self, rng: jax.Array): ], ) def test_monge_gap_from_samples_different_cost( - self, rng: jax.Array, cost_fn: costs.CostFn, n_samples: int, + self, rng: jnp.ndarray, cost_fn: costs.CostFn, n_samples: int, n_features: int ): """Test that the Monge gap for different costs. diff --git a/tests/neural/map_estimator_test.py b/tests/neural/map_estimator_test.py index 0454db751..7c506aa38 100644 --- a/tests/neural/map_estimator_test.py +++ b/tests/neural/map_estimator_test.py @@ -13,7 +13,7 @@ # limitations under the License. 
from typing import Optional -import jax +import jax.numpy as jnp import pytest from ott import datasets @@ -34,8 +34,8 @@ def test_map_estimator_convergence(self): # define the fitting loss and the regularizer def fitting_loss( - samples: jax.Array, - mapped_samples: jax.Array, + samples: jnp.ndarray, + mapped_samples: jnp.ndarray, ) -> Optional[float]: r"""Sinkhorn divergence fitting loss.""" div = sinkhorn_divergence.sinkhorn_divergence( diff --git a/tests/neural/meta_initializer_test.py b/tests/neural/meta_initializer_test.py index 25a88907e..f978e8206 100644 --- a/tests/neural/meta_initializer_test.py +++ b/tests/neural/meta_initializer_test.py @@ -31,7 +31,7 @@ class MetaMLP(nn.Module): num_hidden_layers: int = 3 @nn.compact - def __call__(self, a: jax.Array, b: jax.Array) -> jax.Array: + def __call__(self, a: jnp.ndarray, b: jnp.ndarray) -> jnp.ndarray: dtype = a.dtype z = jnp.concatenate((a, b)) for _ in range(self.num_hidden_layers): @@ -40,7 +40,7 @@ def __call__(self, a: jax.Array, b: jax.Array) -> jax.Array: def create_ot_problem( - rng: jax.Array, + rng: jnp.ndarray, n: int, m: int, d: int, @@ -65,12 +65,12 @@ def create_ot_problem( def run_sinkhorn( - x: jax.Array, - y: jax.Array, + x: jnp.ndarray, + y: jnp.ndarray, *, initializer: linear_init.SinkhornInitializer, - a: Optional[jax.Array] = None, - b: Optional[jax.Array] = None, + a: Optional[jnp.ndarray] = None, + b: Optional[jnp.ndarray] = None, epsilon: float = 1e-2, lse_mode: bool = True, ) -> sinkhorn.SinkhornOutput: @@ -86,7 +86,7 @@ def run_sinkhorn( class TestMetaInitializer: @pytest.mark.parametrize("lse_mode", [True, False]) - def test_meta_initializer(self, rng: jax.Array, lse_mode: bool): + def test_meta_initializer(self, rng: jnp.ndarray, lse_mode: bool): """Tests Meta initializer""" n, m, d = 20, 20, 2 epsilon = 1e-2 diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index 9a75cb3fb..4346b6be8 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -13,7 
+13,6 @@ # limitations under the License. from typing import Iterator, Type -import jax import jax.numpy as jnp import optax import pytest @@ -61,12 +60,18 @@ def test_flow_matching(self, data_loader_gaussian, flow: Type[BaseFlow]): fm(data_loader_gaussian, data_loader_gaussian) batch = next(data_loader_gaussian) - result_forward = fm.transport(batch["source_lin"], condition=batch["source_conditions"], forward=True) - assert isinstance(result_forward, jax.Array) + result_forward = fm.transport( + batch["source_lin"], condition=batch["source_conditions"], forward=True + ) + assert isinstance(result_forward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_forward)) == 0 - result_backward = fm.transport(batch["target_lin"], condition=batch["target_conditions"], forward=False) - assert isinstance(result_backward, jax.Array) + result_backward = fm.transport( + batch["target_lin"], + condition=batch["target_conditions"], + forward=False + ) + assert isinstance(result_backward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_backward)) == 0 @pytest.mark.parametrize( @@ -103,12 +108,18 @@ def test_flow_matching_with_conditions( ) batch = next(data_loader_gaussian_with_conditions) - result_forward = fm.transport(batch["source_lin"], condition=batch["source_conditions"], forward=True) - assert isinstance(result_forward, jax.Array) + result_forward = fm.transport( + batch["source_lin"], condition=batch["source_conditions"], forward=True + ) + assert isinstance(result_forward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_forward)) == 0 - result_backward = fm.transport(batch["target_lin"], condition=batch["target_conditions"], forward=False) - assert isinstance(result_backward, jax.Array) + result_backward = fm.transport( + batch["target_lin"], + condition=batch["target_conditions"], + forward=False + ) + assert isinstance(result_backward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_backward)) == 0 @pytest.mark.parametrize( @@ -142,12 +153,18 @@ def test_flow_matching_conditional( 
fm(data_loader_gaussian_conditional, data_loader_gaussian_conditional) batch = next(data_loader_gaussian_conditional) - result_forward = fm.transport(batch["source_lin"], condition=batch["source_conditions"], forward=True) - assert isinstance(result_forward, jax.Array) + result_forward = fm.transport( + batch["source_lin"], condition=batch["source_conditions"], forward=True + ) + assert isinstance(result_forward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_forward)) == 0 - result_backward = fm.transport(batch["target_lin"], condition=batch["target_conditions"], forward=False) - assert isinstance(result_backward, jax.Array) + result_backward = fm.transport( + batch["target_lin"], + condition=batch["target_conditions"], + forward=False + ) + assert isinstance(result_backward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_backward)) == 0 @pytest.mark.parametrize("conditional", [False, True]) @@ -190,10 +207,14 @@ def test_flow_matching_learn_rescaling( ) fm(data_loader, data_loader) - result_eta = fm.evaluate_eta(batch["source_lin"], condition=batch["source_conditions"]) - assert isinstance(result_eta, jax.Array) + result_eta = fm.evaluate_eta( + batch["source_lin"], condition=batch["source_conditions"] + ) + assert isinstance(result_eta, jnp.ndarray) assert jnp.sum(jnp.isnan(result_eta)) == 0 - result_xi = fm.evaluate_xi(batch["target_lin"], condition=batch["target_conditions"]) - assert isinstance(result_xi, jax.Array) + result_xi = fm.evaluate_xi( + batch["target_lin"], condition=batch["target_conditions"] + ) + assert isinstance(result_xi, jnp.ndarray) assert jnp.sum(jnp.isnan(result_xi)) == 0 diff --git a/tests/problems/linear/potentials_test.py b/tests/problems/linear/potentials_test.py index c9fa9cf17..dd5d4bbd6 100644 --- a/tests/problems/linear/potentials_test.py +++ b/tests/problems/linear/potentials_test.py @@ -36,7 +36,7 @@ def test_device_put(self): class TestEntropicPotentials: - def test_device_put(self, rng: jax.Array): + def test_device_put(self, rng: 
jnp.ndarray): n = 10 device = jax.devices()[0] rngs = jax.random.split(rng, 5) @@ -53,7 +53,7 @@ def test_device_put(self, rng: jax.Array): _ = jax.device_put(pot, device) @pytest.mark.fast.with_args(eps=[5e-2, 1e-1], only_fast=0) - def test_entropic_potentials_dist(self, rng: jax.Array, eps: float): + def test_entropic_potentials_dist(self, rng: jnp.ndarray, eps: float): n1, n2, d = 64, 96, 2 rng1, rng2, rng3, rng4 = jax.random.split(rng, 4) @@ -91,7 +91,7 @@ def test_entropic_potentials_dist(self, rng: jax.Array, eps: float): @pytest.mark.fast.with_args(forward=[False, True], only_fast=0) def test_entropic_potentials_displacement( - self, rng: jax.Array, forward: bool, monkeypatch + self, rng: jnp.ndarray, forward: bool, monkeypatch ): """Tests entropic displacements, as well as their plots.""" n1, n2, d = 96, 128, 2 @@ -134,7 +134,7 @@ def test_entropic_potentials_displacement( p=[1.3, 2.2, 1.0], forward=[False, True], only_fast=0 ) def test_entropic_potentials_sqpnorm( - self, rng: jax.Array, p: float, forward: bool + self, rng: jnp.ndarray, p: float, forward: bool ): epsilon = None cost_fn = costs.SqPNorm(p=p) @@ -174,7 +174,7 @@ def test_entropic_potentials_sqpnorm( p=[1.45, 2.2, 1.0], forward=[False, True], only_fast=0 ) def test_entropic_potentials_pnorm( - self, rng: jax.Array, p: float, forward: bool + self, rng: jnp.ndarray, p: float, forward: bool ): epsilon = None cost_fn = costs.PNormP(p=p) @@ -216,7 +216,7 @@ def test_entropic_potentials_pnorm( assert div < .1 * div_0 @pytest.mark.parametrize("jit", [False, True]) - def test_distance_differentiability(self, rng: jax.Array, jit: bool): + def test_distance_differentiability(self, rng: jnp.ndarray, jit: bool): rng1, rng2, rng3 = jax.random.split(rng, 3) n, m, d = 18, 36, 5 @@ -238,7 +238,7 @@ def test_distance_differentiability(self, rng: jax.Array, jit: bool): np.testing.assert_allclose(actual, expected, rtol=1e-4, atol=1e-4) @pytest.mark.parametrize("eps", [None, 1e-1, 1e1, 1e2, 1e3]) - def 
test_potentials_sinkhorn_divergence(self, rng: jax.Array, eps: float): + def test_potentials_sinkhorn_divergence(self, rng: jnp.ndarray, eps: float): rng1, rng2, rng3 = jax.random.split(rng, 3) n, m, d = 32, 36, 4 fwd = True diff --git a/tests/solvers/linear/continuous_barycenter_test.py b/tests/solvers/linear/continuous_barycenter_test.py index 5c7fabd67..4989cc1db 100644 --- a/tests/solvers/linear/continuous_barycenter_test.py +++ b/tests/solvers/linear/continuous_barycenter_test.py @@ -27,7 +27,7 @@ means_and_covs_to_x = jax.vmap(costs.mean_and_cov_to_x, in_axes=[0, 0, None]) -def is_positive_semidefinite(c: jax.Array) -> bool: +def is_positive_semidefinite(c: jnp.ndarray) -> bool: # GPU friendly, eigvals not implemented for non-symmetric matrices w = jnp.linalg.eigvalsh((c + c.T) / 2.0) return jnp.all(w >= 0) @@ -50,7 +50,7 @@ class TestBarycenter: }, ) def test_euclidean_barycenter( - self, rng: jax.Array, rank: int, epsilon: float, init_random: bool, + self, rng: jnp.ndarray, rank: int, epsilon: float, init_random: bool, jit: bool ): rngs = jax.random.split(rng, 20) @@ -115,12 +115,12 @@ def test_euclidean_barycenter( assert jnp.all(out.x.ravel() > .7) @pytest.mark.parametrize("segment_before", [False, True]) - def test_barycenter_jit(self, rng: jax.Array, segment_before: bool): + def test_barycenter_jit(self, rng: jnp.ndarray, segment_before: bool): @functools.partial(jax.jit, static_argnums=(2, 3)) def barycenter( - y: jax.Array, - b: jax.Array, + y: jnp.ndarray, + b: jnp.ndarray, segment_before: bool, num_per_segment: Tuple[int, ...], ) -> cb.FreeBarycenterState: @@ -170,7 +170,7 @@ def barycenter( @pytest.mark.fast() def test_bures_barycenter( self, - rng: jax.Array, + rng: jnp.ndarray, ): lse_mode = True, epsilon = 1e-1 @@ -256,7 +256,7 @@ def test_bures_barycenter( @pytest.mark.fast() def test_bures_barycenter_different_number_of_components( self, - rng: jax.Array, + rng: jnp.ndarray, ): alpha = 5. 
epsilon = 0.01 diff --git a/tests/solvers/linear/sinkhorn_diff_test.py b/tests/solvers/linear/sinkhorn_diff_test.py index a608c0d71..944534e14 100644 --- a/tests/solvers/linear/sinkhorn_diff_test.py +++ b/tests/solvers/linear/sinkhorn_diff_test.py @@ -29,7 +29,7 @@ class TestSinkhornImplicit: """Check implicit and autodiff match for Sinkhorn.""" @pytest.fixture(autouse=True) - def initialize(self, rng: jax.Array): + def initialize(self, rng: jnp.ndarray): self.dim = 3 self.n = 38 self.m = 73 @@ -49,7 +49,7 @@ def test_implicit_differentiation_versus_autodiff( ): epsilon = 0.05 - def loss_g(a: jax.Array, x: jax.Array, implicit: bool = True) -> float: + def loss_g(a: jnp.ndarray, x: jnp.ndarray, implicit: bool = True) -> float: implicit_diff = implicit_lib.ImplicitDiff() if implicit else None geom = geometry.Geometry( cost_matrix=jnp.sum(x ** 2, axis=1)[:, jnp.newaxis] + @@ -65,7 +65,9 @@ def loss_g(a: jax.Array, x: jax.Array, implicit: bool = True) -> float: ) return solver(prob).reg_ot_cost - def loss_pcg(a: jax.Array, x: jax.Array, implicit: bool = True) -> float: + def loss_pcg( + a: jnp.ndarray, x: jnp.ndarray, implicit: bool = True + ) -> float: implicit_diff = implicit_lib.ImplicitDiff() if implicit else None geom = pointcloud.PointCloud(x, self.y, epsilon=epsilon) prob = linear_problem.LinearProblem( @@ -135,7 +137,7 @@ class TestSinkhornJacobian: only_fast=0, ) def test_autograd_sinkhorn( - self, rng: jax.Array, lse_mode: bool, shape_data: Tuple[int, int] + self, rng: jnp.ndarray, lse_mode: bool, shape_data: Tuple[int, int] ): """Test gradient w.r.t. 
probability weights.""" n, m = shape_data @@ -152,7 +154,7 @@ def test_autograd_sinkhorn( a = a / jnp.sum(a) b = b / jnp.sum(b) - def reg_ot(a: jax.Array, b: jax.Array) -> float: + def reg_ot(a: jnp.ndarray, b: jnp.ndarray) -> float: geom = pointcloud.PointCloud(x, y, epsilon=1e-1) prob = linear_problem.LinearProblem(geom, a=a, b=b) solver = sinkhorn.Sinkhorn(lse_mode=lse_mode) @@ -178,7 +180,7 @@ def reg_ot(a: jax.Array, b: jax.Array) -> float: @pytest.mark.parametrize(("lse_mode", "shape_data"), [(True, (7, 9)), (False, (11, 5))]) def test_gradient_sinkhorn_geometry( - self, rng: jax.Array, lse_mode: bool, shape_data: Tuple[int, int] + self, rng: jnp.ndarray, lse_mode: bool, shape_data: Tuple[int, int] ): """Test gradient w.r.t. cost matrix.""" n, m = shape_data @@ -188,7 +190,7 @@ def test_gradient_sinkhorn_geometry( delta = delta / jnp.sqrt(jnp.vdot(delta, delta)) eps = 1e-3 # perturbation magnitude - def loss_fn(cm: jax.Array): + def loss_fn(cm: jnp.ndarray): a = jnp.ones(cm.shape[0]) / cm.shape[0] b = jnp.ones(cm.shape[1]) / cm.shape[1] geom = geometry.Geometry(cm, epsilon=0.5) @@ -241,7 +243,7 @@ def loss_fn(cm: jax.Array): only_fast=[0, 1], ) def test_gradient_sinkhorn_euclidean( - self, rng: jax.Array, lse_mode: bool, implicit: bool, min_iter: int, + self, rng: jnp.ndarray, lse_mode: bool, implicit: bool, min_iter: int, max_iter: int, epsilon: float, cost_fn: costs.CostFn ): """Test gradient w.r.t. locations x of reg-ot-cost.""" @@ -262,8 +264,8 @@ def test_gradient_sinkhorn_euclidean( # Adding some near-zero distances to test proper handling with p_norm=1. 
y = y.at[0].set(x[0, :] + 1e-3) - def loss_fn(x: jax.Array, - y: jax.Array) -> Tuple[float, sinkhorn.SinkhornOutput]: + def loss_fn(x: jnp.ndarray, + y: jnp.ndarray) -> Tuple[float, sinkhorn.SinkhornOutput]: implicit_diff = implicit_lib.ImplicitDiff() if implicit else None geom = pointcloud.PointCloud(x, y, epsilon=epsilon, cost_fn=cost_fn) prob = linear_problem.LinearProblem(geom, a, b) @@ -315,10 +317,10 @@ def loss_fn(x: jax.Array, ) np.testing.assert_array_equal(jnp.isnan(custom_grad), False) - def test_autoepsilon_differentiability(self, rng: jax.Array): + def test_autoepsilon_differentiability(self, rng: jnp.ndarray): cost = jax.random.uniform(rng, (15, 17)) - def reg_ot_cost(c: jax.Array) -> float: + def reg_ot_cost(c: jnp.ndarray) -> float: geom = geometry.Geometry(c, epsilon=None) # auto epsilon prob = linear_problem.LinearProblem(geom) return sinkhorn.Sinkhorn()(prob).reg_ot_cost @@ -327,9 +329,9 @@ def reg_ot_cost(c: jax.Array) -> float: np.testing.assert_array_equal(jnp.isnan(gradient), False) @pytest.mark.fast() - def test_differentiability_with_jit(self, rng: jax.Array): + def test_differentiability_with_jit(self, rng: jnp.ndarray): - def reg_ot_cost(c: jax.Array) -> float: + def reg_ot_cost(c: jnp.ndarray) -> float: geom = geometry.Geometry(c, epsilon=1e-2) prob = linear_problem.LinearProblem(geom) return sinkhorn.Sinkhorn()(prob).reg_ot_cost @@ -345,7 +347,7 @@ def reg_ot_cost(c: jax.Array) -> float: only_fast=0 ) def test_apply_transport_jacobian( - self, rng: jax.Array, lse_mode: bool, tau_a: float, tau_b: float, + self, rng: jnp.ndarray, lse_mode: bool, tau_a: float, tau_b: float, arg: int, axis: int ): """Tests Jacobian of application of OT to vector, w.r.t. @@ -383,7 +385,7 @@ def test_apply_transport_jacobian( # general rule, even more so when using backprop. 
epsilon = 0.01 if lse_mode else 0.1 - def apply_ot(a: jax.Array, x: jax.Array, implicit: bool) -> jax.Array: + def apply_ot(a: jnp.ndarray, x: jnp.ndarray, implicit: bool) -> jnp.ndarray: geom = pointcloud.PointCloud(x, y, epsilon=epsilon) prob = linear_problem.LinearProblem(geom, a, b, tau_a=tau_a, tau_b=tau_b) @@ -457,7 +459,7 @@ def apply_ot(a: jax.Array, x: jax.Array, implicit: bool) -> jax.Array: only_fast=0, ) def test_potential_jacobian_sinkhorn( - self, rng: jax.Array, lse_mode: bool, tau_a: float, tau_b: float, + self, rng: jnp.ndarray, lse_mode: bool, tau_a: float, tau_b: float, shape: Tuple[int, int], arg: int ): """Test Jacobian of optimal potential w.r.t. weights and locations.""" @@ -486,7 +488,7 @@ def test_potential_jacobian_sinkhorn( # with small epsilon when differentiating. epsilon = 0.01 if lse_mode else 0.1 - def loss_from_potential(a: jax.Array, x: jax.Array, implicit: bool): + def loss_from_potential(a: jnp.ndarray, x: jnp.ndarray, implicit: bool): geom = pointcloud.PointCloud(x, y, epsilon=epsilon) prob = linear_problem.LinearProblem(geom, a, b, tau_a=tau_a, tau_b=tau_b) @@ -539,7 +541,7 @@ class TestSinkhornGradGrid: @pytest.mark.parametrize("lse_mode", [False, True]) def test_diff_sinkhorn_x_grid_x_perturbation( - self, rng: jax.Array, lse_mode: bool + self, rng: jnp.ndarray, lse_mode: bool ): """Test gradient w.r.t. 
probability weights.""" eps = 1e-3 # perturbation magnitude @@ -554,7 +556,7 @@ def test_diff_sinkhorn_x_grid_x_perturbation( a = a.ravel() / jnp.sum(a) b = b.ravel() / jnp.sum(b) - def reg_ot(x: List[jax.Array]) -> float: + def reg_ot(x: List[jnp.ndarray]) -> float: geom = grid.Grid(x=x, epsilon=1.0) prob = linear_problem.LinearProblem(geom, a=a, b=b) solver = sinkhorn.Sinkhorn(threshold=1e-1, lse_mode=lse_mode) @@ -584,7 +586,7 @@ def reg_ot(x: List[jax.Array]) -> float: @pytest.mark.parametrize("lse_mode", [False, True]) def test_diff_sinkhorn_x_grid_weights_perturbation( - self, rng: jax.Array, lse_mode: bool + self, rng: jnp.ndarray, lse_mode: bool ): """Test gradient w.r.t. probability weights.""" eps = 1e-4 # perturbation magnitude @@ -603,7 +605,7 @@ def test_diff_sinkhorn_x_grid_weights_perturbation( b = b.ravel() / jnp.sum(b) geom = grid.Grid(x=x, epsilon=1) - def reg_ot(a: jax.Array, b: jax.Array) -> float: + def reg_ot(a: jnp.ndarray, b: jnp.ndarray) -> float: prob = linear_problem.LinearProblem(geom, a, b) solver = sinkhorn.Sinkhorn(threshold=1e-3, lse_mode=lse_mode) return solver(prob).reg_ot_cost @@ -635,7 +637,7 @@ class TestSinkhornJacobianPreconditioning: only_fast=[0, -1], ) def test_potential_jacobian_sinkhorn_precond( - self, rng: jax.Array, lse_mode: bool, tau_a: float, tau_b: float, + self, rng: jnp.ndarray, lse_mode: bool, tau_a: float, tau_b: float, shape: Tuple[int, int], arg: int ): """Test Jacobian of optimal potential works across 2 precond_fun.""" @@ -665,9 +667,9 @@ def test_potential_jacobian_sinkhorn_precond( epsilon = 0.05 if lse_mode else 0.1 def loss_from_potential( - a: jax.Array, - x: jax.Array, - precondition_fun: Optional[Callable[[jax.Array], jax.Array]] = None, + a: jnp.ndarray, + x: jnp.ndarray, + precondition_fun: Optional[Callable[[jnp.ndarray], jnp.ndarray]] = None, symmetric: bool = False ) -> float: geom = pointcloud.PointCloud(x, y, epsilon=epsilon) @@ -738,7 +740,7 @@ class TestSinkhornHessian: only_fast=-1 ) def 
test_hessian_sinkhorn( - self, rng: jax.Array, lse_mode: bool, tau_a: float, tau_b: float, + self, rng: jnp.ndarray, lse_mode: bool, tau_a: float, tau_b: float, arg: int, lineax_ridge: float ): """Test hessian w.r.t. weights and locations.""" @@ -769,7 +771,7 @@ def test_hessian_sinkhorn( imp_dif = implicit_lib.ImplicitDiff(solver_kwargs=solver_kwargs) - def loss(a: jax.Array, x: jax.Array, implicit: bool = True): + def loss(a: jnp.ndarray, x: jnp.ndarray, implicit: bool = True): geom = pointcloud.PointCloud(x, y, epsilon=epsilon) prob = linear_problem.LinearProblem(geom, a, b, tau_a, tau_b) implicit_diff = imp_dif if implicit else None diff --git a/tests/solvers/linear/sinkhorn_grid_test.py b/tests/solvers/linear/sinkhorn_grid_test.py index b2aa4da3e..dd22f63b7 100644 --- a/tests/solvers/linear/sinkhorn_grid_test.py +++ b/tests/solvers/linear/sinkhorn_grid_test.py @@ -25,7 +25,7 @@ class TestSinkhornGrid: @pytest.mark.parametrize("lse_mode", [False, True]) - def test_separable_grid(self, rng: jax.Array, lse_mode: bool): + def test_separable_grid(self, rng: jnp.ndarray, lse_mode: bool): """Two histograms in a grid of size 5 x 6 x 7 in the hypercube^3.""" grid_size = (5, 6, 7) rngs = jax.random.split(rng, 2) @@ -46,7 +46,7 @@ def test_separable_grid(self, rng: jax.Array, lse_mode: bool): assert threshold > err @pytest.mark.fast.with_args("lse_mode", [False, True], only_fast=0) - def test_grid_vs_euclidean(self, rng: jax.Array, lse_mode: bool): + def test_grid_vs_euclidean(self, rng: jnp.ndarray, lse_mode: bool): grid_size = (5, 6, 7) rngs = jax.random.split(rng, 2) a = jax.random.uniform(rngs[0], grid_size) @@ -69,7 +69,7 @@ def test_grid_vs_euclidean(self, rng: jax.Array, lse_mode: bool): ) @pytest.mark.fast.with_args("lse_mode", [False, True], only_fast=1) - def test_apply_transport_grid(self, rng: jax.Array, lse_mode: bool): + def test_apply_transport_grid(self, rng: jnp.ndarray, lse_mode: bool): grid_size = (5, 6, 7) rngs = jax.random.split(rng, 4) a = 
jax.random.uniform(rngs[0], grid_size) @@ -118,7 +118,7 @@ def test_apply_transport_grid(self, rng: jax.Array, lse_mode: bool): np.testing.assert_array_equal(jnp.isnan(mat_transport_t_vec_a), False) @pytest.mark.fast() - def test_apply_cost(self, rng: jax.Array): + def test_apply_cost(self, rng: jnp.ndarray): grid_size = (5, 6, 7) geom_grid = grid.Grid(grid_size=grid_size, epsilon=0.1) diff --git a/tests/solvers/linear/sinkhorn_lr_test.py b/tests/solvers/linear/sinkhorn_lr_test.py index 9b360bdf0..90b149ea8 100644 --- a/tests/solvers/linear/sinkhorn_lr_test.py +++ b/tests/solvers/linear/sinkhorn_lr_test.py @@ -26,7 +26,7 @@ class TestLRSinkhorn: @pytest.fixture(autouse=True) - def initialize(self, rng: jax.Array): + def initialize(self, rng: jnp.ndarray): self.dim = 4 self.n = 23 self.m = 27 diff --git a/tests/solvers/linear/sinkhorn_misc_test.py b/tests/solvers/linear/sinkhorn_misc_test.py index e88ff4d0c..e97a34228 100644 --- a/tests/solvers/linear/sinkhorn_misc_test.py +++ b/tests/solvers/linear/sinkhorn_misc_test.py @@ -36,7 +36,7 @@ class TestSinkhornAnderson: only_fast=0, ) def test_anderson( - self, rng: jax.Array, lse_mode: bool, tau_a: float, tau_b: float + self, rng: jnp.ndarray, lse_mode: bool, tau_a: float, tau_b: float ): """Test efficiency of Anderson acceleration. 
@@ -128,7 +128,7 @@ def initialize(self): @pytest.mark.parametrize(("unbalanced", "thresh"), [(False, 1e-3), (True, 1e-4)]) def test_bures_point_cloud( - self, rng: jax.Array, lse_mode: bool, unbalanced: bool, thresh: float + self, rng: jnp.ndarray, lse_mode: bool, unbalanced: bool, thresh: float ): """Two point clouds of Gaussians, tested with various parameters.""" if unbalanced: @@ -169,7 +169,7 @@ def test_regularized_unbalanced_bures_cost(self): class TestSinkhornOnline: @pytest.fixture(autouse=True) - def initialize(self, rng: jax.Array): + def initialize(self, rng: jnp.ndarray): self.dim = 3 self.n = 100 self.m = 42 @@ -234,7 +234,7 @@ def callback(epsilon: float, batch_size: int) -> sinkhorn.SinkhornOutput: class TestSinkhornUnbalanced: @pytest.fixture(autouse=True) - def initialize(self, rng: jax.Array): + def initialize(self, rng: jnp.ndarray): self.dim = 4 self.n = 17 self.m = 23 @@ -315,7 +315,7 @@ class TestSinkhornJIT: """Check jitted and non jit match for Sinkhorn, and that everything jits.""" @pytest.fixture(autouse=True) - def initialize(self, rng: jax.Array): + def initialize(self, rng: jnp.ndarray): self.dim = 3 self.n = 10 self.m = 11 @@ -346,10 +346,12 @@ def assert_output_close( ) -> None: """Assert SinkhornOutputs are close.""" x = tuple( - a for a in x if (a is not None and (isinstance(a, (jax.Array, int)))) + a for a in x + if (a is not None and (isinstance(a, (jnp.ndarray, int)))) ) y = tuple( - a for a in y if (a is not None and (isinstance(a, (jax.Array, int)))) + a for a in y + if (a is not None and (isinstance(a, (jnp.ndarray, int)))) ) return chex.assert_trees_all_close(x, y, atol=1e-6, rtol=0) @@ -362,7 +364,7 @@ def assert_output_close( def test_jit_vs_non_jit_bwd(self, implicit: bool): @jax.value_and_grad - def val_grad(a: jax.Array, x: jax.Array) -> float: + def val_grad(a: jnp.ndarray, x: jnp.ndarray) -> float: implicit_diff = implicit_lib.ImplicitDiff() if implicit else None geom = geometry.Geometry( cost_matrix=( diff --git 
a/tests/solvers/linear/sinkhorn_test.py b/tests/solvers/linear/sinkhorn_test.py index ce7f9919a..c7475c4f3 100644 --- a/tests/solvers/linear/sinkhorn_test.py +++ b/tests/solvers/linear/sinkhorn_test.py @@ -30,7 +30,7 @@ class TestSinkhorn: @pytest.fixture(autouse=True) - def initialize(self, rng: jax.Array): + def initialize(self, rng: jnp.ndarray): self.rng = rng self.dim = 4 self.n = 17 diff --git a/tests/solvers/linear/univariate_test.py b/tests/solvers/linear/univariate_test.py index 1a5529167..47a34f7ce 100644 --- a/tests/solvers/linear/univariate_test.py +++ b/tests/solvers/linear/univariate_test.py @@ -25,7 +25,7 @@ class TestUnivariate: @pytest.fixture(autouse=True) - def initialize(self, rng: jax.Array): + def initialize(self, rng: jnp.ndarray): self.rng = rng self.n = 17 self.m = 29 @@ -86,7 +86,7 @@ def test_cdf_distance_and_scipy(self): @pytest.mark.fast() def test_cdf_grad( self, - rng: jax.Array, + rng: jnp.ndarray, ): # TODO: Once a `check_grad` function is implemented, replace the code # blocks before with `check_grad`'s. 
diff --git a/tests/solvers/quadratic/fgw_test.py b/tests/solvers/quadratic/fgw_test.py index 508fedcb2..10361d088 100644 --- a/tests/solvers/quadratic/fgw_test.py +++ b/tests/solvers/quadratic/fgw_test.py @@ -29,7 +29,7 @@ class TestFusedGromovWasserstein: # TODO(michalk8): refactor me in the future @pytest.fixture(autouse=True) - def initialize(self, rng: jax.Array): + def initialize(self, rng: jnp.ndarray): d_x = 2 d_y = 3 d_xy = 4 @@ -56,7 +56,7 @@ def test_gradient_marginals_fgw_solver(self, jit: bool): geom_y = pointcloud.PointCloud(self.y) geom_xy = pointcloud.PointCloud(self.x_2, self.y_2) - def reg_gw(a: jax.Array, b: jax.Array, implicit: bool): + def reg_gw(a: jnp.ndarray, b: jnp.ndarray, implicit: bool): prob = quadratic_problem.QuadraticProblem( geom_x, geom_y, geom_xy, fused_penalty=self.fused_penalty, a=a, b=b ) @@ -101,9 +101,9 @@ def test_gradient_fgw_solver_geometry(self, lse_mode: bool, is_cost: bool): """Test gradient w.r.t. the geometries.""" def reg_gw( - x: jax.Array, y: jax.Array, xy: Union[jax.Array, Tuple[jax.Array, - jax.Array]], - fused_penalty: float, a: jax.Array, b: jax.Array, implicit: bool + x: jnp.ndarray, y: jnp.ndarray, + xy: Union[jnp.ndarray, Tuple[jnp.ndarray, jnp.ndarray]], + fused_penalty: float, a: jnp.ndarray, b: jnp.ndarray, implicit: bool ): if is_cost: geom_x = geometry.Geometry(cost_matrix=x) @@ -182,8 +182,8 @@ def test_gradient_fgw_solver_penalty(self): lse_mode = True def reg_gw( - cx: jax.Array, cy: jax.Array, cxy: jax.Array, fused_penalty: float, - a: jax.Array, b: jax.Array, implicit: bool + cx: jnp.ndarray, cy: jnp.ndarray, cxy: jnp.ndarray, + fused_penalty: float, a: jnp.ndarray, b: jnp.ndarray, implicit: bool ) -> float: geom_x = geometry.Geometry(cost_matrix=cx) geom_y = geometry.Geometry(cost_matrix=cy) @@ -216,7 +216,7 @@ def reg_gw( @pytest.mark.limit_memory("200 MB") @pytest.mark.parametrize("jit", [False, True]) - def test_fgw_lr_memory(self, rng: jax.Array, jit: bool): + def test_fgw_lr_memory(self, rng: 
jnp.ndarray, jit: bool): rngs = jax.random.split(rng, 4) n, m, d1, d2 = 5_000, 2_500, 1, 2 x = jax.random.uniform(rngs[0], (n, d1)) @@ -243,7 +243,7 @@ def test_fgw_lr_memory(self, rng: jax.Array, jit: bool): @pytest.mark.parametrize("cost_rank", [4, (2, 3, 4)]) def test_fgw_lr_generic_cost_matrix( - self, rng: jax.Array, cost_rank: Union[int, Tuple[int, int, int]] + self, rng: jnp.ndarray, cost_rank: Union[int, Tuple[int, int, int]] ): n, m = 20, 30 rng1, rng2, rng3, rng4 = jax.random.split(rng, 4) diff --git a/tests/solvers/quadratic/gw_barycenter_test.py b/tests/solvers/quadratic/gw_barycenter_test.py index d5dadd691..a157f27e5 100644 --- a/tests/solvers/quadratic/gw_barycenter_test.py +++ b/tests/solvers/quadratic/gw_barycenter_test.py @@ -31,7 +31,7 @@ class TestGWBarycenter: def random_pc( n: int, d: int, - rng: jax.Array, + rng: jnp.ndarray, m: Optional[int] = None, **kwargs: Any ) -> pointcloud.PointCloud: @@ -42,9 +42,9 @@ def random_pc( @staticmethod def pad_cost_matrices( - costs: Sequence[jax.Array], + costs: Sequence[jnp.ndarray], shape: Optional[Tuple[int, int]] = None - ) -> Tuple[jax.Array, jax.Array]: + ) -> Tuple[jnp.ndarray, jnp.ndarray]: if shape is None: shape = jnp.asarray([arr.shape for arr in costs]).max() shape = (shape, shape) @@ -65,7 +65,7 @@ def pad_cost_matrices( [("sqeucl", 17, None)] # , ("kl", 22, 1e-2)] ) def test_gw_barycenter( - self, rng: jax.Array, gw_loss: str, bar_size: int, + self, rng: jnp.ndarray, gw_loss: str, bar_size: int, epsilon: Optional[float] ): tol = 1e-3 if gw_loss == "sqeucl" else 1e-1 @@ -126,14 +126,14 @@ def test_gw_barycenter( ) def test_fgw_barycenter( self, - rng: jax.Array, + rng: jnp.ndarray, jit: bool, fused_penalty: float, scale_cost: str, ): def barycenter( - y: jnp.ndim, y_fused: jax.Array, num_per_segment: Tuple[int, ...] + y: jnp.ndim, y_fused: jnp.ndarray, num_per_segment: Tuple[int, ...] 
) -> gwb_solver.GWBarycenterState: prob = gwb.GWBarycenterProblem( y=y, diff --git a/tests/solvers/quadratic/gw_test.py b/tests/solvers/quadratic/gw_test.py index e7d0ff106..e7ef7b558 100644 --- a/tests/solvers/quadratic/gw_test.py +++ b/tests/solvers/quadratic/gw_test.py @@ -31,7 +31,7 @@ class TestQuadraticProblem: @pytest.mark.parametrize("as_pc", [False, True]) @pytest.mark.parametrize("rank", [-1, 5, (1, 2, 3), (2, 3, 5)]) def test_quad_to_low_rank( - self, rng: jax.Array, as_pc: bool, rank: Union[int, Tuple[int, ...]] + self, rng: jnp.ndarray, as_pc: bool, rank: Union[int, Tuple[int, ...]] ): n, m, d1, d2, d = 100, 120, 4, 6, 10 rng1, rng2, rng3, rng4 = jax.random.split(rng, 4) @@ -87,7 +87,7 @@ def test_quad_to_low_rank( assert lr_prob._is_low_rank_convertible assert lr_prob.to_low_rank() is lr_prob - def test_gw_implicit_conversion_mixed_input(self, rng: jax.Array): + def test_gw_implicit_conversion_mixed_input(self, rng: jnp.ndarray): n, m, d1, d2 = 13, 77, 3, 4 rng1, rng2 = jax.random.split(rng, 2) x = jax.random.normal(rng1, (n, d1)) @@ -107,7 +107,7 @@ def test_gw_implicit_conversion_mixed_input(self, rng: jax.Array): class TestGromovWasserstein: @pytest.fixture(autouse=True) - def initialize(self, rng: jax.Array): + def initialize(self, rng: jnp.ndarray): d_x = 2 d_y = 3 self.n, self.m = 6, 7 @@ -156,8 +156,8 @@ def test_flag_store_errors(self): def test_gradient_marginals_gw(self, jit: bool): """Test gradient w.r.t. probability weights.""" - def reg_gw(a: jax.Array, b: jax.Array, - implicit: bool) -> Tuple[float, Tuple[jax.Array, jax.Array]]: + def reg_gw(a: jnp.ndarray, b: jnp.ndarray, + implicit: bool) -> Tuple[float, Tuple[jnp.ndarray, jnp.ndarray]]: prob = quadratic_problem.QuadraticProblem(geom_x, geom_y, a=a, b=b) implicit_diff = implicit_lib.ImplicitDiff() if implicit else None linear_solver = sinkhorn.Sinkhorn( @@ -245,7 +245,8 @@ def test_gradient_gw_geometry( """Test gradient w.r.t. 
the geometries.""" def reg_gw( - x: jax.Array, y: jax.Array, a: jax.Array, b: jax.Array, implicit: bool + x: jnp.ndarray, y: jnp.ndarray, a: jnp.ndarray, b: jnp.ndarray, + implicit: bool ) -> float: if is_cost: geom_x = geometry.Geometry(cost_matrix=x) @@ -309,7 +310,7 @@ def loss_thre(threshold: float) -> float: assert loss_thre(1e-3) >= loss_thre(1e-5) @pytest.mark.fast() - def test_gw_lr(self, rng: jax.Array): + def test_gw_lr(self, rng: jnp.ndarray): """Checking LR and Entropic have similar outputs on same problem.""" rngs = jax.random.split(rng, 4) n, m, d1, d2 = 24, 17, 2, 3 @@ -333,7 +334,7 @@ def test_gw_lr(self, rng: jax.Array): ot_gwlr.primal_cost, ot_gw.primal_cost, rtol=5e-2 ) - def test_gw_lr_matches_fused(self, rng: jax.Array): + def test_gw_lr_matches_fused(self, rng: jnp.ndarray): """Checking LR and Entropic have similar outputs on same fused problem.""" rngs = jax.random.split(rng, 5) n, m, d1, d2 = 24, 17, 2, 3 @@ -384,7 +385,7 @@ def test_gw_lr_apply(self, axis: int): @pytest.mark.parametrize("scale_cost", [1.0, "mean"]) def test_relative_epsilon( self, - rng: jax.Array, + rng: jnp.ndarray, scale_cost: Union[float, str], ): eps = 1e-2 diff --git a/tests/solvers/quadratic/lower_bound_test.py b/tests/solvers/quadratic/lower_bound_test.py index bf32aad87..68f7a804a 100644 --- a/tests/solvers/quadratic/lower_bound_test.py +++ b/tests/solvers/quadratic/lower_bound_test.py @@ -31,7 +31,7 @@ class TestLowerBoundSolver: @pytest.fixture(autouse=True) - def initialize(self, rng: jax.Array): + def initialize(self, rng: jnp.ndarray): d_x = 2 d_y = 3 self.n, self.m = 13, 15 @@ -118,11 +118,11 @@ def test_lb_pointcloud( ] ) def test_lb_grad( - self, rng: jax.Array, sort_fn: Callable[[jax.Array], jax.Array], + self, rng: jnp.ndarray, sort_fn: Callable[[jnp.ndarray], jnp.ndarray], method: str ): - def fn(x: jax.Array, y: jax.Array) -> float: + def fn(x: jnp.ndarray, y: jnp.ndarray) -> float: geom_x = pointcloud.PointCloud(x) geom_y = pointcloud.PointCloud(y) 
prob = quadratic_problem.QuadraticProblem(geom_x, geom_y) diff --git a/tests/tools/gaussian_mixture/fit_gmm_pair_test.py b/tests/tools/gaussian_mixture/fit_gmm_pair_test.py index 8f43eaa4e..75fc3bef5 100644 --- a/tests/tools/gaussian_mixture/fit_gmm_pair_test.py +++ b/tests/tools/gaussian_mixture/fit_gmm_pair_test.py @@ -29,7 +29,7 @@ class TestFitGmmPair: @pytest.fixture(autouse=True) - def initialize(self, rng: jax.Array): + def initialize(self, rng: jnp.ndarray): mean_generator0 = jnp.array([[2., -1.], [-2., 0.], [4., 3.]]) cov_generator0 = jnp.array([[[0.2, 0.], [0., 0.1]], [[0.6, 0.], [0., 0.3]], [[0.5, 0.4], [0.4, 0.5]]]) diff --git a/tests/tools/gaussian_mixture/fit_gmm_test.py b/tests/tools/gaussian_mixture/fit_gmm_test.py index e39633b19..1cfb4f95e 100644 --- a/tests/tools/gaussian_mixture/fit_gmm_test.py +++ b/tests/tools/gaussian_mixture/fit_gmm_test.py @@ -23,7 +23,7 @@ class TestFitGmm: @pytest.fixture(autouse=True) - def initialize(self, rng: jax.Array): + def initialize(self, rng: jnp.ndarray): mean_generator = jnp.array([[2., -1.], [-2., 0.], [4., 3.]]) cov_generator = jnp.array([[[0.2, 0.], [0., 0.1]], [[0.6, 0.], [0., 0.3]], [[0.5, 0.4], [0.4, 0.5]]]) diff --git a/tests/tools/gaussian_mixture/gaussian_mixture_pair_test.py b/tests/tools/gaussian_mixture/gaussian_mixture_pair_test.py index ccf1e50cd..bf2b01699 100644 --- a/tests/tools/gaussian_mixture/gaussian_mixture_pair_test.py +++ b/tests/tools/gaussian_mixture/gaussian_mixture_pair_test.py @@ -22,7 +22,7 @@ class TestGaussianMixturePair: @pytest.fixture(autouse=True) - def initialize(self, rng: jax.Array): + def initialize(self, rng: jnp.ndarray): self.n_components = 3 self.n_dimensions = 2 self.epsilon = 1.e-3 diff --git a/tests/tools/gaussian_mixture/gaussian_mixture_test.py b/tests/tools/gaussian_mixture/gaussian_mixture_test.py index af52860be..fd7675d51 100644 --- a/tests/tools/gaussian_mixture/gaussian_mixture_test.py +++ b/tests/tools/gaussian_mixture/gaussian_mixture_test.py @@ -23,7 
+23,7 @@ class TestGaussianMixture: def test_get_summary_stats_from_points_and_assignment_probs( - self, rng: jax.Array + self, rng: jnp.ndarray ): n = 50 rng, subrng0, subrng1 = jax.random.split(rng, num=3) @@ -56,7 +56,7 @@ def test_get_summary_stats_from_points_and_assignment_probs( np.testing.assert_allclose(expected_cov, cov, atol=1e-4, rtol=1e-4) np.testing.assert_allclose(expected_wt, comp_wt, atol=1e-4, rtol=1e-4) - def test_from_random(self, rng: jax.Array): + def test_from_random(self, rng: jnp.ndarray): gmm = gaussian_mixture.GaussianMixture.from_random( rng=rng, n_components=3, n_dimensions=2 ) @@ -79,7 +79,7 @@ def test_from_mean_cov_component_weights(self,): comp_wts, gmm.component_weights, atol=1e-4, rtol=1e-4 ) - def test_covariance(self, rng: jax.Array): + def test_covariance(self, rng: jnp.ndarray): gmm = gaussian_mixture.GaussianMixture.from_random( rng=rng, n_components=3, n_dimensions=2 ) @@ -89,7 +89,7 @@ def test_covariance(self, rng: jax.Array): cov[i], component.covariance(), atol=1e-4, rtol=1e-4 ) - def test_sample(self, rng: jax.Array): + def test_sample(self, rng: jnp.ndarray): gmm = gaussian_mixture.GaussianMixture.from_mean_cov_component_weights( mean=jnp.array([[-1., 0.], [1., 0.]]), cov=jnp.array([[[0.01, 0.], [0., 0.01]], [[0.01, 0.], [0., 0.01]]]), @@ -111,7 +111,7 @@ def test_sample(self, rng: jax.Array): atol=1.e-1 ) - def test_log_prob(self, rng: jax.Array): + def test_log_prob(self, rng: jnp.ndarray): n_components = 3 size = 100 subrng0, subrng1 = jax.random.split(rng, num=2) @@ -135,7 +135,7 @@ def test_log_prob(self, rng: jax.Array): np.testing.assert_allclose(expected, actual, atol=1e-4, rtol=1e-4) - def test_log_component_posterior(self, rng: jax.Array): + def test_log_component_posterior(self, rng: jnp.ndarray): gmm = gaussian_mixture.GaussianMixture.from_random( rng=rng, n_components=3, n_dimensions=2 ) @@ -149,7 +149,7 @@ def test_log_component_posterior(self, rng: jax.Array): expected, 
gmm.get_log_component_posterior(x), atol=1e-4, rtol=1e-4 ) - def test_flatten_unflatten(self, rng: jax.Array): + def test_flatten_unflatten(self, rng: jnp.ndarray): gmm = gaussian_mixture.GaussianMixture.from_random( rng=rng, n_components=3, n_dimensions=2 ) @@ -158,7 +158,7 @@ def test_flatten_unflatten(self, rng: jax.Array): assert gmm == gmm_new - def test_pytree_mapping(self, rng: jax.Array): + def test_pytree_mapping(self, rng: jnp.ndarray): gmm = gaussian_mixture.GaussianMixture.from_random( rng=rng, n_components=3, n_dimensions=2 ) diff --git a/tests/tools/gaussian_mixture/gaussian_test.py b/tests/tools/gaussian_mixture/gaussian_test.py index 8b720861c..0eac630e3 100644 --- a/tests/tools/gaussian_mixture/gaussian_test.py +++ b/tests/tools/gaussian_mixture/gaussian_test.py @@ -22,7 +22,7 @@ @pytest.mark.fast() class TestGaussian: - def test_from_random(self, rng: jax.Array): + def test_from_random(self, rng: jnp.ndarray): g = gaussian.Gaussian.from_random(rng=rng, n_dimensions=3) np.testing.assert_array_equal(g.loc.shape, (3,)) @@ -36,7 +36,7 @@ def test_from_mean_and_cov(self): np.testing.assert_array_equal(mean, g.loc) np.testing.assert_allclose(cov, g.covariance(), atol=1e-4, rtol=1e-4) - def test_to_z(self, rng: jax.Array): + def test_to_z(self, rng: jnp.ndarray): g = gaussian.Gaussian( loc=jnp.array([1., 2.]), scale=scale_tril.ScaleTriL( @@ -52,7 +52,7 @@ def test_to_z(self, rng: jax.Array): np.testing.assert_allclose(sample_mean, jnp.zeros(2), atol=0.1) np.testing.assert_allclose(sample_cov, jnp.eye(2), atol=0.1) - def test_from_z(self, rng: jax.Array): + def test_from_z(self, rng: jnp.ndarray): g = gaussian.Gaussian( loc=jnp.array([0., 0.]), scale=scale_tril.ScaleTriL( @@ -64,7 +64,7 @@ def test_from_z(self, rng: jax.Array): xnew = g.from_z(z) np.testing.assert_allclose(x, xnew, atol=1e-4, rtol=1e-4) - def test_log_prob(self, rng: jax.Array): + def test_log_prob(self, rng: jnp.ndarray): g = gaussian.Gaussian( loc=jnp.array([0., 0.]), 
scale=scale_tril.ScaleTriL( @@ -78,7 +78,7 @@ def test_log_prob(self, rng: jax.Array): ) np.testing.assert_allclose(expected, actual, atol=1e-5, rtol=1e-5) - def test_sample(self, rng: jax.Array): + def test_sample(self, rng: jnp.ndarray): mean = jnp.array([1., 2.]) cov = jnp.diag(jnp.array([1., 4.])) g = gaussian.Gaussian.from_mean_and_cov(mean, cov) @@ -89,7 +89,7 @@ def test_sample(self, rng: jax.Array): np.testing.assert_allclose(sample_mean, mean, atol=3. * 2. / 100.) np.testing.assert_allclose(sample_cov, cov, atol=2e-1) - def test_w2_dist(self, rng: jax.Array): + def test_w2_dist(self, rng: jnp.ndarray): # make sure distance between a random normal and itself is 0 rng, subrng = jax.random.split(rng) n = gaussian.Gaussian.from_random(rng=subrng, n_dimensions=3) @@ -118,7 +118,7 @@ def test_w2_dist(self, rng: jax.Array): expected = delta_mean + delta_sigma np.testing.assert_allclose(expected, w2, rtol=1e-6, atol=1e-6) - def test_transport(self, rng: jax.Array): + def test_transport(self, rng: jnp.ndarray): diag0 = jnp.array([1.]) diag1 = jnp.array([4.]) g0 = gaussian.Gaussian( @@ -134,14 +134,14 @@ def test_transport(self, rng: jax.Array): expected = 2. * points + 1. 
np.testing.assert_allclose(expected, actual, atol=1e-5, rtol=1e-5) - def test_flatten_unflatten(self, rng: jax.Array): + def test_flatten_unflatten(self, rng: jnp.ndarray): g = gaussian.Gaussian.from_random(rng, n_dimensions=3) children, aux_data = jax.tree_util.tree_flatten(g) g_new = jax.tree_util.tree_unflatten(aux_data, children) assert g == g_new - def test_pytree_mapping(self, rng: jax.Array): + def test_pytree_mapping(self, rng: jnp.ndarray): g = gaussian.Gaussian.from_random(rng, n_dimensions=3) g_x_2 = jax.tree_map(lambda x: 2 * x, g) diff --git a/tests/tools/gaussian_mixture/linalg_test.py b/tests/tools/gaussian_mixture/linalg_test.py index 4db928264..6fedb13ae 100644 --- a/tests/tools/gaussian_mixture/linalg_test.py +++ b/tests/tools/gaussian_mixture/linalg_test.py @@ -22,7 +22,7 @@ @pytest.mark.fast() class TestLinalg: - def test_get_mean_and_var(self, rng: jax.Array): + def test_get_mean_and_var(self, rng: jnp.ndarray): points = jax.random.normal(key=rng, shape=(10, 2)) weights = jnp.ones(10) expected_mean = jnp.mean(points, axis=0) @@ -33,7 +33,7 @@ def test_get_mean_and_var(self, rng: jax.Array): np.testing.assert_allclose(expected_mean, actual_mean, atol=1E-5, rtol=1E-5) np.testing.assert_allclose(expected_var, actual_var, atol=1E-5, rtol=1E-5) - def test_get_mean_and_var_nonuniform_weights(self, rng: jax.Array): + def test_get_mean_and_var_nonuniform_weights(self, rng: jnp.ndarray): points = jax.random.normal(key=rng, shape=(10, 2)) weights = jnp.concatenate([jnp.ones(5), jnp.zeros(5)], axis=-1) expected_mean = jnp.mean(points[:5], axis=0) @@ -44,7 +44,7 @@ def test_get_mean_and_var_nonuniform_weights(self, rng: jax.Array): np.testing.assert_allclose(expected_mean, actual_mean, rtol=1e-6, atol=1e-6) np.testing.assert_allclose(expected_var, actual_var, rtol=1e-6, atol=1e-6) - def test_get_mean_and_cov(self, rng: jax.Array): + def test_get_mean_and_cov(self, rng: jnp.ndarray): points = jax.random.normal(key=rng, shape=(10, 2)) weights = jnp.ones(10) 
expected_mean = jnp.mean(points, axis=0) @@ -55,7 +55,7 @@ def test_get_mean_and_cov(self, rng: jax.Array): np.testing.assert_allclose(expected_mean, actual_mean, atol=1e-5, rtol=1e-5) np.testing.assert_allclose(expected_cov, actual_cov, atol=1e-5, rtol=1e-5) - def test_get_mean_and_cov_nonuniform_weights(self, rng: jax.Array): + def test_get_mean_and_cov_nonuniform_weights(self, rng: jnp.ndarray): points = jax.random.normal(key=rng, shape=(10, 2)) weights = jnp.concatenate([jnp.ones(5), jnp.zeros(5)], axis=-1) expected_mean = jnp.mean(points[:5], axis=0) @@ -66,7 +66,7 @@ def test_get_mean_and_cov_nonuniform_weights(self, rng: jax.Array): np.testing.assert_allclose(expected_mean, actual_mean, rtol=1e-6, atol=1e-6) np.testing.assert_allclose(expected_cov, actual_cov, rtol=1e-6, atol=1e-6) - def test_flat_to_tril(self, rng: jax.Array): + def test_flat_to_tril(self, rng: jnp.ndarray): size = 3 x = jax.random.normal(key=rng, shape=(5, 4, size * (size + 1) // 2)) m = linalg.flat_to_tril(x, size) @@ -86,7 +86,7 @@ def test_flat_to_tril(self, rng: jax.Array): actual = linalg.tril_to_flat(m) np.testing.assert_allclose(x, actual) - def test_tril_to_flat(self, rng: jax.Array): + def test_tril_to_flat(self, rng: jnp.ndarray): size = 3 m = jax.random.normal(key=rng, shape=(5, 4, size, size)) for i in range(size): @@ -103,7 +103,7 @@ def test_tril_to_flat(self, rng: jax.Array): inverted = linalg.flat_to_tril(flat, size) np.testing.assert_allclose(m, inverted) - def test_apply_to_diag(self, rng: jax.Array): + def test_apply_to_diag(self, rng: jnp.ndarray): size = 3 m = jax.random.normal(key=rng, shape=(5, 4, size, size)) mnew = linalg.apply_to_diag(m, jnp.exp) @@ -114,7 +114,7 @@ def test_apply_to_diag(self, rng: jax.Array): else: np.testing.assert_allclose(jnp.exp(m[..., i, j]), mnew[..., i, j]) - def test_matrix_powers(self, rng: jax.Array): + def test_matrix_powers(self, rng: jnp.ndarray): rng, subrng = jax.random.split(rng) m = jax.random.normal(key=subrng, shape=(4, 4)) m 
+= jnp.swapaxes(m, axis1=-2, axis2=-1) # symmetric @@ -125,7 +125,7 @@ def test_matrix_powers(self, rng: jax.Array): np.testing.assert_allclose(m, actual[0], rtol=1.e-5) np.testing.assert_allclose(inv_m, actual[1], rtol=1.e-4) - def test_invmatvectril(self, rng: jax.Array): + def test_invmatvectril(self, rng: jnp.ndarray): rng, subrng = jax.random.split(rng) m = jax.random.normal(key=subrng, shape=(2, 2)) m += jnp.swapaxes(m, axis1=-2, axis2=-1) # symmetric @@ -138,7 +138,7 @@ def test_invmatvectril(self, rng: jax.Array): actual = linalg.invmatvectril(m=cholesky, x=x, lower=True) np.testing.assert_allclose(expected, actual, atol=1e-4, rtol=1.e-4) - def test_get_random_orthogonal(self, rng: jax.Array): + def test_get_random_orthogonal(self, rng: jnp.ndarray): rng, subrng = jax.random.split(rng) q = linalg.get_random_orthogonal(rng=subrng, dim=3) qt = jnp.transpose(q) diff --git a/tests/tools/gaussian_mixture/probabilities_test.py b/tests/tools/gaussian_mixture/probabilities_test.py index 4924924df..5d28a52aa 100644 --- a/tests/tools/gaussian_mixture/probabilities_test.py +++ b/tests/tools/gaussian_mixture/probabilities_test.py @@ -39,7 +39,7 @@ def test_log_probs(self): np.testing.assert_allclose(jnp.sum(probs), 1.0, rtol=1e-6, atol=1e-6) np.testing.assert_array_equal(probs > 0., True) - def test_from_random(self, rng: jax.Array): + def test_from_random(self, rng: jnp.ndarray): n_dimensions = 4 pp = probabilities.Probabilities.from_random( rng=rng, n_dimensions=n_dimensions, stdev=0.1 @@ -51,7 +51,7 @@ def test_from_probs(self): pp = probabilities.Probabilities.from_probs(probs) np.testing.assert_allclose(probs, pp.probs(), rtol=1e-6, atol=1e-6) - def test_sample(self, rng: jax.Array): + def test_sample(self, rng: jnp.ndarray): p = 0.4 probs = jnp.array([p, 1. 
- p]) pp = probabilities.Probabilities.from_probs(probs) diff --git a/tests/tools/gaussian_mixture/scale_tril_test.py b/tests/tools/gaussian_mixture/scale_tril_test.py index 3e53fd543..36643b6d7 100644 --- a/tests/tools/gaussian_mixture/scale_tril_test.py +++ b/tests/tools/gaussian_mixture/scale_tril_test.py @@ -47,7 +47,7 @@ def test_log_det_covariance(self, chol: scale_tril.ScaleTriL): actual = chol.log_det_covariance() np.testing.assert_almost_equal(actual, expected) - def test_from_random(self, rng: jax.Array): + def test_from_random(self, rng: jnp.ndarray): n_dimensions = 4 cov = scale_tril.ScaleTriL.from_random( rng=rng, n_dimensions=n_dimensions, stdev=0.1 @@ -56,7 +56,7 @@ def test_from_random(self, rng: jax.Array): cov.cholesky().shape, (n_dimensions, n_dimensions) ) - def test_from_cholesky(self, rng: jax.Array): + def test_from_cholesky(self, rng: jnp.ndarray): n_dimensions = 4 cholesky = scale_tril.ScaleTriL.from_random( rng=rng, n_dimensions=n_dimensions, stdev=1. @@ -64,7 +64,7 @@ def test_from_cholesky(self, rng: jax.Array): scale = scale_tril.ScaleTriL.from_cholesky(cholesky) np.testing.assert_allclose(cholesky, scale.cholesky(), atol=1e-4, rtol=1e-4) - def test_w2_dist(self, rng: jax.Array): + def test_w2_dist(self, rng: jnp.ndarray): # make sure distance between a random normal and itself is 0 rng, subrng = jax.random.split(rng) s = scale_tril.ScaleTriL.from_random(rng=subrng, n_dimensions=3) @@ -85,7 +85,7 @@ def test_w2_dist(self, rng: jax.Array): delta_sigma = jnp.sum((jnp.sqrt(diag0) - jnp.sqrt(diag1)) ** 2.) 
np.testing.assert_allclose(delta_sigma, w2, atol=1e-4, rtol=1e-4) - def test_transport(self, rng: jax.Array): + def test_transport(self, rng: jnp.ndarray): size = 4 rng, subrng0, subrng1 = jax.random.split(rng, num=3) diag0 = jnp.exp(jax.random.normal(key=subrng0, shape=(size,))) @@ -99,14 +99,14 @@ def test_transport(self, rng: jax.Array): expected = x * jnp.sqrt(diag1)[None] / jnp.sqrt(diag0)[None] np.testing.assert_allclose(expected, transported, atol=1e-4, rtol=1e-4) - def test_flatten_unflatten(self, rng: jax.Array): + def test_flatten_unflatten(self, rng: jnp.ndarray): scale = scale_tril.ScaleTriL.from_random(rng=rng, n_dimensions=3) children, aux_data = jax.tree_util.tree_flatten(scale) scale_new = jax.tree_util.tree_unflatten(aux_data, children) np.testing.assert_array_equal(scale.params, scale_new.params) assert scale == scale_new - def test_pytree_mapping(self, rng: jax.Array): + def test_pytree_mapping(self, rng: jnp.ndarray): scale = scale_tril.ScaleTriL.from_random(rng=rng, n_dimensions=3) scale_x_2 = jax.tree_map(lambda x: 2 * x, scale) np.testing.assert_allclose(2. 
* scale.params, scale_x_2.params) diff --git a/tests/tools/k_means_test.py b/tests/tools/k_means_test.py index 55cacde02..9b504a82d 100644 --- a/tests/tools/k_means_test.py +++ b/tests/tools/k_means_test.py @@ -31,7 +31,7 @@ def make_blobs( *args: Any, cost_fn: Optional[Literal["sqeucl", "cosine"]] = None, **kwargs: Any -) -> Tuple[Union[jax.Array, pointcloud.PointCloud], jax.Array, jax.Array]: +) -> Tuple[Union[jnp.ndarray, pointcloud.PointCloud], jnp.ndarray, jnp.ndarray]: X, y, c = datasets.make_blobs(*args, return_centers=True, **kwargs) X, y, c = jnp.asarray(X), jnp.asarray(y), jnp.asarray(c) if cost_fn is None: @@ -47,10 +47,10 @@ def make_blobs( def compute_assignment( - x: jax.Array, - centers: jax.Array, - weights: Optional[jax.Array] = None -) -> Tuple[jax.Array, float]: + x: jnp.ndarray, + centers: jnp.ndarray, + weights: Optional[jnp.ndarray] = None +) -> Tuple[jnp.ndarray, float]: if weights is None: weights = jnp.ones(x.shape[0]) cost_matrix = pointcloud.PointCloud(x, centers).cost_matrix @@ -63,7 +63,7 @@ def compute_assignment( class TestKmeansPlusPlus: @pytest.mark.fast.with_args("n_local_trials", [None, 3], only_fast=-1) - def test_n_local_trials(self, rng: jax.Array, n_local_trials): + def test_n_local_trials(self, rng: jnp.ndarray, n_local_trials): n, k = 100, 4 rng1, rng2 = jax.random.split(rng) geom, _, c = make_blobs( @@ -78,7 +78,7 @@ def test_n_local_trials(self, rng: jax.Array, n_local_trials): assert shift1 > shift2 @pytest.mark.fast.with_args("k", [3, 5], only_fast=0) - def test_matches_sklearn(self, rng: jax.Array, k: int): + def test_matches_sklearn(self, rng: jnp.ndarray, k: int): ndim = 2 geom, _, _ = make_blobs( n_samples=100, @@ -102,9 +102,9 @@ def test_matches_sklearn(self, rng: jax.Array, k: int): ) assert jnp.abs(pred_inertia - gt_inertia) <= 200 - def test_initialization_differentiable(self, rng: jax.Array): + def test_initialization_differentiable(self, rng: jnp.ndarray): - def callback(x: jax.Array) -> float: + def 
callback(x: jnp.ndarray) -> float: geom = pointcloud.PointCloud(x) centers = k_means._k_means_plus_plus(geom, k=3, rng=rng) _, inertia = compute_assignment(x, centers) @@ -122,7 +122,7 @@ class TestKmeans: @pytest.mark.fast() @pytest.mark.parametrize("k", [1, 6]) - def test_k_means_output(self, rng: jax.Array, k: int): + def test_k_means_output(self, rng: jnp.ndarray, k: int): max_iter, ndim = 10, 4 geom, gt_assignment, _ = make_blobs( n_samples=50, n_features=ndim, centers=k, random_state=42 @@ -160,7 +160,7 @@ def test_k_means_simple_example(self): ["k-means++", "random", "callable", "wrong-callable"], only_fast=1, ) - def test_init_method(self, rng: jax.Array, init: str): + def test_init_method(self, rng: jnp.ndarray, init: str): if init == "callable": init_fn = lambda geom, k, _: geom.x[:k] elif init == "wrong-callable": @@ -176,7 +176,7 @@ def test_init_method(self, rng: jax.Array, init: str): else: _ = k_means.k_means(geom, k, init=init_fn) - def test_k_means_plus_plus_better_than_random(self, rng: jax.Array): + def test_k_means_plus_plus_better_than_random(self, rng: jnp.ndarray): k = 5 rng1, rng2 = jax.random.split(rng, 2) geom, _, _ = make_blobs(n_samples=50, centers=k, random_state=10) @@ -189,7 +189,7 @@ def test_k_means_plus_plus_better_than_random(self, rng: jax.Array): assert res_kpp.iteration < res_random.iteration assert res_kpp.error <= res_random.error - def test_larger_n_init_helps(self, rng: jax.Array): + def test_larger_n_init_helps(self, rng: jnp.ndarray): k = 10 geom, _, _ = make_blobs(n_samples=150, centers=k, random_state=0) @@ -199,7 +199,7 @@ def test_larger_n_init_helps(self, rng: jax.Array): assert res_larger_n_init.error < res.error @pytest.mark.parametrize("max_iter", [8, 16]) - def test_store_inner_errors(self, rng: jax.Array, max_iter: int): + def test_store_inner_errors(self, rng: jnp.ndarray, max_iter: int): ndim, k = 10, 4 geom, _, _ = make_blobs( n_samples=40, n_features=ndim, centers=k, random_state=43 @@ -215,7 +215,7 @@ def 
test_store_inner_errors(self, rng: jax.Array, max_iter: int): # check if error is decreasing np.testing.assert_array_equal(jnp.diff(errors[::-1]) >= 0., True) - def test_strict_tolerance(self, rng: jax.Array): + def test_strict_tolerance(self, rng: jnp.ndarray): k = 11 geom, _, _ = make_blobs(n_samples=200, centers=k, random_state=39) @@ -229,7 +229,7 @@ def test_strict_tolerance(self, rng: jax.Array): @pytest.mark.parametrize( "tol", [1e-3, 0.], ids=["weak-convergence", "strict-convergence"] ) - def test_convergence_force_scan(self, rng: jax.Array, tol: float): + def test_convergence_force_scan(self, rng: jnp.ndarray, tol: float): k, n_iter = 9, 20 geom, _, _ = make_blobs(n_samples=100, centers=k, random_state=37) @@ -247,7 +247,7 @@ def test_convergence_force_scan(self, rng: jax.Array, tol: float): assert res.iteration == n_iter np.testing.assert_array_equal(res.inner_errors == -1, False) - def test_k_means_min_iterations(self, rng: jax.Array): + def test_k_means_min_iterations(self, rng: jnp.ndarray): k, min_iter = 8, 12 geom, _, _ = make_blobs(n_samples=160, centers=k, random_state=38) @@ -264,7 +264,7 @@ def test_k_means_min_iterations(self, rng: jax.Array): assert res.converged assert jnp.sum(res.inner_errors != -1) >= min_iter - def test_weight_scaling_effects_only_inertia(self, rng: jax.Array): + def test_weight_scaling_effects_only_inertia(self, rng: jnp.ndarray): k = 10 rng1, rng2 = jax.random.split(rng) geom, _, _ = make_blobs(n_samples=130, centers=k, random_state=3) @@ -285,7 +285,7 @@ def test_weight_scaling_effects_only_inertia(self, rng: jax.Array): ) @pytest.mark.fast() - def test_empty_weights(self, rng: jax.Array): + def test_empty_weights(self, rng: jnp.ndarray): n, ndim, k, d = 20, 2, 3, 5. 
gen = np.random.RandomState(0) x = gen.normal(size=(n, ndim)) @@ -333,10 +333,10 @@ def test_cosine_cost_fn(self): @pytest.mark.fast.with_args("init", ["k-means++", "random"], only_fast=0) def test_k_means_jitting( - self, rng: jax.Array, init: Literal["k-means++", "random"] + self, rng: jnp.ndarray, init: Literal["k-means++", "random"] ): - def callback(x: jax.Array) -> k_means.KMeansOutput: + def callback(x: jnp.ndarray) -> k_means.KMeansOutput: return k_means.k_means( x, k=k, init=init, store_inner_errors=True, rng=rng ) @@ -365,10 +365,10 @@ def callback(x: jax.Array) -> k_means.KMeansOutput: (False, True)], ids=["jit-while-loop", "nojit-for-loop"]) def test_k_means_differentiability( - self, rng: jax.Array, jit: bool, force_scan: bool + self, rng: jnp.ndarray, jit: bool, force_scan: bool ): - def inertia(x: jax.Array, w: jax.Array) -> float: + def inertia(x: jnp.ndarray, w: jnp.ndarray) -> float: return k_means.k_means( x, k=k, @@ -404,7 +404,7 @@ def inertia(x: jax.Array, w: jax.Array) -> float: @pytest.mark.parametrize("tol", [1e-3, 0.]) @pytest.mark.parametrize(("n", "k"), [(37, 4), (128, 6)]) def test_clustering_matches_sklearn( - self, rng: jax.Array, n: int, k: int, tol: float + self, rng: jnp.ndarray, n: int, k: int, tol: float ): x, _, _ = make_blobs(n_samples=n, centers=k, random_state=41) diff --git a/tests/tools/segment_sinkhorn_test.py b/tests/tools/segment_sinkhorn_test.py index 119dbf93a..6e8a8fb8c 100644 --- a/tests/tools/segment_sinkhorn_test.py +++ b/tests/tools/segment_sinkhorn_test.py @@ -26,7 +26,7 @@ class TestSegmentSinkhorn: @pytest.fixture(autouse=True) - def setUp(self, rng: jax.Array): + def setUp(self, rng: jnp.ndarray): self._dim = 4 self._num_points = 13, 17 self._max_measure_size = 20 diff --git a/tests/tools/sinkhorn_divergence_test.py b/tests/tools/sinkhorn_divergence_test.py index 07bcf535e..0f3e56bfc 100644 --- a/tests/tools/sinkhorn_divergence_test.py +++ b/tests/tools/sinkhorn_divergence_test.py @@ -28,7 +28,7 @@ class 
TestSinkhornDivergence: @pytest.fixture(autouse=True) - def setUp(self, rng: jax.Array): + def setUp(self, rng: jnp.ndarray): self._dim = 4 self._num_points = 13, 17 self.rng, *rngs = jax.random.split(rng, 3) @@ -389,7 +389,7 @@ def test_euclidean_momentum_params( class TestSinkhornDivergenceGrad: @pytest.fixture(autouse=True) - def initialize(self, rng: jax.Array): + def initialize(self, rng: jnp.ndarray): self._dim = 3 self._num_points = 13, 12 self.rng, *rngs = jax.random.split(rng, 3) @@ -403,7 +403,7 @@ def test_gradient_generic_point_cloud_wrapper(self): x = jax.random.uniform(rngs[0], (self._num_points[0], self._dim)) y = jax.random.uniform(rngs[1], (self._num_points[1], self._dim)) - def loss_fn(cloud_a: jax.Array, cloud_b: jax.Array) -> float: + def loss_fn(cloud_a: jnp.ndarray, cloud_b: jnp.ndarray) -> float: div = sinkhorn_divergence.sinkhorn_divergence( pointcloud.PointCloud, cloud_a, diff --git a/tests/tools/soft_sort_test.py b/tests/tools/soft_sort_test.py index 4f3a12c10..2432a2dee 100644 --- a/tests/tools/soft_sort_test.py +++ b/tests/tools/soft_sort_test.py @@ -27,14 +27,14 @@ class TestSoftSort: @pytest.mark.parametrize("shape", [(20,), (20, 1)]) - def test_sort_one_array(self, rng: jax.Array, shape: Tuple[int, ...]): + def test_sort_one_array(self, rng: jnp.ndarray, shape: Tuple[int, ...]): x = jax.random.uniform(rng, shape) xs = soft_sort.sort(x, axis=0) np.testing.assert_array_equal(x.shape, xs.shape) np.testing.assert_array_equal(jnp.diff(xs, axis=0) >= 0.0, True) - def test_sort_array_squashing_momentum(self, rng: jax.Array): + def test_sort_array_squashing_momentum(self, rng: jnp.ndarray): shape = (33, 1) x = jax.random.uniform(rng, shape) xs_lin = soft_sort.sort( @@ -61,7 +61,7 @@ def test_sort_array_squashing_momentum(self, rng: jax.Array): @pytest.mark.fast() @pytest.mark.parametrize("k", [-1, 4, 100]) - def test_topk_one_array(self, rng: jax.Array, k: int): + def test_topk_one_array(self, rng: jnp.ndarray, k: int): n = 20 x = 
jax.random.uniform(rng, (n,)) axis = 0 @@ -75,7 +75,7 @@ def test_topk_one_array(self, rng: jax.Array, k: int): np.testing.assert_allclose(xs, jnp.sort(x, axis=axis)[-outsize:], atol=0.01) @pytest.mark.fast.with_args("topk", [-1, 2, 11], only_fast=-1) - def test_sort_batch(self, rng: jax.Array, topk: int): + def test_sort_batch(self, rng: jnp.ndarray, topk: int): x = jax.random.uniform(rng, (32, 10, 6, 4)) axis = 1 xs = soft_sort.sort(x, axis=axis, topk=topk) @@ -85,7 +85,7 @@ def test_sort_batch(self, rng: jax.Array, topk: int): np.testing.assert_array_equal(xs.shape, expected_shape) np.testing.assert_array_equal(jnp.diff(xs, axis=axis) >= 0.0, True) - def test_multivariate_cdf_quantiles(self, rng: jax.Array): + def test_multivariate_cdf_quantiles(self, rng: jnp.ndarray): n, d = 512, 3 key1, key2, key3 = jax.random.split(rng, 3) @@ -108,7 +108,7 @@ def test_multivariate_cdf_quantiles(self, rng: jax.Array): # Check passing custom sampler, must be still symmetric / centered on {.5}^d # Check passing custom epsilon also works. - def ball_sampler(k: jax.Array, s: Tuple[int, int]) -> jax.Array: + def ball_sampler(k: jnp.ndarray, s: Tuple[int, int]) -> jnp.ndarray: return 0.5 * (jax.random.ball(k, d=s[1], p=4, shape=(s[0],)) + 1.) 
num_target_samples = 473 @@ -128,7 +128,7 @@ def mv_c_q(inputs, num_target_samples, rng, epsilon): np.testing.assert_allclose(z, qua(q), atol=atol) @pytest.mark.fast.with_args("axis,jit", [(0, False), (1, True)], only_fast=0) - def test_ranks(self, axis, rng: jax.Array, jit: bool): + def test_ranks(self, axis, rng: jnp.ndarray, jit: bool): rng1, rng2 = jax.random.split(rng, 2) num_targets = 13 x = jax.random.uniform(rng1, (8, 5, 2)) @@ -163,7 +163,7 @@ def test_ranks(self, axis, rng: jax.Array, jit: bool): np.testing.assert_allclose(ranks, expected_ranks, atol=0.3, rtol=0.1) @pytest.mark.fast.with_args("axis,jit", [(0, False), (1, True)], only_fast=0) - def test_topk_mask(self, axis, rng: jax.Array, jit: bool): + def test_topk_mask(self, axis, rng: jnp.ndarray, jit: bool): def boolean_topk_mask(u, k): return u >= jnp.flip(jax.numpy.sort(u))[k - 1] @@ -194,7 +194,7 @@ def test_quantile(self, q: float): np.testing.assert_allclose(x_q, q, atol=1e-3, rtol=1e-2) - def test_quantile_on_several_axes(self, rng: jax.Array): + def test_quantile_on_several_axes(self, rng: jnp.ndarray): batch, height, width, channels = 4, 47, 45, 3 x = jax.random.uniform(rng, shape=(batch, height, width, channels)) q = soft_sort.quantile( @@ -208,7 +208,7 @@ def test_quantile_on_several_axes(self, rng: jax.Array): @pytest.mark.fast() @pytest.mark.parametrize("jit", [False, True]) - def test_quantiles(self, rng: jax.Array, jit: bool): + def test_quantiles(self, rng: jnp.ndarray, jit: bool): inputs = jax.random.uniform(rng, (100, 2, 3)) q = jnp.array([.1, .8, .4]) quantile_fn = soft_sort.quantile @@ -220,7 +220,7 @@ def test_quantiles(self, rng: jax.Array, jit: bool): np.testing.assert_allclose(m1.mean(axis=[1, 2]), q, atol=5e-2) @pytest.mark.parametrize("jit", [False, True]) - def test_soft_quantile_normalization(self, rng: jax.Array, jit: bool): + def test_soft_quantile_normalization(self, rng: jnp.ndarray, jit: bool): rngs = jax.random.split(rng, 2) x = jax.random.uniform(rngs[0], 
shape=(100,)) mu, sigma = 2.0, 1.2 @@ -237,7 +237,7 @@ def test_soft_quantile_normalization(self, rng: jax.Array, jit: bool): [mu_target, sigma_target], rtol=0.05) - def test_sort_with(self, rng: jax.Array): + def test_sort_with(self, rng: jnp.ndarray): n, d = 20, 4 inputs = jax.random.uniform(rng, shape=(n, d)) criterion = jnp.linspace(0.1, 1.2, n) @@ -269,7 +269,7 @@ def test_quantize(self, jit: bool): np.testing.assert_allclose(min_distances, min_distances, atol=0.05) @pytest.mark.parametrize("implicit", [False, True]) - def test_soft_sort_jacobian(self, rng: jax.Array, implicit: bool): + def test_soft_sort_jacobian(self, rng: jnp.ndarray, implicit: bool): # Add a ridge when using JAX solvers. try: from ott.solvers.linear import lineax_implicit # noqa: F401 @@ -283,7 +283,7 @@ def test_soft_sort_jacobian(self, rng: jax.Array, implicit: bool): z = jax.random.uniform(rngs[0], ((b, n))) random_dir = jax.random.normal(rngs[1], (b,)) / b - def loss_fn(logits: jax.Array) -> float: + def loss_fn(logits: jnp.ndarray) -> float: im_d = None if implicit: # Ridge parameters are only used when using JAX's CG. From 8fa3683a5b91d5848231236cb97d5b1632d1538d Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 28 Nov 2023 14:53:21 +0100 Subject: [PATCH 032/186] move dataloader from tests to module --- src/ott/neural/data/dataloaders.py | 131 +++++++++++++++++++++++--- tests/neural/conftest.py | 143 +++-------------------------- 2 files changed, 127 insertions(+), 147 deletions(-) diff --git a/src/ott/neural/data/dataloaders.py b/src/ott/neural/data/dataloaders.py index 466460384..121af2c94 100644 --- a/src/ott/neural/data/dataloaders.py +++ b/src/ott/neural/data/dataloaders.py @@ -11,22 +11,123 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-#import tensorflow as tf +from typing import Dict, Iterator, Mapping, Optional -class ConditionalDataLoader: #TODO(@MUCDK) uncomment, resolve installation issues with TF - pass +import numpy as np - #def __init__( - # self, rng: jax.random.KeyArray, dataloaders: Dict[str, tf.Dataloader], - # p: jnp.ndarray - #) -> None: - # super().__init__() - # self.rng = rng - # self.conditions = dataloaders.keys() - # self.p = p - #def __next__(self) -> jnp.ndarray: - # self.rng, rng = jax.random.split(self.rng, 2) - # condition = jax.random.choice(rng, self.conditions, p=self.p) - # return next(self.dataloaders[condition]) +__all__ =[ "OTDataLoader", "ConditionalDataLoader"] + +class OTDataLoader: + """Data loader for OT problems. + + Args: + batch_size: Number of samples per batch. + source_lin: Linear part of the source measure. + source_quad: Quadratic part of the source measure. + target_lin: Linear part of the target measure. + target_quad: Quadratic part of the target measure. + source_conditions: Conditions of the source measure. + target_conditions: Conditions of the target measure. + seed: Random seed. 
+ """ + + def __init__( + self, + batch_size: int = 64, + source_lin: Optional[np.ndarray] = None, + source_quad: Optional[np.ndarray] = None, + target_lin: Optional[np.ndarray] = None, + target_quad: Optional[np.ndarray] = None, + source_conditions: Optional[np.ndarray] = None, + target_conditions: Optional[np.ndarray] = None, + seed: int = 0, + ) -> None: + super().__init__() + if source_lin is not None: + if source_quad is not None: + assert len(source_lin) == len(source_quad) + self.n_source = len(source_lin) + else: + self.n_source = len(source_lin) + else: + self.n_source = len(source_quad) + if source_conditions is not None: + assert len(source_conditions) == self.n_source + if target_lin is not None: + if target_quad is not None: + assert len(target_lin) == len(target_quad) + self.n_target = len(target_lin) + else: + self.n_target = len(target_lin) + else: + self.n_target = len(target_quad) + if target_conditions is not None: + assert len(target_conditions) == self.n_target + + self.source_lin = source_lin + self.target_lin = target_lin + self.source_quad = source_quad + self.target_quad = target_quad + self.source_conditions = source_conditions + self.target_conditions = target_conditions + self.batch_size = batch_size + self.rng = np.random.default_rng(seed=seed) + + def __next__(self) -> Mapping[str, np.ndarray]: + inds_source = self.rng.choice(self.n_source, size=[self.batch_size]) + inds_target = self.rng.choice(self.n_target, size=[self.batch_size]) + return { + "source_lin": + self.source_lin[inds_source, :] + if self.source_lin is not None else None, + "source_quad": + self.source_quad[inds_source, :] + if self.source_quad is not None else None, + "target_lin": + self.target_lin[inds_target, :] + if self.target_lin is not None else None, + "target_quad": + self.target_quad[inds_target, :] + if self.target_quad is not None else None, + "source_conditions": + self.source_conditions[inds_source, :] + if self.source_conditions is not None else None, + 
"target_conditions": + self.target_conditions[inds_target, :] + if self.target_conditions is not None else None, + } + + +class ConditionalDataLoader: + """Data loader for OT problems with conditions. + + This data loader wraps several data loaders and samples from them according to their conditions. + + Args: + dataloaders: Dictionary of data loaders with keys corresponding to conditions. + p: Probability of sampling from each data loader. + seed: Random seed. + + """ + + def __init__( + self, + dataloaders: Dict[str, Iterator], + p: np.ndarray, + seed: int = 0 + ) -> None: + super().__init__() + self.dataloaders = dataloaders + self.conditions = list(dataloaders.keys()) + self.p = p + self.rng = np.random.default_rng(seed=seed) + + def __next__(self, cond: str = None) -> Mapping[str, np.ndarray]: + if cond is not None: + if cond not in self.conditions: + raise ValueError(f"Condition {cond} not in {self.conditions}") + return next(self.dataloaders[cond]) + idx = self.rng.choice(len(self.conditions), p=self.p) + return next(self.dataloaders[self.conditions[idx]]) diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index 0dd65ba57..edb635e90 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -1,60 +1,7 @@ -from typing import Dict, Iterator, Mapping, Optional - import numpy as np import pytest - -class DataLoader: - - def __init__( - self, - source_data: np.ndarray, - target_data: np.ndarray, - batch_size: int = 64, - source_conditions: Optional[np.ndarray] = None, - target_conditions: Optional[np.ndarray] = None, - ) -> None: - super().__init__() - self.source_data = source_data - self.target_data = target_data - self.source_conditions = source_conditions - self.target_conditions = target_conditions - self.batch_size = batch_size - self.rng = np.random.default_rng(seed=0) - - def __next__(self) -> Mapping[str, np.ndarray]: - inds_source = self.rng.choice(len(self.source_data), size=[self.batch_size]) - inds_target = 
self.rng.choice(len(self.target_data), size=[self.batch_size]) - return { - "source_lin": - self.source_data[inds_source, :], - "target_lin": - self.target_data[inds_target, :], - "source_conditions": - self.source_conditions[inds_source, :] - if self.source_conditions is not None else None, - "target_conditions": - self.target_conditions[inds_target, :] - if self.target_conditions is not None else None, - } - - -class ConditionalDataLoader: - - def __init__(self, dataloaders: Dict[str, Iterator], p: np.ndarray) -> None: - super().__init__() - self.dataloaders = dataloaders - self.conditions = list(dataloaders.keys()) - self.p = p - self.rng = np.random.default_rng(seed=0) - - def __next__(self, cond: str = None) -> Mapping[str, np.ndarray]: - if cond is not None: - if cond not in self.conditions: - raise ValueError(f"Condition {cond} not in {self.conditions}") - return next(self.dataloaders[cond]) - idx = self.rng.choice(len(self.conditions), p=self.p) - return next(self.dataloaders[self.conditions[idx]]) +from ott.neural.data.dataloaders import ConditionalDataLoader, OTDataLoader @pytest.fixture(scope="module") @@ -63,7 +10,7 @@ def data_loader_gaussian(): rng = np.random.default_rng(seed=0) source = rng.normal(size=(100, 2)) target = rng.normal(size=(100, 2)) + 1.0 - return DataLoader(source, target, 16) + return OTDataLoader(source, target, 16) @pytest.fixture(scope="module") @@ -75,10 +22,10 @@ def data_loader_gaussian_conditional(): source_1 = rng.normal(size=(100, 2)) target_1 = rng.normal(size=(100, 2)) - 2.0 - dl0 = DataLoader( + dl0 = OTDataLoader( source_0, target_0, 16, source_conditions=np.zeros_like(source_0) * 0.0 ) - dl1 = DataLoader( + dl1 = OTDataLoader( source_1, target_1, 16, source_conditions=np.ones_like(source_1) * 1.0 ) @@ -93,75 +40,7 @@ def data_loader_gaussian_with_conditions(): target = rng.normal(size=(100, 2)) + 1.0 source_conditions = rng.normal(size=(100, 1)) target_conditions = rng.normal(size=(100, 1)) - 1.0 - return 
DataLoader(source, target, 16, source_conditions, target_conditions) - - -class GENOTDataLoader: - - def __init__( - self, - batch_size: int = 64, - source_lin: Optional[np.ndarray] = None, - source_quad: Optional[np.ndarray] = None, - target_lin: Optional[np.ndarray] = None, - target_quad: Optional[np.ndarray] = None, - source_conditions: Optional[np.ndarray] = None, - target_conditions: Optional[np.ndarray] = None, - ) -> None: - super().__init__() - if source_lin is not None: - if source_quad is not None: - assert len(source_lin) == len(source_quad) - self.n_source = len(source_lin) - else: - self.n_source = len(source_lin) - else: - self.n_source = len(source_quad) - if source_conditions is not None: - assert len(source_conditions) == self.n_source - if target_lin is not None: - if target_quad is not None: - assert len(target_lin) == len(target_quad) - self.n_target = len(target_lin) - else: - self.n_target = len(target_lin) - else: - self.n_target = len(target_quad) - if target_conditions is not None: - assert len(target_conditions) == self.n_target - - self.source_lin = source_lin - self.target_lin = target_lin - self.source_quad = source_quad - self.target_quad = target_quad - self.source_conditions = source_conditions - self.target_conditions = target_conditions - self.batch_size = batch_size - self.rng = np.random.default_rng(seed=0) - - def __next__(self) -> Mapping[str, np.ndarray]: - inds_source = self.rng.choice(self.n_source, size=[self.batch_size]) - inds_target = self.rng.choice(self.n_target, size=[self.batch_size]) - return { - "source_lin": - self.source_lin[inds_source, :] - if self.source_lin is not None else None, - "source_quad": - self.source_quad[inds_source, :] - if self.source_quad is not None else None, - "target_lin": - self.target_lin[inds_target, :] - if self.target_lin is not None else None, - "target_quad": - self.target_quad[inds_target, :] - if self.target_quad is not None else None, - "source_conditions": - 
self.source_conditions[inds_source, :] - if self.source_conditions is not None else None, - "target_conditions": - self.target_conditions[inds_target, :] - if self.target_conditions is not None else None, - } + return OTDataLoader(source, target, 16, source_conditions, target_conditions) @pytest.fixture(scope="module") @@ -170,7 +49,7 @@ def genot_data_loader_linear(): rng = np.random.default_rng(seed=0) source = rng.normal(size=(100, 2)) target = rng.normal(size=(100, 2)) + 1.0 - return GENOTDataLoader(16, source_lin=source, target_lin=target) + return OTDataLoader(16, source_lin=source, target_lin=target) @pytest.fixture(scope="module") @@ -181,7 +60,7 @@ def genot_data_loader_linear_conditional(): target = rng.normal(size=(100, 2)) + 1.0 conditions_source = rng.normal(size=(100, 4)) conditions_target = rng.normal(size=(100, 4)) - 1.0 - return GENOTDataLoader( + return OTDataLoader( 16, source_lin=source, target_lin=target, @@ -196,7 +75,7 @@ def genot_data_loader_quad(): rng = np.random.default_rng(seed=0) source = rng.normal(size=(100, 2)) target = rng.normal(size=(100, 1)) + 1.0 - return GENOTDataLoader(16, source_quad=source, target_quad=target) + return OTDataLoader(16, source_quad=source, target_quad=target) @pytest.fixture(scope="module") @@ -206,7 +85,7 @@ def genot_data_loader_quad_conditional(): source = rng.normal(size=(100, 2)) target = rng.normal(size=(100, 1)) + 1.0 conditions = rng.normal(size=(100, 7)) - return GENOTDataLoader( + return OTDataLoader( 16, source_quad=source, target_quad=target, @@ -223,7 +102,7 @@ def genot_data_loader_fused(): target_q = rng.normal(size=(100, 1)) + 1.0 source_lin = rng.normal(size=(100, 2)) target_lin = rng.normal(size=(100, 2)) + 1.0 - return GENOTDataLoader( + return OTDataLoader( 16, source_lin=source_lin, source_quad=source_q, @@ -241,7 +120,7 @@ def genot_data_loader_fused_conditional(): source_lin = rng.normal(size=(100, 2)) target_lin = rng.normal(size=(100, 2)) + 1.0 conditions = rng.normal(size=(100, 7)) 
- return GENOTDataLoader( + return OTDataLoader( 16, source_lin=source_lin, source_quad=source_q, From 2e2f9f344822c98fcc1b54f606b44208a366f129 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 28 Nov 2023 15:33:39 +0100 Subject: [PATCH 033/186] add docstrings to neurcal networks --- src/ott/neural/data/dataloaders.py | 2 +- src/ott/neural/models/base_models.py | 18 +++++- src/ott/neural/models/models.py | 83 +++++++++++++++++++++++++++- 3 files changed, 99 insertions(+), 4 deletions(-) diff --git a/src/ott/neural/data/dataloaders.py b/src/ott/neural/data/dataloaders.py index 121af2c94..938dabb96 100644 --- a/src/ott/neural/data/dataloaders.py +++ b/src/ott/neural/data/dataloaders.py @@ -16,8 +16,8 @@ import numpy as np +__all__ = ["OTDataLoader", "ConditionalDataLoader"] -__all__ =[ "OTDataLoader", "ConditionalDataLoader"] class OTDataLoader: """Data loader for OT problems. diff --git a/src/ott/neural/models/base_models.py b/src/ott/neural/models/base_models.py index c96ad5b29..8b5dc126a 100644 --- a/src/ott/neural/models/base_models.py +++ b/src/ott/neural/models/base_models.py @@ -21,6 +21,7 @@ class BaseNeuralVectorField(nn.Module, abc.ABC): + """Base class for neural vector field models.""" @abc.abstractmethod def __call__( @@ -29,11 +30,20 @@ def __call__( x: jnp.ndarray, condition: Optional[jnp.ndarray] = None, keys_model: Optional[jnp.ndarray] = None - ) -> jnp.ndarray: # noqa: D102): + ) -> jnp.ndarray: + """"Evaluate the vector field. + + Args: + t: Time. + x: Input data. + condition: Condition. + keys_model: Random keys for the model. + """ pass class BaseRescalingNet(nn.Module, abc.ABC): + """Base class for models to learn distributional rescaling factors.""" @abc.abstractmethod def __call__( @@ -41,4 +51,10 @@ def __call__( x: jnp.ndarray, condition: Optional[jnp.ndarray] = None ) -> jnp.ndarray: + """Evaluate the model. + + Args: + x: Input data. + condition: Condition. 
+ """ pass diff --git a/src/ott/neural/models/models.py b/src/ott/neural/models/models.py index 80326d3ca..0c424c588 100644 --- a/src/ott/neural/models/models.py +++ b/src/ott/neural/models/models.py @@ -180,7 +180,9 @@ def __call__(self, x: jnp.ndarray) -> float: # noqa: D102 return z.squeeze() -class MLP(neuraldual.BaseW2NeuralDual): +class MLP( + neuraldual.BaseW2NeuralDual +): #TODO don't let this inherit from BaseW2NeuralDual """A generic, typically not-convex (w.r.t input) MLP. Args: @@ -418,12 +420,34 @@ class Block(nn.Module): @nn.compact def __call__(self, x): for i in range(self.num_layers): - x = nn.Dense(self.dim, name="fc{0}".format(i))(x) + x = nn.Dense(self.dim)(x) x = self.act_fn(x) return nn.Dense(self.out_dim)(x) class NeuralVectorField(BaseNeuralVectorField): + """Parameterized neural vector field. + + Each of the input, condition, and time embeddings are passed through a block + consisting of ``num_layers_per_block`` layers of dimension ``latent_embed_dim``, ``condition_embed_dim``, and ``time_embed_dim``, respectively. + The output of each block is concatenated and passed through a final block of dimension ``joint_hidden_dim``. + + Args: + output_dim: Dimensionality of the neural vector field. + condition_dim: Dimensionality of the conditioning vector. + latent_embed_dim: Dimensionality of the embedding of the data. + condition_embed_dim: Dimensionality of the embedding of the condition. + If ``None``, set to ``latent_embed_dim``. + t_embed_dim: Dimensionality of the time embedding. + If ``None``, set to ``latent_embed_dim``. + joint_hidden_dim: Dimensionality of the hidden layers of the joint network. + If ``None``, set to ``latent_embed_dim + condition_embed_dim + + t_embed_dim``. + num_layers_per_block: Number of layers per block. + act_fn: Activation function. + n_frequencies: Number of frequencies to use for the time embedding. 
+ + """ output_dim: int condition_dim: int latent_embed_dim: int @@ -435,6 +459,14 @@ class NeuralVectorField(BaseNeuralVectorField): n_frequencies: int = 128 def time_encoder(self, t: jnp.ndarray) -> jnp.array: + """Encode the time. + + Args: + t: Time. + + Returns: + Encoded time. + """ freq = 2 * jnp.arange(self.n_frequencies) * jnp.pi t = freq * t return jnp.concatenate((jnp.cos(t), jnp.sin(t)), axis=-1) @@ -469,7 +501,17 @@ def __call__( condition: Optional[jnp.ndarray], keys_model: Optional[jnp.ndarray] = None, ) -> jnp.ndarray: + """Forward pass through the neural vector field. + + Args: + t: Time. + x: Data. + condition: Conditioning vector. + keys_model: Random number generator. + Returns: + Output of the neural vector field. + """ t = self.time_encoder(t) t = Block( dim=self.t_embed_dim, @@ -524,6 +566,16 @@ def create_train_state( optimizer: optax.OptState, input_dim: int, ) -> train_state.TrainState: + """Create the training state. + + Args: + rng: Random number generator. + optimizer: Optimizer. + input_dim: Dimensionality of the input. + + Returns: + Training state. + """ params = self.init( rng, jnp.ones((1, 1)), jnp.ones((1, input_dim)), jnp.ones((1, self.condition_dim)) @@ -534,6 +586,23 @@ def create_train_state( class Rescaling_MLP(BaseRescalingNet): + """Network to learn distributional rescaling factors based on a MLP. + + The input is passed through a block consisting of ``num_layers_per_block`` with size ``hidden_dim``. + If ``condition_dim`` is greater than 0, the conditioning vector is passed through a block of the same size. + Both outputs are concatenated and passed through another block of the same size. + + To ensure non-negativity of the output, the output is exponentiated. + + Args: + hidden_dim: Dimensionality of the hidden layers. + condition_dim: Dimensionality of the conditioning vector. + num_layers_per_block: Number of layers per block. + act_fn: Activation function. + + Returns: + Rescaling factors. 
+ """ hidden_dim: int condition_dim: int num_layers_per_block: int = 3 @@ -582,6 +651,16 @@ def create_train_state( optimizer: optax.OptState, input_dim: int, ) -> train_state.TrainState: + """Create the training state. + + Args: + rng: Random number generator. + optimizer: Optimizer. + input_dim: Dimensionality of the input. + + Returns: + Training state. + """ params = self.init( rng, jnp.ones((1, input_dim)), jnp.ones((1, self.condition_dim)) )["params"] From 8c71deb5229d8929cde52293c83dee6e98146b0b Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 28 Nov 2023 17:20:18 +0100 Subject: [PATCH 034/186] [ci skip] adapt type of scale_cost and cost_fn --- src/ott/neural/solvers/base_solver.py | 13 ++++++++--- src/ott/neural/solvers/genot.py | 31 ++++++++++++--------------- 2 files changed, 24 insertions(+), 20 deletions(-) diff --git a/src/ott/neural/solvers/base_solver.py b/src/ott/neural/solvers/base_solver.py index 0ad159a8f..cc9a4c310 100644 --- a/src/ott/neural/solvers/base_solver.py +++ b/src/ott/neural/solvers/base_solver.py @@ -152,8 +152,10 @@ def _get_sinkhorn_match_fn( self, ot_solver: Any, epsilon: float = 1e-2, - cost_fn: Any = costs.SqEuclidean(), - scale_cost: Any = "mean", + cost_fn: costs.CostFn = costs.SqEuclidean(), + scale_cost: Union[bool, int, float, + Literal["mean", "max_norm", "max_bound", "max_cost", + "median"]] = "mean", tau_a: float = 1.0, tau_b: float = 1.0, *, @@ -187,7 +189,12 @@ def _get_gromov_match_fn( self, ot_solver: Any, cost_fn: Union[Any, Mapping[str, Any]], - scale_cost: Union[Any, Mapping[str, Any]], + scale_cost: Union[Union[bool, int, float, + Literal["mean", "max_norm", "max_bound", + "max_cost", "median"]], + Dict[str, Union[bool, int, float, + Literal["mean", "max_norm", "max_bound", + "max_cost", "median"]]]], tau_a: float, tau_b: float, fused_penalty: float, diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/solvers/genot.py index 7b0867e44..a71f34760 100644 --- a/src/ott/neural/solvers/genot.py +++ 
b/src/ott/neural/solvers/genot.py @@ -13,15 +13,7 @@ # limitations under the License. import functools import types -from typing import ( - Any, - Callable, - Dict, - Optional, - Tuple, - Type, - Union, -) +from typing import Any, Callable, Dict, Literal, Optional, Tuple, Type, Union import diffrax import jax @@ -35,15 +27,15 @@ from ott.geometry import costs from ott.neural.models.models import BaseNeuralVectorField from ott.neural.solvers.base_solver import ( - BaseNeuralSolver, - ResampleMixin, - UnbalancednessMixin, + BaseNeuralSolver, + ResampleMixin, + UnbalancednessMixin, ) from ott.neural.solvers.flows import ( - BaseFlow, - BaseTimeSampler, - ConstantNoiseFlow, - UniformSampler, + BaseFlow, + BaseTimeSampler, + ConstantNoiseFlow, + UniformSampler, ) from ott.solvers import was_solver from ott.solvers.linear import sinkhorn @@ -101,7 +93,12 @@ def __init__( ot_solver: Type[was_solver.WassersteinSolver], epsilon: float, cost_fn: Union[costs.CostFn, Dict[str, costs.CostFn]], - scale_cost: Union[Any, Dict[str, Any]], #TODO: replace `Any` + scale_cost: Union[Union[bool, int, float, + Literal["mean", "max_norm", "max_bound", + "max_cost", "median"]], + Dict[str, Union[bool, int, float, + Literal["mean", "max_norm", "max_bound", + "max_cost", "median"]]]], optimizer: Type[optax.GradientTransformation], flow: Type[BaseFlow] = ConstantNoiseFlow(0.0), time_sampler: Type[BaseTimeSampler] = UniformSampler(), From a25b6c22d6f0ec2feee8facb262287f1c8dd11b2 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 28 Nov 2023 17:53:27 +0100 Subject: [PATCH 035/186] [ci skip] clean code --- src/ott/neural/models/models.py | 2 +- src/ott/neural/solvers/genot.py | 21 ++++++++------------- 2 files changed, 9 insertions(+), 14 deletions(-) diff --git a/src/ott/neural/models/models.py b/src/ott/neural/models/models.py index 0c424c588..c8bcb00ce 100644 --- a/src/ott/neural/models/models.py +++ b/src/ott/neural/models/models.py @@ -419,7 +419,7 @@ class Block(nn.Module): 
@nn.compact def __call__(self, x): - for i in range(self.num_layers): + for _ in range(self.num_layers): x = nn.Dense(self.dim)(x) x = self.act_fn(x) return nn.Dense(self.out_dim)(x) diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/solvers/genot.py index a71f34760..0e8de2705 100644 --- a/src/ott/neural/solvers/genot.py +++ b/src/ott/neural/solvers/genot.py @@ -13,7 +13,7 @@ # limitations under the License. import functools import types -from typing import Any, Callable, Dict, Literal, Optional, Tuple, Type, Union +from typing import Any, Callable, Dict, Literal, Optional, Type, Union import diffrax import jax @@ -27,25 +27,20 @@ from ott.geometry import costs from ott.neural.models.models import BaseNeuralVectorField from ott.neural.solvers.base_solver import ( - BaseNeuralSolver, - ResampleMixin, - UnbalancednessMixin, + BaseNeuralSolver, + ResampleMixin, + UnbalancednessMixin, ) from ott.neural.solvers.flows import ( - BaseFlow, - BaseTimeSampler, - ConstantNoiseFlow, - UniformSampler, + BaseFlow, + BaseTimeSampler, + ConstantNoiseFlow, + UniformSampler, ) from ott.solvers import was_solver from ott.solvers.linear import sinkhorn from ott.solvers.quadratic import gromov_wasserstein -Match_fn_T = Callable[[jax.random.PRNGKeyArray, jnp.array, jnp.array], - Tuple[jnp.array, jnp.array, jnp.array, jnp.array]] -Match_latent_fn_T = Callable[[jax.random.PRNGKeyArray, jnp.array, jnp.array], - Tuple[jnp.array, jnp.array]] - __all__ = ["GENOT"] From 75437db12328dce43a9de1cf94caeeceed10944a Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 28 Nov 2023 18:25:04 +0100 Subject: [PATCH 036/186] [ci skip] fix genot tests --- src/ott/neural/solvers/genot.py | 36 ++++++------- tests/neural/conftest.py | 16 +++--- tests/neural/genot_test.py | 89 ++++++++++++++++++--------------- 3 files changed, 73 insertions(+), 68 deletions(-) diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/solvers/genot.py index 0e8de2705..f6e7cd3e3 100644 --- 
a/src/ott/neural/solvers/genot.py +++ b/src/ott/neural/solvers/genot.py @@ -209,15 +209,14 @@ def __call__(self, train_loader, valid_loader) -> None: """Train GENOT.""" batch: Dict[str, jnp.array] = {} for iteration in range(self.iterations): - batch["source_lin"], batch["source_q"], batch["target_lin"], batch[ - "target_q"], batch["condition"] = next(train_loader) + batch = next(train_loader) self.rng, rng_time, rng_resample, rng_noise, rng_latent_data_match, rng_step_fn = jax.random.split( self.rng, 6 ) batch_size = len( batch["source_lin"] - ) if batch["source_lin"] is not None else len(batch["source_q"]) + ) if batch["source_lin"] is not None else len(batch["source_quad"]) n_samples = batch_size * self.k_samples_per_x batch["time"] = self.time_sampler(rng_time, n_samples) batch["noise"] = self.sample_noise(rng_noise, n_samples) @@ -226,34 +225,33 @@ def __call__(self, train_loader, valid_loader) -> None: ) tmat = self.match_fn( - batch["source_lin"], batch["source_q"], batch["target_lin"], - batch["target_q"] + batch["source_lin"], batch["source_quad"], batch["target_lin"], + batch["target_quad"] ) batch["source"] = jnp.concatenate([ batch[el] - for el in ["source_lin", "source_q"] + for el in ["source_lin", "source_quad"] if batch[el] is not None ], axis=1) batch["target"] = jnp.concatenate([ batch[el] - for el in ["target_lin", "target_q"] + for el in ["target_lin", "target_quad"] if batch[el] is not None ], axis=1) batch = { - k: v - for k, v in batch.items() - if k in ["source", "target", "condition", "time", "noise", "latent"] + k: v for k, v in batch.items() if k in + ["source", "target", "source_conditions", "time", "noise", "latent"] } - (batch["source"], batch["condition"] + (batch["source"], batch["source_conditions"] ), (batch["target"],) = self._sample_conditional_indices_from_tmap( rng_resample, tmat, - self.k_samples_per_x, (batch["source"], batch["condition"]), + self.k_samples_per_x, (batch["source"], batch["source_conditions"]), 
(batch["target"],), source_is_balanced=(self.tau_a == 1.0) ) @@ -268,10 +266,10 @@ def __call__(self, train_loader, valid_loader) -> None: rng_latent_data_match = jax.random.split( rng_latent_data_match, self.k_samples_per_x ) - (batch["source"], batch["condition"] + (batch["source"], batch["source_conditions"] ), (batch["target"],) = jax.vmap(self._resample_data, 0, 0)( rng_latent_data_match, tmats_latent_data, - (batch["source"], batch["condition"]), (batch["target"],) + (batch["source"], batch["source_conditions"]), (batch["target"],) ) batch = { key: @@ -287,7 +285,7 @@ def __call__(self, train_loader, valid_loader) -> None: self.state_eta, self.state_xi, eta_predictions, xi_predictions, loss_a, loss_b = self.unbalancedness_step_fn( source=batch["source"], target=batch["target"], - condition=batch["condition"], + condition=batch["source_conditions"], a=tmat.sum(axis=1), b=tmat.sum(axis=0), state_eta=self.state_eta, @@ -299,8 +297,8 @@ def __call__(self, train_loader, valid_loader) -> None: states_to_save = { "state_neural_vector_field": self.state_neural_vector_field } - if self.state_mlp is not None: - states_to_save["state_eta"] = self.state_mlp + if self.state_eta is not None: + states_to_save["state_eta"] = self.state_eta if self.state_xi is not None: states_to_save["state_xi"] = self.state_xi self.checkpoint_manager.save(iteration, states_to_save) @@ -326,7 +324,9 @@ def loss_fn( ) cond_input = jnp.concatenate([ - batch[el] for el in ["source", "condition"] if batch[el] is not None + batch[el] + for el in ["source", "source_conditions"] + if batch[el] is not None ], axis=1) v_t = jax.vmap(apply_fn)( diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index edb635e90..fb70cbb4d 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -58,14 +58,12 @@ def genot_data_loader_linear_conditional(): rng = np.random.default_rng(seed=0) source = rng.normal(size=(100, 2)) target = rng.normal(size=(100, 2)) + 1.0 - conditions_source = 
rng.normal(size=(100, 4)) - conditions_target = rng.normal(size=(100, 4)) - 1.0 + source_conditions = rng.normal(size=(100, 4)) return OTDataLoader( 16, source_lin=source, target_lin=target, - conditions_source=conditions_source, - conditions_target=conditions_target + source_conditions=source_conditions, ) @@ -84,13 +82,12 @@ def genot_data_loader_quad_conditional(): rng = np.random.default_rng(seed=0) source = rng.normal(size=(100, 2)) target = rng.normal(size=(100, 1)) + 1.0 - conditions = rng.normal(size=(100, 7)) + source_conditions = rng.normal(size=(100, 7)) return OTDataLoader( 16, source_quad=source, target_quad=target, - source_conditions=conditions, - target_conditions=conditions + source_conditions=source_conditions, ) @@ -119,13 +116,12 @@ def genot_data_loader_fused_conditional(): target_q = rng.normal(size=(100, 1)) + 1.0 source_lin = rng.normal(size=(100, 2)) target_lin = rng.normal(size=(100, 2)) + 1.0 - conditions = rng.normal(size=(100, 7)) + source_conditions = rng.normal(size=(100, 7)) return OTDataLoader( 16, source_lin=source_lin, source_quad=source_q, target_lin=target_lin, target_quad=target_q, - source_conditions=conditions, - target_conditions=conditions + source_conditions=source_conditions, ) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index ed65fc657..620c92138 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -36,9 +36,11 @@ def test_genot_linear_unconditional( ): solver_latent_to_data = None if solver_latent_to_data is None else sinkhorn.Sinkhorn( ) - source_lin, source_quad, target_lin, target_quad, condition = next( - genot_data_loader_linear - ) + batch = next(genot_data_loader_linear) + source_lin, source_quad, target_lin, target_quad, source_condition = batch[ + "source_lin"], batch["source_quad"], batch["target_lin"], batch[ + "target_quad"], batch["source_conditions"] + source_dim = source_lin.shape[1] target_dim = target_lin.shape[1] condition_dim = 0 @@ -69,11 +71,13 @@ def 
test_genot_linear_unconditional( ) genot(genot_data_loader_linear, genot_data_loader_linear) - source_lin, source_quad, target_lin, target_quad, condition = next( - genot_data_loader_linear - ) + batch = next(genot_data_loader_linear) + source_lin, source_quad, target_lin, target_quad, source_condition = batch[ + "source_lin"], batch["source_quad"], batch["target_lin"], batch[ + "target_quad"], batch["source_conditions"] + result_forward = genot.transport( - source_lin, condition=condition, forward=True + source_lin, condition=source_condition, forward=True ) assert isinstance(result_forward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_forward)) == 0 @@ -85,9 +89,11 @@ def test_genot_quad_unconditional( solver_latent_to_data: Optional[str] ): None if solver_latent_to_data is None else sinkhorn.Sinkhorn() - source_lin, source_quad, target_lin, target_quad, condition = next( - genot_data_loader_quad - ) + batch = next(genot_data_loader_quad) + source_lin, source_quad, target_lin, target_quad, source_condition = batch[ + "source_lin"], batch["source_quad"], batch["target_lin"], batch[ + "target_quad"], batch["source_conditions"] + source_dim = source_quad.shape[1] target_dim = target_quad.shape[1] condition_dim = 0 @@ -117,7 +123,7 @@ def test_genot_quad_unconditional( genot(genot_data_loader_quad, genot_data_loader_quad) result_forward = genot.transport( - source_quad, condition=condition, forward=True + source_quad, condition=source_condition, forward=True ) assert isinstance(result_forward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_forward)) == 0 @@ -129,9 +135,11 @@ def test_genot_fused_unconditional( solver_latent_to_data: Optional[str] ): None if solver_latent_to_data is None else sinkhorn.Sinkhorn() - source_lin, source_quad, target_lin, target_quad, condition = next( - genot_data_loader_fused - ) + batch = next(genot_data_loader_fused) + batch = source_lin, source_quad, target_lin, target_quad, source_condition = batch[ + "source_lin"], 
batch["source_quad"], batch["target_lin"], batch[ + "target_quad"], batch["source_conditions"] + source_dim = source_lin.shape[1] + source_quad.shape[1] target_dim = target_lin.shape[1] + target_quad.shape[1] condition_dim = 0 @@ -162,7 +170,7 @@ def test_genot_fused_unconditional( result_forward = genot.transport( jnp.concatenate((source_lin, source_quad), axis=1), - condition=condition, + condition=source_condition, forward=True ) assert isinstance(result_forward, jnp.ndarray) @@ -175,12 +183,12 @@ def test_genot_linear_conditional( k_samples_per_x: int, solver_latent_to_data: Optional[str] ): None if solver_latent_to_data is None else sinkhorn.Sinkhorn() - source_lin, source_quad, target_lin, target_quad, condition = next( - genot_data_loader_linear_conditional - ) + batch = next(genot_data_loader_linear_conditional) + source_lin, target_lin, source_condition = batch["source_lin"], batch[ + "target_lin"], batch["source_conditions"] source_dim = source_lin.shape[1] target_dim = target_lin.shape[1] - condition_dim = condition.shape[1] + condition_dim = source_condition.shape[1] neural_vf = NeuralVectorField( output_dim=target_dim, @@ -209,12 +217,8 @@ def test_genot_linear_conditional( genot_data_loader_linear_conditional, genot_data_loader_linear_conditional ) - - source_lin, source_quad, target_lin, target_quad, condition = next( - genot_data_loader_linear_conditional - ) result_forward = genot.transport( - source_lin, condition=condition, forward=True + source_lin, condition=source_condition, forward=True ) assert isinstance(result_forward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_forward)) == 0 @@ -226,12 +230,14 @@ def test_genot_quad_conditional( solver_latent_to_data: Optional[str] ): None if solver_latent_to_data is None else sinkhorn.Sinkhorn() - source_lin, source_quad, target_lin, target_quad, condition = next( - genot_data_loader_quad_conditional - ) + batch = next(genot_data_loader_quad_conditional) + source_lin, source_quad, target_lin, 
target_quad, source_condition = batch[ + "source_lin"], batch["source_quad"], batch["target_lin"], batch[ + "target_quad"], batch["source_conditions"] + source_dim = source_quad.shape[1] target_dim = target_quad.shape[1] - condition_dim = condition.shape[1] + condition_dim = source_condition.shape[1] neural_vf = NeuralVectorField( output_dim=target_dim, condition_dim=source_dim + condition_dim, @@ -260,7 +266,7 @@ def test_genot_quad_conditional( ) result_forward = genot.transport( - source_quad, condition=condition, forward=True + source_quad, condition=source_condition, forward=True ) assert isinstance(result_forward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_forward)) == 0 @@ -272,12 +278,13 @@ def test_genot_fused_conditional( solver_latent_to_data: Optional[str] ): None if solver_latent_to_data is None else sinkhorn.Sinkhorn() - source_lin, source_quad, target_lin, target_quad, condition = next( - genot_data_loader_fused_conditional - ) + batch = next(genot_data_loader_fused_conditional) + source_lin, source_quad, target_lin, target_quad, source_condition = batch[ + "source_lin"], batch["source_quad"], batch["target_lin"], batch[ + "target_quad"], batch["source_conditions"] source_dim = source_lin.shape[1] + source_quad.shape[1] target_dim = target_lin.shape[1] + target_quad.shape[1] - condition_dim = condition.shape[1] + condition_dim = source_condition.shape[1] neural_vf = NeuralVectorField( output_dim=target_dim, condition_dim=source_dim + condition_dim, @@ -307,7 +314,7 @@ def test_genot_fused_conditional( result_forward = genot.transport( jnp.concatenate((source_lin, source_quad), axis=1), - condition=condition, + condition=source_condition, forward=True ) assert isinstance(result_forward, jnp.ndarray) @@ -323,12 +330,14 @@ def test_genot_linear_learn_rescaling( None if solver_latent_to_data is None else sinkhorn.Sinkhorn() data_loader = genot_data_loader_linear_conditional if conditional else genot_data_loader_linear - source_lin, source_quad, 
target_lin, target_quad, condition = next( - data_loader - ) + batch = next(data_loader) + source_lin, source_quad, target_lin, target_quad, source_condition = batch[ + "source_lin"], batch["source_quad"], batch["target_lin"], batch[ + "target_quad"], batch["source_conditions"] + source_dim = source_lin.shape[1] target_dim = target_lin.shape[1] - condition_dim = condition.shape[1] if conditional else 0 + condition_dim = source_condition.shape[1] if conditional else 0 neural_vf = NeuralVectorField( output_dim=target_dim, @@ -363,10 +372,10 @@ def test_genot_linear_learn_rescaling( genot(data_loader, data_loader) - result_eta = genot.evaluate_eta(source_lin, condition=condition) + result_eta = genot.evaluate_eta(source_lin, condition=source_condition) assert isinstance(result_eta, jnp.ndarray) assert jnp.sum(jnp.isnan(result_eta)) == 0 - result_xi = genot.evaluate_xi(target_lin, condition=condition) + result_xi = genot.evaluate_xi(target_lin, condition=source_condition) assert isinstance(result_xi, jnp.ndarray) assert jnp.sum(jnp.isnan(result_xi)) == 0 From bfcfcbdbf667d714e899af409db2e9dbd7203e54 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 28 Nov 2023 18:29:27 +0100 Subject: [PATCH 037/186] [ci skip] fix otfm tests --- tests/neural/conftest.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index fb70cbb4d..1aa567d8f 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -10,7 +10,7 @@ def data_loader_gaussian(): rng = np.random.default_rng(seed=0) source = rng.normal(size=(100, 2)) target = rng.normal(size=(100, 2)) + 1.0 - return OTDataLoader(source, target, 16) + return OTDataLoader(16, source_lin=source, target_lin=target) @pytest.fixture(scope="module") @@ -23,10 +23,10 @@ def data_loader_gaussian_conditional(): source_1 = rng.normal(size=(100, 2)) target_1 = rng.normal(size=(100, 2)) - 2.0 dl0 = OTDataLoader( - source_0, target_0, 16, 
source_conditions=np.zeros_like(source_0) * 0.0 + 16, source_lin=source_0, target_lin=target_0, source_conditions=np.zeros_like(source_0) * 0.0 ) dl1 = OTDataLoader( - source_1, target_1, 16, source_conditions=np.ones_like(source_1) * 1.0 + 16, source_lin=source_1, target_lin=target_1, source_conditions=np.ones_like(source_1) * 1.0 ) return ConditionalDataLoader({"0": dl0, "1": dl1}, np.array([0.5, 0.5])) @@ -40,7 +40,7 @@ def data_loader_gaussian_with_conditions(): target = rng.normal(size=(100, 2)) + 1.0 source_conditions = rng.normal(size=(100, 1)) target_conditions = rng.normal(size=(100, 1)) - 1.0 - return OTDataLoader(source, target, 16, source_conditions, target_conditions) + return OTDataLoader(16, source_lin=source, target_lin=target, source_conditions=source_conditions, target_conditions=target_conditions) @pytest.fixture(scope="module") From f27bc22ba84c462d00e5f25548c3b691fde250f7 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 28 Nov 2023 18:34:50 +0100 Subject: [PATCH 038/186] [ci skip] fix otfm tests --- tests/neural/conftest.py | 18 +++++++++++++++--- tests/neural/otfm_test.py | 8 ++++---- 2 files changed, 19 insertions(+), 7 deletions(-) diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index 1aa567d8f..c6d25b128 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -23,10 +23,16 @@ def data_loader_gaussian_conditional(): source_1 = rng.normal(size=(100, 2)) target_1 = rng.normal(size=(100, 2)) - 2.0 dl0 = OTDataLoader( - 16, source_lin=source_0, target_lin=target_0, source_conditions=np.zeros_like(source_0) * 0.0 + 16, + source_lin=source_0, + target_lin=target_0, + source_conditions=np.zeros_like(source_0) * 0.0 ) dl1 = OTDataLoader( - 16, source_lin=source_1, target_lin=target_1, source_conditions=np.ones_like(source_1) * 1.0 + 16, + source_lin=source_1, + target_lin=target_1, + source_conditions=np.ones_like(source_1) * 1.0 ) return ConditionalDataLoader({"0": dl0, "1": dl1}, np.array([0.5, 0.5])) @@ 
-40,7 +46,13 @@ def data_loader_gaussian_with_conditions(): target = rng.normal(size=(100, 2)) + 1.0 source_conditions = rng.normal(size=(100, 1)) target_conditions = rng.normal(size=(100, 1)) - 1.0 - return OTDataLoader(16, source_lin=source, target_lin=target, source_conditions=source_conditions, target_conditions=target_conditions) + return OTDataLoader( + 16, + source_lin=source, + target_lin=target, + source_conditions=source_conditions, + target_conditions=target_conditions + ) @pytest.fixture(scope="module") diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index 4346b6be8..04413914d 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -68,7 +68,7 @@ def test_flow_matching(self, data_loader_gaussian, flow: Type[BaseFlow]): result_backward = fm.transport( batch["target_lin"], - condition=batch["target_conditions"], + condition=batch["source_conditions"], forward=False ) assert isinstance(result_backward, jnp.ndarray) @@ -116,7 +116,7 @@ def test_flow_matching_with_conditions( result_backward = fm.transport( batch["target_lin"], - condition=batch["target_conditions"], + condition=batch["source_conditions"], forward=False ) assert isinstance(result_backward, jnp.ndarray) @@ -161,7 +161,7 @@ def test_flow_matching_conditional( result_backward = fm.transport( batch["target_lin"], - condition=batch["target_conditions"], + condition=batch["source_conditions"], forward=False ) assert isinstance(result_backward, jnp.ndarray) @@ -214,7 +214,7 @@ def test_flow_matching_learn_rescaling( assert jnp.sum(jnp.isnan(result_eta)) == 0 result_xi = fm.evaluate_xi( - batch["target_lin"], condition=batch["target_conditions"] + batch["target_lin"], condition=batch["source_conditions"] ) assert isinstance(result_xi, jnp.ndarray) assert jnp.sum(jnp.isnan(result_xi)) == 0 From 384e8fcc15fadd1feba75301c74a457c1c6f0bf7 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 28 Nov 2023 18:59:24 +0100 Subject: [PATCH 039/186] add scale cost to otfm 
--- src/ott/neural/solvers/otfm.py | 27 +++++++++++++++++++++------ 1 file changed, 21 insertions(+), 6 deletions(-) diff --git a/src/ott/neural/solvers/otfm.py b/src/ott/neural/solvers/otfm.py index 2afc94e6d..83b2ceaa6 100644 --- a/src/ott/neural/solvers/otfm.py +++ b/src/ott/neural/solvers/otfm.py @@ -14,7 +14,17 @@ import functools import types from collections import defaultdict -from typing import Any, Callable, Dict, Mapping, Optional, Tuple, Type +from typing import ( + Any, + Callable, + Dict, + Literal, + Mapping, + Optional, + Tuple, + Type, + Union, +) import diffrax import jax @@ -27,13 +37,13 @@ from ott.geometry import costs from ott.neural.models.models import BaseNeuralVectorField from ott.neural.solvers.base_solver import ( - BaseNeuralSolver, - ResampleMixin, - UnbalancednessMixin, + BaseNeuralSolver, + ResampleMixin, + UnbalancednessMixin, ) from ott.neural.solvers.flows import ( - BaseFlow, - BaseTimeSampler, + BaseFlow, + BaseTimeSampler, ) from ott.solvers import was_solver @@ -56,6 +66,7 @@ class OTFlowMatching(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): checkpoint_manager: Checkpoint manager. epsilon: Entropy regularization term of the OT OT problem solved by the `ot_solver`. cost_fn: Cost function for the OT problem solved by the `ot_solver`. + scale_cost: How to scale the cost matrix for the OT problem solved by the `ot_solver`. tau_a: If :math:`<1`, defines how much unbalanced the problem is on the first marginal. 
tau_b: If :math:`< 1`, defines how much unbalanced the problem is @@ -85,6 +96,9 @@ def __init__( checkpoint_manager: Type[checkpoint.CheckpointManager] = None, epsilon: float = 1e-2, cost_fn: Type[costs.CostFn] = costs.SqEuclidean(), + scale_cost: Union[bool, int, float, + Literal["mean", "max_norm", "max_bound", "max_cost", + "median"]] = "mean", tau_a: float = 1.0, tau_b: float = 1.0, mlp_eta: Callable[[jnp.ndarray], float] = None, @@ -123,6 +137,7 @@ def __init__( self.optimizer = optimizer self.epsilon = epsilon self.cost_fn = cost_fn + self.scale_cost = scale_cost self.callback_fn = callback_fn self.checkpoint_manager = checkpoint_manager self.rng = rng From ef204e69a11725b1e2ce060e1ebf3a500a13d504 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Wed, 29 Nov 2023 10:37:33 +0100 Subject: [PATCH 040/186] incorporate feedback partially --- src/ott/datasets.py | 22 ++++++----------- src/ott/neural/models/models.py | 4 ++-- src/ott/neural/solvers/base_solver.py | 34 +++++++++++++-------------- src/ott/neural/solvers/genot.py | 4 ++-- src/ott/neural/solvers/otfm.py | 28 +++++++++++----------- tests/geometry/scaling_cost_test.py | 4 ++-- tests/neural/genot_test.py | 21 +++++++---------- tests/neural/otfm_test.py | 6 ++--- 8 files changed, 56 insertions(+), 67 deletions(-) diff --git a/src/ott/datasets.py b/src/ott/datasets.py index 07bd87fb9..9ddc0435a 100644 --- a/src/ott/datasets.py +++ b/src/ott/datasets.py @@ -51,13 +51,13 @@ class GaussianMixture: rectangle batch_size: batch size of the samples - init_rng: initial PRNG key + rng: initial PRNG key scale: scale of the Gaussian means std: the standard deviation of the individual Gaussian samples """ name: Name_t batch_size: int - init_rng: jnp.ndarray + rng: jnp.ndarray scale: float = 5.0 std: float = 0.5 @@ -96,7 +96,7 @@ def __iter__(self) -> Iterator[jnp.array]: return self._create_sample_generators() def _create_sample_generators(self) -> Iterator[jnp.array]: - rng = self.init_rng + rng = self.rng while 
True: rng1, rng2, rng = jax.random.split(rng, 3) means = jax.random.choice(rng1, self.centers, (self.batch_size,)) @@ -128,26 +128,18 @@ def create_gaussian_mixture_samplers( rng1, rng2, rng3, rng4 = jax.random.split(rng, 4) train_dataset = Dataset( source_iter=iter( - GaussianMixture( - name_source, batch_size=train_batch_size, init_rng=rng1 - ) + GaussianMixture(name_source, batch_size=train_batch_size, rng=rng1) ), target_iter=iter( - GaussianMixture( - name_target, batch_size=train_batch_size, init_rng=rng2 - ) + GaussianMixture(name_target, batch_size=train_batch_size, rng=rng2) ) ) valid_dataset = Dataset( source_iter=iter( - GaussianMixture( - name_source, batch_size=valid_batch_size, init_rng=rng3 - ) + GaussianMixture(name_source, batch_size=valid_batch_size, rng=rng3) ), target_iter=iter( - GaussianMixture( - name_target, batch_size=valid_batch_size, init_rng=rng4 - ) + GaussianMixture(name_target, batch_size=valid_batch_size, rng=rng4) ) ) dim_data = 2 diff --git a/src/ott/neural/models/models.py b/src/ott/neural/models/models.py index c8bcb00ce..f2ef76162 100644 --- a/src/ott/neural/models/models.py +++ b/src/ott/neural/models/models.py @@ -35,7 +35,7 @@ from ott.problems.linear import linear_problem __all__ = [ - "ICNN", "MLP", "MetaInitializer", "NeuralVectorField", "Rescaling_MLP" + "ICNN", "MLP", "MetaInitializer", "NeuralVectorField", "RescalingMLP" ] @@ -585,7 +585,7 @@ def create_train_state( ) -class Rescaling_MLP(BaseRescalingNet): +class RescalingMLP(BaseRescalingNet): """Network to learn distributional rescaling factors based on a MLP. The input is passed through a block consisting of ``num_layers_per_block`` with size ``hidden_dim``. 
diff --git a/src/ott/neural/solvers/base_solver.py b/src/ott/neural/solvers/base_solver.py index cc9a4c310..ce20d09c5 100644 --- a/src/ott/neural/solvers/base_solver.py +++ b/src/ott/neural/solvers/base_solver.py @@ -200,26 +200,26 @@ def _get_gromov_match_fn( fused_penalty: float, ) -> Callable: if isinstance(cost_fn, Mapping): - assert "x_cost_fn" in cost_fn - assert "y_cost_fn" in cost_fn - x_cost_fn = cost_fn["x_cost_fn"] - y_cost_fn = cost_fn["y_cost_fn"] + assert "cost_fn_xx" in cost_fn + assert "cost_fn_yy" in cost_fn + cost_fn_xx = cost_fn["cost_fn_xx"] + cost_fn_yy = cost_fn["cost_fn_yy"] if fused_penalty > 0: - assert "xy_cost_fn" in x_cost_fn - xy_cost_fn = cost_fn["xy_cost_fn"] + assert "cost_fn_xy" in cost_fn_xx + cost_fn_xy = cost_fn["cost_fn_xy"] else: - x_cost_fn = y_cost_fn = xy_cost_fn = cost_fn + cost_fn_xx = cost_fn_yy = cost_fn_xy = cost_fn if isinstance(scale_cost, Mapping): - assert "x_scale_cost" in scale_cost - assert "y_scale_cost" in scale_cost - x_scale_cost = scale_cost["x_scale_cost"] - y_scale_cost = scale_cost["y_scale_cost"] + assert "scale_cost_xx" in scale_cost + assert "scale_cost_yy" in scale_cost + scale_cost_xx = scale_cost["scale_cost_xx"] + scale_cost_yy = scale_cost["scale_cost_yy"] if fused_penalty > 0: - assert "xy_scale_cost" in scale_cost - xy_scale_cost = cost_fn["xy_scale_cost"] + assert "scale_cost_xy" in scale_cost + scale_cost_xy = cost_fn["scale_cost_xy"] else: - x_scale_cost = y_scale_cost = xy_scale_cost = scale_cost + scale_cost_xx = scale_cost_yy = scale_cost_xy = scale_cost def match_pairs( x_lin: Optional[jnp.ndarray], @@ -228,14 +228,14 @@ def match_pairs( y_quad: Tuple[jnp.ndarray, jnp.ndarray], ) -> Tuple[jnp.array, jnp.array]: geom_xx = pointcloud.PointCloud( - x=x_quad, y=x_quad, cost_fn=x_cost_fn, scale_cost=x_scale_cost + x=x_quad, y=x_quad, cost_fn=cost_fn_xx, scale_cost=scale_cost_xx ) geom_yy = pointcloud.PointCloud( - x=y_quad, y=y_quad, cost_fn=y_cost_fn, scale_cost=y_scale_cost + x=y_quad, 
y=y_quad, cost_fn=cost_fn_yy, scale_cost=scale_cost_yy ) if fused_penalty > 0: geom_xy = pointcloud.PointCloud( - x=x_lin, y=y_lin, cost_fn=xy_cost_fn, scale_cost=xy_scale_cost + x=x_lin, y=y_lin, cost_fn=cost_fn_xy, scale_cost=scale_cost_xy ) else: geom_xy = None diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/solvers/genot.py index f6e7cd3e3..72adce17e 100644 --- a/src/ott/neural/solvers/genot.py +++ b/src/ott/neural/solvers/genot.py @@ -56,8 +56,8 @@ class GENOT(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): valid_freq: Frequency of validation. ot_solver: OT solver to match samples from the source and the target distribution. epsilon: Entropy regularization term of the OT problem solved by `ot_solver`. - cost_fn: Cost function for the OT problem solved by the `ot_solver`. In the linear case, this is always expected to be of type `str`. If the problem is of quadratic type and `cost_fn` is a string, the `cost_fn` is used for all terms, i.e. both quadratic terms and, if applicable, the linear temr. If of type :class:`dict`, the keys are expected to be `x_cost_fn`, `y_cost_fn`, and if applicable, `xy_cost_fn`. - scale_cost: How to scale the cost matrix for the OT problem solved by the `ot_solver`. In the linear case, this is always expected to be not a :class:`dict`. If the problem is of quadratic type and `scale_cost` is a string, the `scale_cost` argument is used for all terms, i.e. both quadratic terms and, if applicable, the linear temr. If of type :class:`dict`, the keys are expected to be `x_scale_cost`, `y_scale_cost`, and if applicable, `xy_scale_cost`. + cost_fn: Cost function for the OT problem solved by the `ot_solver`. In the linear case, this is always expected to be of type `str`. If the problem is of quadratic type and `cost_fn` is a string, the `cost_fn` is used for all terms, i.e. both quadratic terms and, if applicable, the linear temr. 
If of type :class:`dict`, the keys are expected to be `cost_fn_xx`, `cost_fn_yy`, and if applicable, `cost_fn_xy`. + scale_cost: How to scale the cost matrix for the OT problem solved by the `ot_solver`. In the linear case, this is always expected to be not a :class:`dict`. If the problem is of quadratic type and `scale_cost` is a string, the `scale_cost` argument is used for all terms, i.e. both quadratic terms and, if applicable, the linear temr. If of type :class:`dict`, the keys are expected to be `scale_cost_xx`, `scale_cost_yy`, and if applicable, `scale_cost_xy`. optimizer: Optimizer for `neural_vector_field`. flow: Flow between latent distribution and target distribution. time_sampler: Sampler for the time. diff --git a/src/ott/neural/solvers/otfm.py b/src/ott/neural/solvers/otfm.py index 83b2ceaa6..b69d7978e 100644 --- a/src/ott/neural/solvers/otfm.py +++ b/src/ott/neural/solvers/otfm.py @@ -15,15 +15,15 @@ import types from collections import defaultdict from typing import ( - Any, - Callable, - Dict, - Literal, - Mapping, - Optional, - Tuple, - Type, - Union, + Any, + Callable, + Dict, + Literal, + Mapping, + Optional, + Tuple, + Type, + Union, ) import diffrax @@ -37,13 +37,13 @@ from ott.geometry import costs from ott.neural.models.models import BaseNeuralVectorField from ott.neural.solvers.base_solver import ( - BaseNeuralSolver, - ResampleMixin, - UnbalancednessMixin, + BaseNeuralSolver, + ResampleMixin, + UnbalancednessMixin, ) from ott.neural.solvers.flows import ( - BaseFlow, - BaseTimeSampler, + BaseFlow, + BaseTimeSampler, ) from ott.solvers import was_solver diff --git a/tests/geometry/scaling_cost_test.py b/tests/geometry/scaling_cost_test.py index ce3f616ce..9f4ad1d57 100644 --- a/tests/geometry/scaling_cost_test.py +++ b/tests/geometry/scaling_cost_test.py @@ -188,7 +188,7 @@ def apply_sinkhorn(cost1, cost2, scale_cost): np.testing.assert_allclose(1.0, geom.cost_matrix.max(), rtol=1e-4) @pytest.mark.parametrize("batch_size", [5, 12]) - def 
test_max_scale_cost_low_rank_with_batch(self, batch_size: int): + def test_max_scale_cost_low_rank_with_batch(self, batch_size: int): """Test max_cost options for low rank with batch_size fixed.""" geom0 = low_rank.LRCGeometry( @@ -199,7 +199,7 @@ def test_max_scale_cost_low_rank_with_batch(self, batch_size: int): geom0.inv_scale_cost, 1.0 / jnp.max(self.cost_lr), rtol=1e-4 ) - def test_max_scale_cost_low_rank_large_array(self): + def test_max_scale_cost_low_rank_large_array(self): """Test max_cost options for large matrices.""" _, *rngs = jax.random.split(self.rng, 3) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 620c92138..5c7c1d431 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -18,7 +18,7 @@ import pytest from ott.geometry import costs -from ott.neural.models.models import NeuralVectorField, Rescaling_MLP +from ott.neural.models.models import NeuralVectorField, RescalingMLP from ott.neural.solvers.flows import OffsetUniformSampler, UniformSampler from ott.neural.solvers.genot import GENOT from ott.solvers.linear import sinkhorn @@ -90,9 +90,8 @@ def test_genot_quad_unconditional( ): None if solver_latent_to_data is None else sinkhorn.Sinkhorn() batch = next(genot_data_loader_quad) - source_lin, source_quad, target_lin, target_quad, source_condition = batch[ - "source_lin"], batch["source_quad"], batch["target_lin"], batch[ - "target_quad"], batch["source_conditions"] + source_quad, target_quad, source_condition = batch["source_quad"], batch[ + "target_quad"], batch["source_conditions"] source_dim = source_quad.shape[1] target_dim = target_quad.shape[1] @@ -231,9 +230,8 @@ def test_genot_quad_conditional( ): None if solver_latent_to_data is None else sinkhorn.Sinkhorn() batch = next(genot_data_loader_quad_conditional) - source_lin, source_quad, target_lin, target_quad, source_condition = batch[ - "source_lin"], batch["source_quad"], batch["target_lin"], batch[ - "target_quad"], 
batch["source_conditions"] + source_quad, target_quad, source_condition = batch["source_quad"], batch[ + "target_quad"], batch["source_conditions"] source_dim = source_quad.shape[1] target_dim = target_quad.shape[1] @@ -331,9 +329,8 @@ def test_genot_linear_learn_rescaling( data_loader = genot_data_loader_linear_conditional if conditional else genot_data_loader_linear batch = next(data_loader) - source_lin, source_quad, target_lin, target_quad, source_condition = batch[ - "source_lin"], batch["source_quad"], batch["target_lin"], batch[ - "target_quad"], batch["source_conditions"] + source_lin, target_lin, source_condition = batch["source_lin"], batch[ + "target_lin"], batch["source_conditions"] source_dim = source_lin.shape[1] target_dim = target_lin.shape[1] @@ -349,8 +346,8 @@ def test_genot_linear_learn_rescaling( optimizer = optax.adam(learning_rate=1e-3) tau_a = 0.9 tau_b = 0.2 - mlp_eta = Rescaling_MLP(hidden_dim=4, condition_dim=condition_dim) - mlp_xi = Rescaling_MLP(hidden_dim=4, condition_dim=condition_dim) + mlp_eta = RescalingMLP(hidden_dim=4, condition_dim=condition_dim) + mlp_xi = RescalingMLP(hidden_dim=4, condition_dim=condition_dim) genot = GENOT( neural_vf, input_dim=source_dim, diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index 04413914d..d8deb1102 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -17,7 +17,7 @@ import optax import pytest -from ott.neural.models.models import NeuralVectorField, Rescaling_MLP +from ott.neural.models.models import NeuralVectorField, RescalingMLP from ott.neural.solvers.flows import ( BaseFlow, BrownianNoiseFlow, @@ -188,8 +188,8 @@ def test_flow_matching_learn_rescaling( tau_a = 0.9 tau_b = 0.2 - mlp_eta = Rescaling_MLP(hidden_dim=4, condition_dim=condition_dim) - mlp_xi = Rescaling_MLP(hidden_dim=4, condition_dim=condition_dim) + mlp_eta = RescalingMLP(hidden_dim=4, condition_dim=condition_dim) + mlp_xi = RescalingMLP(hidden_dim=4, condition_dim=condition_dim) fm = 
OTFlowMatching( neural_vf, input_dim=source_dim, From 2b1ab921258ba63bb473ab397cef1f481314e7a1 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Wed, 29 Nov 2023 11:18:50 +0100 Subject: [PATCH 041/186] resolve circular import errors --- src/ott/neural/__init__.py | 2 +- src/ott/neural/models/__init__.py | 2 +- src/ott/neural/{ => models}/base_models.py | 0 src/ott/neural/{ => models}/layers.py | 0 src/ott/neural/{ => models}/losses.py | 0 src/ott/neural/{ => models}/models.py | 3 ++- tests/neural/losses_test.py | 2 +- tests/neural/map_estimator_test.py | 2 +- 8 files changed, 6 insertions(+), 5 deletions(-) rename src/ott/neural/{ => models}/base_models.py (100%) rename src/ott/neural/{ => models}/layers.py (100%) rename src/ott/neural/{ => models}/losses.py (100%) rename src/ott/neural/{ => models}/models.py (99%) diff --git a/src/ott/neural/__init__.py b/src/ott/neural/__init__.py index aa1ca23fa..16f90c799 100644 --- a/src/ott/neural/__init__.py +++ b/src/ott/neural/__init__.py @@ -11,4 +11,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from . import layers, losses, models, solvers +from . import models, solvers, data diff --git a/src/ott/neural/models/__init__.py b/src/ott/neural/models/__init__.py index d2a583f34..5e6590cd1 100644 --- a/src/ott/neural/models/__init__.py +++ b/src/ott/neural/models/__init__.py @@ -11,4 +11,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from . import base_models, conjugate_solvers, layers, models +from . 
import base_models, models, losses, layers diff --git a/src/ott/neural/base_models.py b/src/ott/neural/models/base_models.py similarity index 100% rename from src/ott/neural/base_models.py rename to src/ott/neural/models/base_models.py diff --git a/src/ott/neural/layers.py b/src/ott/neural/models/layers.py similarity index 100% rename from src/ott/neural/layers.py rename to src/ott/neural/models/layers.py diff --git a/src/ott/neural/losses.py b/src/ott/neural/models/losses.py similarity index 100% rename from src/ott/neural/losses.py rename to src/ott/neural/models/losses.py diff --git a/src/ott/neural/models.py b/src/ott/neural/models/models.py similarity index 99% rename from src/ott/neural/models.py rename to src/ott/neural/models/models.py index 4087853dd..15fdcfcbc 100644 --- a/src/ott/neural/models.py +++ b/src/ott/neural/models/models.py @@ -26,9 +26,10 @@ from ott.geometry import geometry from ott.initializers.linear import initializers as lin_init from ott.math import matrix_square_root -from ott.neural import layers +from ott.neural.models import layers from ott.neural.solvers import neuraldual from ott.problems.linear import linear_problem +from ott.neural.models.base_models import BaseNeuralVectorField, BaseRescalingNet __all__ = [ "ICNN", "MLP", "MetaInitializer", "NeuralVectorField", "RescalingMLP" diff --git a/tests/neural/losses_test.py b/tests/neural/losses_test.py index a432f2dc5..4569b04d1 100644 --- a/tests/neural/losses_test.py +++ b/tests/neural/losses_test.py @@ -18,7 +18,7 @@ import pytest from ott.geometry import costs -from ott.neural import losses, models +from ott.neural.models import losses, models @pytest.mark.fast() diff --git a/tests/neural/map_estimator_test.py b/tests/neural/map_estimator_test.py index e0ec0b56b..96f9a9797 100644 --- a/tests/neural/map_estimator_test.py +++ b/tests/neural/map_estimator_test.py @@ -18,7 +18,7 @@ from ott import datasets from ott.geometry import pointcloud -from ott.neural import losses, models +from 
ott.neural.models import losses, models from ott.neural.solvers import map_estimator from ott.tools import sinkhorn_divergence From e1be6ca6c0c6d07490f976f92b75e8ebefd7b11f Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Wed, 29 Nov 2023 11:23:48 +0100 Subject: [PATCH 042/186] resolve a few pre-commit errors --- src/ott/neural/__init__.py | 2 +- src/ott/neural/models/__init__.py | 2 +- src/ott/neural/models/models.py | 5 ++++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/src/ott/neural/__init__.py b/src/ott/neural/__init__.py index 16f90c799..326fae432 100644 --- a/src/ott/neural/__init__.py +++ b/src/ott/neural/__init__.py @@ -11,4 +11,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from . import models, solvers, data +from . import data, models, solvers diff --git a/src/ott/neural/models/__init__.py b/src/ott/neural/models/__init__.py index 5e6590cd1..1e374d236 100644 --- a/src/ott/neural/models/__init__.py +++ b/src/ott/neural/models/__init__.py @@ -11,4 +11,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from . import base_models, models, losses, layers +from . 
import base_models, layers, losses, models diff --git a/src/ott/neural/models/models.py b/src/ott/neural/models/models.py index 15fdcfcbc..5c7b3f30e 100644 --- a/src/ott/neural/models/models.py +++ b/src/ott/neural/models/models.py @@ -27,9 +27,12 @@ from ott.initializers.linear import initializers as lin_init from ott.math import matrix_square_root from ott.neural.models import layers +from ott.neural.models.base_models import ( + BaseNeuralVectorField, + BaseRescalingNet, +) from ott.neural.solvers import neuraldual from ott.problems.linear import linear_problem -from ott.neural.models.base_models import BaseNeuralVectorField, BaseRescalingNet __all__ = [ "ICNN", "MLP", "MetaInitializer", "NeuralVectorField", "RescalingMLP" From a307bf8791b479d3843cbe80dda71a99d8a4f1dd Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Wed, 29 Nov 2023 11:42:40 +0100 Subject: [PATCH 043/186] resolve pre-commit errors --- src/ott/neural/data/dataloaders.py | 6 ++- src/ott/neural/models/models.py | 25 +++++++++--- src/ott/neural/solvers/base_solver.py | 11 +++-- src/ott/neural/solvers/flows.py | 40 ++++++++++++++---- src/ott/neural/solvers/genot.py | 59 +++++++++++++++++++-------- src/ott/neural/solvers/otfm.py | 32 ++++++++++----- tests/neural/genot_test.py | 10 ++--- 7 files changed, 132 insertions(+), 51 deletions(-) diff --git a/src/ott/neural/data/dataloaders.py b/src/ott/neural/data/dataloaders.py index 938dabb96..4fe8a9a8c 100644 --- a/src/ott/neural/data/dataloaders.py +++ b/src/ott/neural/data/dataloaders.py @@ -103,10 +103,12 @@ def __next__(self) -> Mapping[str, np.ndarray]: class ConditionalDataLoader: """Data loader for OT problems with conditions. - This data loader wraps several data loaders and samples from them according to their conditions. + This data loader wraps several data loaders and samples from them according + to their conditions. Args: - dataloaders: Dictionary of data loaders with keys corresponding to conditions. 
+ dataloaders: Dictionary of data loaders with keys corresponding to + conditions. p: Probability of sampling from each data loader. seed: Random seed. diff --git a/src/ott/neural/models/models.py b/src/ott/neural/models/models.py index 5c7b3f30e..9b15cb803 100644 --- a/src/ott/neural/models/models.py +++ b/src/ott/neural/models/models.py @@ -427,8 +427,11 @@ class NeuralVectorField(BaseNeuralVectorField): """Parameterized neural vector field. Each of the input, condition, and time embeddings are passed through a block - consisting of ``num_layers_per_block`` layers of dimension ``latent_embed_dim``, ``condition_embed_dim``, and ``time_embed_dim``, respectively. - The output of each block is concatenated and passed through a final block of dimension ``joint_hidden_dim``. + consisting of ``num_layers_per_block`` layers of dimension + ``latent_embed_dim``, ``condition_embed_dim``, and ``time_embed_dim``, + respectively. + The output of each block is concatenated and passed through a final block of + dimension ``joint_hidden_dim``. Args: output_dim: Dimensionality of the neural vector field. @@ -586,9 +589,12 @@ def create_train_state( class RescalingMLP(BaseRescalingNet): """Network to learn distributional rescaling factors based on a MLP. - The input is passed through a block consisting of ``num_layers_per_block`` with size ``hidden_dim``. - If ``condition_dim`` is greater than 0, the conditioning vector is passed through a block of the same size. - Both outputs are concatenated and passed through another block of the same size. + The input is passed through a block consisting of ``num_layers_per_block`` + with size ``hidden_dim``. + If ``condition_dim`` is greater than 0, the conditioning vector is passed + through a block of the same size. + Both outputs are concatenated and passed through another block of the same + size. To ensure non-negativity of the output, the output is exponentiated. 
@@ -610,6 +616,15 @@ class RescalingMLP(BaseRescalingNet): def __call__( self, x: jnp.ndarray, condition: Optional[jnp.ndarray] ) -> jnp.ndarray: # noqa: D102 + """Forward pass through the rescaling network. + + Args: + x: Data. + condition: Condition. + + Returns: + Estimated rescaling factors. + """ x = Block( dim=self.hidden_dim, out_dim=self.hidden_dim, diff --git a/src/ott/neural/solvers/base_solver.py b/src/ott/neural/solvers/base_solver.py index ce20d09c5..5c291afaa 100644 --- a/src/ott/neural/solvers/base_solver.py +++ b/src/ott/neural/solvers/base_solver.py @@ -439,7 +439,10 @@ def step_fn( else: new_state_xi = xi_predictions = loss_b = None - return new_state_eta, new_state_xi, eta_predictions, xi_predictions, loss_a, loss_b + return ( + new_state_eta, new_state_xi, eta_predictions, xi_predictions, loss_a, + loss_b + ) return step_fn @@ -449,7 +452,8 @@ def evaluate_eta( """Evaluate the left learnt rescaling factor. Args: - source: Samples from the source distribution to evaluate rescaling function on. + source: Samples from the source distribution to evaluate rescaling + function on. condition: Condition belonging to the samples in the source distribution. Returns: @@ -467,7 +471,8 @@ def evaluate_xi( """Evaluate the right learnt rescaling factor. Args: - target: Samples from the target distribution to evaluate the rescaling function on. + target: Samples from the target distribution to evaluate the rescaling + function on. condition: Condition belonging to the samples in the target distribution. Returns: diff --git a/src/ott/neural/solvers/flows.py b/src/ott/neural/solvers/flows.py index b02981fc9..b61ff08d1 100644 --- a/src/ott/neural/solvers/flows.py +++ b/src/ott/neural/solvers/flows.py @@ -34,7 +34,10 @@ def __init__(self, sigma: float) -> None: @abc.abstractmethod def compute_mu_t(self, t: jnp.ndarray, x_0: jnp.ndarray, x_1: jnp.ndarray): - """Compute the mean of the probablitiy path between :math:`x` and :math:`y` at time :math:`t`. 
+ """Compute the mean of the probablitiy path. + + Compute the mean of the probablitiy path between :math:`x` and :math:`y` + at time :math:`t`. Args: t: Time :math:`t`. @@ -56,7 +59,10 @@ def compute_sigma_t(self, t: jnp.ndarray): def compute_ut( self, t: jnp.ndarray, x_0: jnp.ndarray, x_1: jnp.ndarray ) -> jnp.ndarray: - """Evaluate the conditional vector field defined between :math:`x_0` and :math:`x_1` at time :math:`t`. + """Evaluate the conditional vector field. + + Evaluate the conditional vector field defined between :math:`x_0` and + :math:`x_1` at time :math:`t`. Args: t: Time :math:`t`. @@ -69,7 +75,10 @@ def compute_xt( self, noise: jnp.ndarray, t: jnp.ndarray, x_0: jnp.ndarray, x_1: jnp.ndarray ) -> jnp.ndarray: - """Sample from the probability path between :math:`x_0` and :math:`x_1` at time :math:`t`. + """Sample from the probability path. + + Sample from the probability path between :math:`x_0` and :math:`x_1` at + time :math:`t`. Args: noise: Noise sampled from a standard normal distribution. @@ -78,7 +87,8 @@ def compute_xt( x_1: Sample from the target distribution. Returns: - Samples from the probability path between :math:`x_0` and :math:`x_1` at time :math:`t`. + Samples from the probability path between :math:`x_0` and :math:`x_1` + at time :math:`t`. """ mu_t = self.compute_mu_t(t, x_0, x_1) sigma_t = self.compute_sigma_t(t) @@ -91,7 +101,10 @@ class StraightFlow(BaseFlow, abc.ABC): def compute_mu_t( self, t: jnp.ndarray, x_0: jnp.ndarray, x_1: jnp.ndarray ) -> jnp.ndarray: - """Compute the mean of the probablitiy path between :math:`x` and :math:`y` at time :math:`t`. + """Compute the mean of the probablitiy path. + + Compute the mean of the probablitiy path between :math:`x` and :math:`y` + at time :math:`t`. Args: t: Time :math:`t`. 
@@ -103,7 +116,10 @@ def compute_mu_t( def compute_ut( self, t: jnp.ndarray, x_0: jnp.ndarray, x_1: jnp.ndarray ) -> jnp.ndarray: - """Evaluate the conditional vector field defined between :math:`x_0` and :math:`x_1` at time :math:`t`. + """Evaluate the conditional vector field. + + Evaluate the conditional vector field defined between :math:`x_0` and + :math:`x_1` at time :math:`t`. Args: t: Time :math:`t`. @@ -132,7 +148,12 @@ def compute_sigma_t(self, t: jnp.ndarray): class BrownianNoiseFlow(StraightFlow): - r"""Sampler for sampling noise implicitly defined by a Schroedinger Bridge problem with parameter `\sigma` such that :math:`\sigma_t = \sigma * \sqrt(t * (1-t))`.""" + r"""Brownian Bridge Flow. + + Sampler for sampling noise implicitly defined by a Schroedinger Bridge + problem with parameter `\sigma` such that + :math:`\sigma_t = \sigma * \sqrt(t * (1-t))`. + """ def compute_sigma_t(self, t: jnp.ndarray): """Compute the standard deviation of the probablity path at time :math:`t`. @@ -196,7 +217,10 @@ def __call__(self, rng: jnp.ndarray, num_samples: int) -> jnp.ndarray: class OffsetUniformSampler(BaseTimeSampler): - """Sample :math:`t` from a uniform distribution :math:`[low, high]` with offset `offset`. + """Sample the time :math:`t`. + + Sample :math:`t` from a uniform distribution :math:`[low, high]` with + offset `offset`. Args: offset: Offset of the uniform distribution. diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/solvers/genot.py index 72adce17e..2eb81be98 100644 --- a/src/ott/neural/solvers/genot.py +++ b/src/ott/neural/solvers/genot.py @@ -54,24 +54,44 @@ class GENOT(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): cond_dim: Dimension of the conditioning variable. iterations: Number of iterations. valid_freq: Frequency of validation. - ot_solver: OT solver to match samples from the source and the target distribution. - epsilon: Entropy regularization term of the OT problem solved by `ot_solver`. 
- cost_fn: Cost function for the OT problem solved by the `ot_solver`. In the linear case, this is always expected to be of type `str`. If the problem is of quadratic type and `cost_fn` is a string, the `cost_fn` is used for all terms, i.e. both quadratic terms and, if applicable, the linear temr. If of type :class:`dict`, the keys are expected to be `cost_fn_xx`, `cost_fn_yy`, and if applicable, `cost_fn_xy`. - scale_cost: How to scale the cost matrix for the OT problem solved by the `ot_solver`. In the linear case, this is always expected to be not a :class:`dict`. If the problem is of quadratic type and `scale_cost` is a string, the `scale_cost` argument is used for all terms, i.e. both quadratic terms and, if applicable, the linear temr. If of type :class:`dict`, the keys are expected to be `scale_cost_xx`, `scale_cost_yy`, and if applicable, `scale_cost_xy`. + ot_solver: OT solver to match samples from the source and the target + distribution. + epsilon: Entropy regularization term of the OT problem solved by + `ot_solver`. + cost_fn: Cost function for the OT problem solved by the `ot_solver`. + In the linear case, this is always expected to be of type `str`. + If the problem is of quadratic type and `cost_fn` is a string, + the `cost_fn` is used for all terms, i.e. both quadratic terms and, + if applicable, the linear term. If of type :class:`dict`, the keys + are expected to be `cost_fn_xx`, `cost_fn_yy`, and if applicable, + `cost_fn_xy`. + scale_cost: How to scale the cost matrix for the OT problem solved by + the `ot_solver`. In the linear case, this is always expected to be + not a :class:`dict`. If the problem is of quadratic type and + `scale_cost` is a string, the `scale_cost` argument is used for all + terms, i.e. both quadratic terms and, if applicable, the linear term. + If of type :class:`dict`, the keys are expected to be `scale_cost_xx`, + `scale_cost_yy`, and if applicable, `scale_cost_xy`. + optimizer: Optimizer for `neural_vector_field`. 
flow: Flow between latent distribution and target distribution. time_sampler: Sampler for the time. checkpoint_manager: Checkpoint manager. - k_samples_per_x: Number of samples drawn from the conditional distribution of an input sample, see algorithm TODO. - solver_latent_to_data: Linear OT solver to match the latent distribution with the conditional distribution. Only applicable if `k_samples_per_x` is larger than :math:`1`. #TODO: adapt - kwargs_solver_latent_to_data: Keyword arguments for `solver_latent_to_data`. #TODO: adapt - fused_penalty: Fused penalty of the linear/fused term in the Fused Gromov-Wasserstein problem. + k_samples_per_x: Number of samples drawn from the conditional distribution + of an input sample, see algorithm TODO. + solver_latent_to_data: Linear OT solver to match the latent distribution + with the conditional distribution. + kwargs_solver_latent_to_data: Keyword arguments for `solver_latent_to_data`. + #TODO: adapt + fused_penalty: Fused penalty of the linear/fused term in the Fused + Gromov-Wasserstein problem. tau_a: If :math:`<1`, defines how much unbalanced the problem is on the first marginal. tau_b: If :math:`< 1`, defines how much unbalanced the problem is on the second marginal. - mlp_eta: Neural network to learn the left rescaling function. If `None`, the left rescaling factor is not learnt. - mlp_xi: Neural network to learn the right rescaling function. If `None`, the right rescaling factor is not learnt. + mlp_eta: Neural network to learn the left rescaling function. If `None`, + the left rescaling factor is not learnt. + mlp_xi: Neural network to learn the right rescaling function. If `None`, + the right rescaling factor is not learnt. unbalanced_kwargs: Keyword arguments for the unbalancedness solver. callback_fn: Callback function. rng: Random number generator. 
@@ -107,7 +127,7 @@ def __init__( tau_b: float = 1.0, mlp_eta: Callable[[jnp.ndarray], float] = None, mlp_xi: Callable[[jnp.ndarray], float] = None, - unbalanced_kwargs: Dict[str, Any] = {}, + unbalanced_kwargs: Dict[str, Any] = types.MappingProxyType({}), callback_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray, jnp.ndarray], Any]] = None, rng: random.PRNGKeyArray = random.PRNGKey(0), @@ -133,8 +153,9 @@ def __init__( ot_solver, gromov_wasserstein.GromovWasserstein ) and epsilon is not None: raise ValueError( - "If `ot_solver` is `GromovWasserstein`, `epsilon` must be `None`. This check is performed " - "to ensure that in the (fused) Gromov case the `epsilon` parameter is passed via the `ot_solver`." + "If `ot_solver` is `GromovWasserstein`, `epsilon` must be `None`. " + + "This check is performed to ensure that in the (fused) Gromov case " + + "the `epsilon` parameter is passed via the `ot_solver`." ) self.rng = rng @@ -356,8 +377,11 @@ def transport( ) -> Union[jnp.array, diffrax.Solution, Optional[jnp.ndarray]]: """Transport data with the learnt plan. - This method pushes-forward the `source` to its conditional distribution by solving the neural ODE parameterized by the :attr:`~ott.neural.solvers.GENOTg.neural_vector_field` from - :attr:`~ott.neural.flows.BaseTimeSampler.low` to :attr:`~ott.neural.flows.BaseTimeSampler.high`. + This method pushes-forward the `source` to its conditional distribution by + solving the neural ODE parameterized by the + :attr:`~ott.neural.solvers.GENOTg.neural_vector_field` from + :attr:`~ott.neural.flows.BaseTimeSampler.low` to + :attr:`~ott.neural.flows.BaseTimeSampler.high`. Args: source: Data to transport. @@ -367,7 +391,8 @@ def transport( diffeqsolve_kwargs: Keyword arguments for the ODE solver. Returns: - The push-forward or pull-back distribution defined by the learnt transport plan. + The push-forward or pull-back distribution defined by the learnt + transport plan. 
""" if not forward: @@ -411,7 +436,7 @@ def _valid_step(self, valid_loader, iter) -> None: @property def learn_rescaling(self) -> bool: - """Whether to learn at least one rescaling factor of the marginal distributions.""" + """Whether to learn at least one rescaling factor.""" return self.mlp_eta is not None or self.mlp_xi is not None def save(self, path: str) -> None: diff --git a/src/ott/neural/solvers/otfm.py b/src/ott/neural/solvers/otfm.py index b69d7978e..57720f139 100644 --- a/src/ott/neural/solvers/otfm.py +++ b/src/ott/neural/solvers/otfm.py @@ -51,7 +51,10 @@ class OTFlowMatching(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): - """Flow matching as introduced in :cite:`lipman:22`, with extension to OT-FM (:cite`tong:23`, :cite:`pooladian:23`). + """(Optimal transport) flow matching class. + + Flow matching as introduced in :cite:`lipman:22`, with extension to OT-FM + (:cite`tong:23`, :cite:`pooladian:23`). Args: neural_vector_field: Neural vector field parameterized by a neural network. @@ -59,20 +62,26 @@ class OTFlowMatching(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): cond_dim: Dimension of the conditioning variable. iterations: Number of iterations. valid_freq: Frequency of validation. - ot_solver: OT solver to match samples from the source and the target distribution as proposed in :cite:`tong:23`, :cite:`pooladian:23`. If `None`, no matching will be performed as proposed in :cite:`lipman:22`. + ot_solver: OT solver to match samples from the source and the target + distribution as proposed in :cite:`tong:23`, :cite:`pooladian:23`. + If `None`, no matching will be performed as proposed in :cite:`lipman:22`. flow: Flow between source and target distribution. time_sampler: Sampler for the time. optimizer: Optimizer for `neural_vector_field`. checkpoint_manager: Checkpoint manager. - epsilon: Entropy regularization term of the OT OT problem solved by the `ot_solver`. 
+ epsilon: Entropy regularization term of the OT OT problem solved by the + `ot_solver`. cost_fn: Cost function for the OT problem solved by the `ot_solver`. - scale_cost: How to scale the cost matrix for the OT problem solved by the `ot_solver`. + scale_cost: How to scale the cost matrix for the OT problem solved by the + `ot_solver`. tau_a: If :math:`<1`, defines how much unbalanced the problem is on the first marginal. tau_b: If :math:`< 1`, defines how much unbalanced the problem is on the second marginal. - mlp_eta: Neural network to learn the left rescaling function as suggested in :cite:`TODO`. If `None`, the left rescaling factor is not learnt. - mlp_xi: Neural network to learn the right rescaling function as suggested in :cite:`TODO`. If `None`, the right rescaling factor is not learnt. + mlp_eta: Neural network to learn the left rescaling function as suggested + in :cite:`TODO`. If `None`, the left rescaling factor is not learnt. + mlp_xi: Neural network to learn the right rescaling function as suggested + in :cite:`TODO`. If `None`, the right rescaling factor is not learnt. unbalanced_kwargs: Keyword arguments for the unbalancedness solver. callback_fn: Callback function. num_eval_samples: Number of samples to evaluate on during evaluation. @@ -271,8 +280,10 @@ def transport( ) -> diffrax.Solution: """Transport data with the learnt map. - This method solves the neural ODE parameterized by the :attr:`~ott.neural.solvers.OTFlowMatching.neural_vector_field` from - :attr:`~ott.neural.flows.BaseTimeSampler.low` to :attr:`~ott.neural.flows.BaseTimeSampler.high` if `forward` is `True`, + This method solves the neural ODE parameterized by the + :attr:`~ott.neural.solvers.OTFlowMatching.neural_vector_field` from + :attr:`~ott.neural.flows.BaseTimeSampler.low` to + :attr:`~ott.neural.flows.BaseTimeSampler.high` if `forward` is `True`, else the other way round. Args: @@ -282,7 +293,8 @@ def transport( diffeqsolve_kwargs: Keyword arguments for the ODE solver. 
Returns: - The push-forward or pull-back distribution defined by the learnt transport plan. + The push-forward or pull-back distribution defined by the learnt + transport plan. """ diffeqsolve_kwargs = dict(diffeqsolve_kwargs) @@ -320,7 +332,7 @@ def _valid_step(self, valid_loader, iter) -> None: @property def learn_rescaling(self) -> bool: - """Whether to learn at least one rescaling factor of the marginal distributions.""" + """Whether to learn at least one rescaling factor.""" return self.mlp_eta is not None or self.mlp_xi is not None def save(self, path: str) -> None: diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 5c7c1d431..d7db29817 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -37,9 +37,8 @@ def test_genot_linear_unconditional( solver_latent_to_data = None if solver_latent_to_data is None else sinkhorn.Sinkhorn( ) batch = next(genot_data_loader_linear) - source_lin, source_quad, target_lin, target_quad, source_condition = batch[ - "source_lin"], batch["source_quad"], batch["target_lin"], batch[ - "target_quad"], batch["source_conditions"] + source_lin, target_lin, source_condition = batch[ + "source_lin"], batch["target_lin"], batch["source_conditions"] source_dim = source_lin.shape[1] target_dim = target_lin.shape[1] @@ -72,9 +71,8 @@ def test_genot_linear_unconditional( genot(genot_data_loader_linear, genot_data_loader_linear) batch = next(genot_data_loader_linear) - source_lin, source_quad, target_lin, target_quad, source_condition = batch[ - "source_lin"], batch["source_quad"], batch["target_lin"], batch[ - "target_quad"], batch["source_conditions"] + source_lin, target_lin, source_condition = batch["source_lin"], batch[ + "target_lin"], batch["source_conditions"] result_forward = genot.transport( source_lin, condition=source_condition, forward=True From ffec70c58765e7e75377f8a220a07b5e22b2aa13 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Wed, 29 Nov 2023 13:35:53 +0100 Subject: [PATCH 
044/186] resolve pre-commit errors --- src/ott/neural/solvers/base_solver.py | 2 +- src/ott/neural/solvers/genot.py | 26 +++++++++++++++++--------- src/ott/neural/solvers/otfm.py | 21 ++++++++++++++------- tests/neural/genot_test.py | 14 +++++++++----- tests/neural/otfm_test.py | 5 ++++- 5 files changed, 45 insertions(+), 23 deletions(-) diff --git a/src/ott/neural/solvers/base_solver.py b/src/ott/neural/solvers/base_solver.py index 5c291afaa..6a9ee84ea 100644 --- a/src/ott/neural/solvers/base_solver.py +++ b/src/ott/neural/solvers/base_solver.py @@ -152,7 +152,7 @@ def _get_sinkhorn_match_fn( self, ot_solver: Any, epsilon: float = 1e-2, - cost_fn: costs.CostFn = costs.SqEuclidean(), + cost_fn: Optional[costs.CostFn] = None, scale_cost: Union[bool, int, float, Literal["mean", "max_norm", "max_bound", "max_cost", "median"]] = "mean", diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/solvers/genot.py index 2eb81be98..ae5953de4 100644 --- a/src/ott/neural/solvers/genot.py +++ b/src/ott/neural/solvers/genot.py @@ -24,6 +24,7 @@ from jax import random from orbax import checkpoint +from ott import utils from ott.geometry import costs from ott.neural.models.models import BaseNeuralVectorField from ott.neural.solvers.base_solver import ( @@ -130,7 +131,7 @@ def __init__( unbalanced_kwargs: Dict[str, Any] = types.MappingProxyType({}), callback_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray, jnp.ndarray], Any]] = None, - rng: random.PRNGKeyArray = random.PRNGKey(0), + rng: Optional[jnp.ndarray] = None, ) -> None: rng, rng_unbalanced = random.split(rng) BaseNeuralSolver.__init__( @@ -158,7 +159,7 @@ def __init__( "the `epsilon` parameter is passed via the `ot_solver`." 
) - self.rng = rng + self.rng = utils.default_prng_key(rng) self.neural_vector_field = neural_vector_field self.state_neural_vector_field: Optional[TrainState] = None self.flow = flow @@ -198,8 +199,10 @@ def setup(self) -> None: kwargs Keyword arguments for the setup function """ - self.state_neural_vector_field = self.neural_vector_field.create_train_state( - self.rng, self.optimizer, self.output_dim + self.state_neural_vector_field = ( + self.neural_vector_field.create_train_state( + self.rng, self.optimizer, self.output_dim + ) ) self.step_fn = self._get_step_fn() if self.solver_latent_to_data is not None: @@ -232,9 +235,10 @@ def __call__(self, train_loader, valid_loader) -> None: for iteration in range(self.iterations): batch = next(train_loader) - self.rng, rng_time, rng_resample, rng_noise, rng_latent_data_match, rng_step_fn = jax.random.split( - self.rng, 6 - ) + ( + self.rng, rng_time, rng_resample, rng_noise, rng_latent_data_match, + rng_step_fn + ) = jax.random.split(self.rng, 6) batch_size = len( batch["source_lin"] ) if batch["source_lin"] is not None else len(batch["source_quad"]) @@ -303,7 +307,10 @@ def __call__(self, train_loader, valid_loader) -> None: rng_step_fn, self.state_neural_vector_field, batch ) if self.learn_rescaling: - self.state_eta, self.state_xi, eta_predictions, xi_predictions, loss_a, loss_b = self.unbalancedness_step_fn( + ( + self.state_eta, self.state_xi, eta_predictions, xi_predictions, + loss_a, loss_b + ) = self.unbalancedness_step_fn( source=batch["source"], target=batch["target"], condition=batch["source_conditions"], @@ -371,7 +378,7 @@ def transport( self, source: jnp.ndarray, condition: Optional[jnp.ndarray], - rng: random.PRNGKeyArray = random.PRNGKey(0), + rng: Optional[jnp.ndarray] = None, forward: bool = True, diffeqsolve_kwargs: Dict[str, Any] = types.MappingProxyType({}), ) -> Union[jnp.array, diffrax.Solution, Optional[jnp.ndarray]]: @@ -395,6 +402,7 @@ def transport( transport plan. 
""" + rng = utils.default_prng_key(rng) if not forward: raise NotImplementedError diffeqsolve_kwargs = dict(diffeqsolve_kwargs) diff --git a/src/ott/neural/solvers/otfm.py b/src/ott/neural/solvers/otfm.py index 57720f139..378d4f108 100644 --- a/src/ott/neural/solvers/otfm.py +++ b/src/ott/neural/solvers/otfm.py @@ -34,6 +34,7 @@ from jax import random from orbax import checkpoint +from ott import utils from ott.geometry import costs from ott.neural.models.models import BaseNeuralVectorField from ott.neural.solvers.base_solver import ( @@ -104,7 +105,7 @@ def __init__( optimizer: Type[optax.GradientTransformation], checkpoint_manager: Type[checkpoint.CheckpointManager] = None, epsilon: float = 1e-2, - cost_fn: Type[costs.CostFn] = costs.SqEuclidean(), + cost_fn: Optional[Type[costs.CostFn]] = None, scale_cost: Union[bool, int, float, Literal["mean", "max_norm", "max_bound", "max_cost", "median"]] = "mean", @@ -112,14 +113,15 @@ def __init__( tau_b: float = 1.0, mlp_eta: Callable[[jnp.ndarray], float] = None, mlp_xi: Callable[[jnp.ndarray], float] = None, - unbalanced_kwargs: Dict[str, Any] = {}, + unbalanced_kwargs: Dict[str, Any] = types.MappingProxyType({}), callback_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray, jnp.ndarray], Any]] = None, logging_freq: int = 100, valid_freq: int = 5000, num_eval_samples: int = 1000, - rng: random.PRNGKeyArray = random.PRNGKey(0), + rng: Optional[jnp.ndarray] = None, ) -> None: + rng = utils.default_prng_key(rng) rng, rng_unbalanced = random.split(rng) BaseNeuralSolver.__init__( self, iterations=iterations, valid_freq=valid_freq @@ -158,8 +160,10 @@ def __init__( def setup(self) -> None: """Setup :class:`OTFlowMatching`.""" - self.state_neural_vector_field = self.neural_vector_field.create_train_state( - self.rng, self.optimizer, self.input_dim + self.state_neural_vector_field = ( + self.neural_vector_field.create_train_state( + self.rng, self.optimizer, self.input_dim + ) ) self.step_fn = self._get_step_fn() @@ -250,7 +254,10 
@@ def __call__(self, train_loader, valid_loader) -> None: self._training_logs["loss"].append(curr_loss / self.logging_freq) curr_loss = 0.0 if self.learn_rescaling: - self.state_eta, self.state_xi, eta_predictions, xi_predictions, loss_a, loss_b = self.unbalancedness_step_fn( + ( + self.state_eta, self.state_xi, eta_predictions, xi_predictions, + loss_a, loss_b + ) = self.unbalancedness_step_fn( source=batch["source_lin"], target=batch["target_lin"], condition=batch["source_conditions"], @@ -293,7 +300,7 @@ def transport( diffeqsolve_kwargs: Keyword arguments for the ODE solver. Returns: - The push-forward or pull-back distribution defined by the learnt + The push-forward or pull-back distribution defined by the learnt transport plan. """ diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index d7db29817..794b5d44e 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -34,11 +34,12 @@ def test_genot_linear_unconditional( self, genot_data_loader_linear: Iterator, k_samples_per_x: int, solver_latent_to_data: Optional[str] ): - solver_latent_to_data = None if solver_latent_to_data is None else sinkhorn.Sinkhorn( + solver_latent_to_data = ( + None if solver_latent_to_data is None else sinkhorn.Sinkhorn() ) batch = next(genot_data_loader_linear) - source_lin, target_lin, source_condition = batch[ - "source_lin"], batch["target_lin"], batch["source_conditions"] + source_lin, target_lin, source_condition = batch["source_lin"], batch[ + "target_lin"], batch["source_conditions"] source_dim = source_lin.shape[1] target_dim = target_lin.shape[1] @@ -133,7 +134,7 @@ def test_genot_fused_unconditional( ): None if solver_latent_to_data is None else sinkhorn.Sinkhorn() batch = next(genot_data_loader_fused) - batch = source_lin, source_quad, target_lin, target_quad, source_condition = batch[ + source_lin, source_quad, target_lin, target_quad, source_condition = batch[ "source_lin"], batch["source_quad"], batch["target_lin"], batch[ 
"target_quad"], batch["source_conditions"] @@ -324,7 +325,10 @@ def test_genot_linear_learn_rescaling( genot_data_loader_linear_conditional: Iterator ): None if solver_latent_to_data is None else sinkhorn.Sinkhorn() - data_loader = genot_data_loader_linear_conditional if conditional else genot_data_loader_linear + data_loader = ( + genot_data_loader_linear_conditional + if conditional else genot_data_loader_linear + ) batch = next(data_loader) source_lin, target_lin, source_condition = batch["source_lin"], batch[ diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index d8deb1102..e77789938 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -172,7 +172,10 @@ def test_flow_matching_learn_rescaling( self, conditional: bool, data_loader_gaussian: Iterator, data_loader_gaussian_conditional: Iterator ): - data_loader = data_loader_gaussian_conditional if conditional else data_loader_gaussian + data_loader = ( + data_loader_gaussian_conditional + if conditional else data_loader_gaussian + ) batch = next(data_loader) source_dim = batch["source_lin"].shape[1] condition_dim = batch["source_conditions"].shape[1] if conditional else 0 From 10d70f24f60c0639c597c06010520b27e7a19c3b Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Wed, 29 Nov 2023 13:37:24 +0100 Subject: [PATCH 045/186] fix rng bug --- src/ott/neural/solvers/genot.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/solvers/genot.py index ae5953de4..fbfca23ce 100644 --- a/src/ott/neural/solvers/genot.py +++ b/src/ott/neural/solvers/genot.py @@ -133,6 +133,7 @@ def __init__( Any]] = None, rng: Optional[jnp.ndarray] = None, ) -> None: + rng = utils.default_prng_key(rng) rng, rng_unbalanced = random.split(rng) BaseNeuralSolver.__init__( self, iterations=iterations, valid_freq=valid_freq From 9fb308bb29abfeacde673c1debfdf299ade35d58 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: 
Wed, 29 Nov 2023 13:45:06 +0100 Subject: [PATCH 046/186] Update pre-commit --- .pre-commit-config.yaml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 396cca399..d54c42330 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,24 +7,24 @@ default_stages: minimum_pre_commit_version: 3.0.0 repos: - repo: https://github.com/google/yapf - rev: v0.40.0 + rev: v0.40.2 hooks: - id: yapf additional_dependencies: [toml] - repo: https://github.com/nbQA-dev/nbQA - rev: 1.7.0 + rev: 1.7.1 hooks: - id: nbqa-pyupgrade args: [--py38-plus] - id: nbqa-black - id: nbqa-isort - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks - rev: v2.10.0 + rev: v2.11.0 hooks: - id: pretty-format-yaml args: [--autofix, --indent, '2'] - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v4.5.0 hooks: - id: detect-private-key - id: check-ast @@ -38,12 +38,12 @@ repos: - id: check-case-conflict - repo: https://github.com/charliermarsh/ruff-pre-commit # Ruff version. 
- rev: v0.0.285 + rev: v0.1.6 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] - repo: https://github.com/rstcheck/rstcheck - rev: v6.1.2 + rev: v6.2.0 hooks: - id: rstcheck additional_dependencies: [tomli] From aa0bdc58a1e2cffbafe412ada21d6ba3285bcb79 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Wed, 29 Nov 2023 14:00:14 +0100 Subject: [PATCH 047/186] fix import error --- tests/neural/icnn_test.py | 2 +- tests/neural/meta_initializer_test.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/neural/icnn_test.py b/tests/neural/icnn_test.py index c52eac675..4d760557f 100644 --- a/tests/neural/icnn_test.py +++ b/tests/neural/icnn_test.py @@ -16,7 +16,7 @@ import numpy as np import pytest -from ott.neural import models +from ott.neural.models import models @pytest.mark.fast() diff --git a/tests/neural/meta_initializer_test.py b/tests/neural/meta_initializer_test.py index 98aa4f4d0..f978e8206 100644 --- a/tests/neural/meta_initializer_test.py +++ b/tests/neural/meta_initializer_test.py @@ -20,7 +20,7 @@ from ott.geometry import pointcloud from ott.initializers.linear import initializers as linear_init -from ott.neural import models as nn_init +from ott.neural.models import models as nn_init from ott.problems.linear import linear_problem from ott.solvers.linear import sinkhorn From b48dfdc80386100dada0d8953c5a115afeececa8 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Wed, 29 Nov 2023 14:25:43 +0100 Subject: [PATCH 048/186] Run linter --- .pre-commit-config.yaml | 40 ++++++++++--------- docs/tutorials/MetaOT.ipynb | 2 + docs/tutorials/Monge_Gap.ipynb | 3 +- docs/tutorials/icnn_inits.ipynb | 1 + docs/tutorials/neural_dual.ipynb | 3 +- docs/tutorials/point_clouds.ipynb | 4 ++ docs/tutorials/soft_sort.ipynb | 7 ++-- .../sparse_monge_displacements.ipynb | 2 + docs/tutorials/tracking_progress.ipynb | 2 + pyproject.toml | 12 +++--- src/ott/math/__init__.py | 7 +--- 
src/ott/neural/models/base_models.py | 3 +- src/ott/neural/models/layers.py | 3 +- src/ott/neural/models/models.py | 5 ++- src/ott/neural/solvers/base_solver.py | 4 +- src/ott/neural/solvers/genot.py | 16 ++++---- src/ott/neural/solvers/map_estimator.py | 1 + src/ott/neural/solvers/neuraldual.py | 3 +- src/ott/neural/solvers/otfm.py | 27 ++++++------- src/ott/problems/linear/potentials.py | 10 +---- src/ott/solvers/linear/lineax_implicit.py | 5 ++- tests/conftest.py | 6 ++- tests/geometry/costs_test.py | 3 +- tests/geometry/geodesic_test.py | 10 +++-- tests/geometry/graph_test.py | 10 +++-- tests/geometry/low_rank_test.py | 3 +- tests/geometry/pointcloud_test.py | 3 +- tests/geometry/scaling_cost_test.py | 3 +- tests/geometry/subsetting_test.py | 3 +- .../initializers/linear/sinkhorn_init_test.py | 3 +- .../linear/sinkhorn_lr_init_test.py | 3 +- tests/initializers/quadratic/gw_init_test.py | 3 +- tests/math/lse_test.py | 3 +- tests/math/math_utils_test.py | 3 +- tests/math/matrix_square_root_test.py | 3 +- tests/neural/conftest.py | 3 +- tests/neural/genot_test.py | 4 +- tests/neural/icnn_test.py | 3 +- tests/neural/losses_test.py | 3 +- tests/neural/map_estimator_test.py | 3 +- tests/neural/meta_initializer_test.py | 4 +- tests/neural/neuraldual_test.py | 3 +- tests/neural/otfm_test.py | 4 +- tests/problems/linear/potentials_test.py | 6 ++- .../linear/continuous_barycenter_test.py | 3 +- .../linear/discrete_barycenter_test.py | 3 +- tests/solvers/linear/sinkhorn_diff_test.py | 3 +- tests/solvers/linear/sinkhorn_grid_test.py | 3 +- tests/solvers/linear/sinkhorn_lr_test.py | 3 +- tests/solvers/linear/sinkhorn_misc_test.py | 7 +++- tests/solvers/linear/sinkhorn_test.py | 3 +- tests/solvers/linear/univariate_test.py | 3 +- tests/solvers/quadratic/fgw_test.py | 3 +- tests/solvers/quadratic/gw_barycenter_test.py | 3 +- tests/solvers/quadratic/gw_test.py | 3 +- tests/solvers/quadratic/lower_bound_test.py | 3 +- .../gaussian_mixture/fit_gmm_pair_test.py | 3 +- 
tests/tools/gaussian_mixture/fit_gmm_test.py | 3 +- .../gaussian_mixture_pair_test.py | 3 +- .../gaussian_mixture/gaussian_mixture_test.py | 3 +- tests/tools/gaussian_mixture/gaussian_test.py | 3 +- tests/tools/gaussian_mixture/linalg_test.py | 3 +- .../gaussian_mixture/probabilities_test.py | 3 +- .../tools/gaussian_mixture/scale_tril_test.py | 3 +- tests/tools/k_means_test.py | 3 +- tests/tools/plot_test.py | 1 + tests/tools/segment_sinkhorn_test.py | 3 +- tests/tools/sinkhorn_divergence_test.py | 3 +- tests/tools/soft_sort_test.py | 3 +- 69 files changed, 200 insertions(+), 129 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d54c42330..1f84672bb 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -6,23 +6,6 @@ default_stages: - push minimum_pre_commit_version: 3.0.0 repos: -- repo: https://github.com/google/yapf - rev: v0.40.2 - hooks: - - id: yapf - additional_dependencies: [toml] -- repo: https://github.com/nbQA-dev/nbQA - rev: 1.7.1 - hooks: - - id: nbqa-pyupgrade - args: [--py38-plus] - - id: nbqa-black - - id: nbqa-isort -- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks - rev: v2.11.0 - hooks: - - id: pretty-format-yaml - args: [--autofix, --indent, '2'] - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.5.0 hooks: @@ -37,11 +20,32 @@ repos: - id: trailing-whitespace - id: check-case-conflict - repo: https://github.com/charliermarsh/ruff-pre-commit - # Ruff version. 
rev: v0.1.6 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] +- repo: https://github.com/pycqa/isort + rev: 5.12.0 + hooks: + - id: isort + name: isort +- repo: https://github.com/google/yapf + rev: v0.40.2 + hooks: + - id: yapf + additional_dependencies: [toml] +- repo: https://github.com/nbQA-dev/nbQA + rev: 1.7.1 + hooks: + - id: nbqa-pyupgrade + args: [--py38-plus] + - id: nbqa-black + - id: nbqa-isort +- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks + rev: v2.11.0 + hooks: + - id: pretty-format-yaml + args: [--autofix, --indent, '2'] - repo: https://github.com/rstcheck/rstcheck rev: v6.2.0 hooks: diff --git a/docs/tutorials/MetaOT.ipynb b/docs/tutorials/MetaOT.ipynb index 79503617d..9349786a1 100644 --- a/docs/tutorials/MetaOT.ipynb +++ b/docs/tutorials/MetaOT.ipynb @@ -81,6 +81,8 @@ "outputs": [], "source": [ "# Obtain the MNIST dataset and flatten the images into discrete measures.\n", + "\n", + "\n", "def get_mnist_flat(train):\n", " dataset = torchvision.datasets.MNIST(\n", " \"/tmp/mnist/\",\n", diff --git a/docs/tutorials/Monge_Gap.ipynb b/docs/tutorials/Monge_Gap.ipynb index 53bc670dc..2fde4f923 100644 --- a/docs/tutorials/Monge_Gap.ipynb +++ b/docs/tutorials/Monge_Gap.ipynb @@ -31,8 +31,9 @@ "\n", "import jax\n", "import jax.numpy as jnp\n", - "import optax\n", "import sklearn.datasets\n", + "\n", + "import optax\n", "from flax import linen as nn\n", "\n", "from matplotlib import pyplot as plt\n", diff --git a/docs/tutorials/icnn_inits.ipynb b/docs/tutorials/icnn_inits.ipynb index 8d8444507..24ca43f6f 100644 --- a/docs/tutorials/icnn_inits.ipynb +++ b/docs/tutorials/icnn_inits.ipynb @@ -33,6 +33,7 @@ "import jax\n", "import jax.numpy as jnp\n", "import numpy as np\n", + "\n", "import optax\n", "\n", "import matplotlib.pyplot as plt\n", diff --git a/docs/tutorials/neural_dual.ipynb b/docs/tutorials/neural_dual.ipynb index c1d9461d2..3fadb58ca 100644 --- 
a/docs/tutorials/neural_dual.ipynb +++ b/docs/tutorials/neural_dual.ipynb @@ -49,9 +49,10 @@ "import jax\n", "import jax.numpy as jnp\n", "import numpy as np\n", - "import optax\n", "from torch.utils.data import DataLoader, IterableDataset\n", "\n", + "import optax\n", + "\n", "import matplotlib.pyplot as plt\n", "from IPython.display import clear_output, display\n", "\n", diff --git a/docs/tutorials/point_clouds.ipynb b/docs/tutorials/point_clouds.ipynb index fd20ffc9a..c01b51cfd 100644 --- a/docs/tutorials/point_clouds.ipynb +++ b/docs/tutorials/point_clouds.ipynb @@ -279,6 +279,8 @@ "outputs": [], "source": [ "# Helper function to plot successively the optimal transports\n", + "\n", + "\n", "def plot_ots(ots):\n", " fig = plt.figure(figsize=(8, 5))\n", " plott = ott.tools.plot.Plot(fig=fig)\n", @@ -366973,6 +366975,8 @@ "outputs": [], "source": [ "# Plotting utility\n", + "\n", + "\n", "def plot_map(x, y, z, forward: bool = True):\n", " plt.figure(figsize=(10, 8))\n", " marker_t = \"o\" if forward else \"X\"\n", diff --git a/docs/tutorials/soft_sort.ipynb b/docs/tutorials/soft_sort.ipynb index cf0f751ac..880506731 100644 --- a/docs/tutorials/soft_sort.ipynb +++ b/docs/tutorials/soft_sort.ipynb @@ -37,16 +37,17 @@ "\n", "from tqdm.notebook import tqdm\n", "\n", - "import flax.linen as nn\n", "import jax\n", "import jax.numpy as jnp\n", "import numpy as np\n", - "import optax\n", "import torchvision\n", - "from flax import struct\n", "from scipy import ndimage\n", "from torch.utils import data\n", "\n", + "import flax.linen as nn\n", + "import optax\n", + "from flax import struct\n", + "\n", "import matplotlib.pyplot as plt\n", "\n", "from ott.tools import soft_sort" diff --git a/docs/tutorials/sparse_monge_displacements.ipynb b/docs/tutorials/sparse_monge_displacements.ipynb index a21213703..8b735d9f7 100644 --- a/docs/tutorials/sparse_monge_displacements.ipynb +++ b/docs/tutorials/sparse_monge_displacements.ipynb @@ -114,6 +114,8 @@ "outputs": [], "source": [ "# 
Plotting utility\n", + "\n", + "\n", "def plot_map(x, y, x_new=None, z=None, ax=None, title=None):\n", " if ax is None:\n", " f, ax = plt.subplots(figsize=(10, 8))\n", diff --git a/docs/tutorials/tracking_progress.ipynb b/docs/tutorials/tracking_progress.ipynb index b8a230da6..cd358252b 100644 --- a/docs/tutorials/tracking_progress.ipynb +++ b/docs/tutorials/tracking_progress.ipynb @@ -373,6 +373,8 @@ "outputs": [], "source": [ "# Samples spiral\n", + "\n", + "\n", "def sample_spiral(\n", " n, min_radius, max_radius, key, min_angle=0, max_angle=10, noise=1.0\n", "):\n", diff --git a/pyproject.toml b/pyproject.toml index 530c55113..1961a5971 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -103,11 +103,14 @@ include = '\.ipynb$' [tool.isort] profile = "black" +line_length = 80 include_trailing_comma = true multi_line_output = 3 -sections = ["FUTURE", "STDLIB", "THIRDPARTY", "NUMERIC", "PLOTTING", "FIRSTPARTY", "LOCALFOLDER"] -# also contains what we import in notebooks -known_numeric = ["numpy", "scipy", "jax", "flax", "optax", "jaxopt", "torch", "ot", "torchvision", "pandas", "sklearn"] +sections = ["FUTURE", "STDLIB", "THIRDPARTY", "TEST", "NUMERIC", "NEURAL", "PLOTTING", "FIRSTPARTY", "LOCALFOLDER"] +# also contains what we import in notebooks/tests +known_neural = ["flax", "optax", "diffrax", "orbax"] +known_numeric = ["numpy", "scipy", "jax", "flax", "optax", "jaxopt", "torch", "ot", "torchvision", "pandas", "sklearn", "tslearn"] +known_test = ["pytest"] known_plotting = ["IPython", "matplotlib", "mpl_toolkits", "seaborn"] [tool.pytest.ini_options] @@ -286,7 +289,6 @@ ignore = [ line-length = 80 select = [ "D", # flake8-docstrings - "I", # isort "E", # pycodestyle "F", # pyflakes "W", # pycodestyle @@ -302,7 +304,7 @@ select = [ "T20", # flake8-print "RET", # flake8-raise ] -unfixable = ["B", "UP", "C4", "BLE", "T20", "RET"] +unfixable = ["I", "B", "UP", "C4", "BLE", "T20", "RET"] target-version = "py38" [tool.ruff.per-file-ignores] # TODO(michalk8): PO004 - 
remove `self.initialize` diff --git a/src/ott/math/__init__.py b/src/ott/math/__init__.py index 64bc1c07b..ce2a09a73 100644 --- a/src/ott/math/__init__.py +++ b/src/ott/math/__init__.py @@ -11,9 +11,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from . import ( - fixed_point_loop, - matrix_square_root, - unbalanced_functions, - utils, -) +from . import fixed_point_loop, matrix_square_root, unbalanced_functions, utils diff --git a/src/ott/neural/models/base_models.py b/src/ott/neural/models/base_models.py index 8b5dc126a..d3ac7526a 100644 --- a/src/ott/neural/models/base_models.py +++ b/src/ott/neural/models/base_models.py @@ -14,9 +14,10 @@ import abc from typing import Optional -import flax.linen as nn import jax.numpy as jnp +import flax.linen as nn + __all__ = ["BaseNeuralVectorField", "BaseRescalingNet"] diff --git a/src/ott/neural/models/layers.py b/src/ott/neural/models/layers.py index 153087141..79e6394bc 100644 --- a/src/ott/neural/models/layers.py +++ b/src/ott/neural/models/layers.py @@ -15,7 +15,8 @@ import jax import jax.numpy as jnp -from flax import linen as nn + +import flax.linen as nn __all__ = ["PositiveDense", "PosDefPotentials"] diff --git a/src/ott/neural/models/models.py b/src/ott/neural/models/models.py index 9b15cb803..c65cbbaf3 100644 --- a/src/ott/neural/models/models.py +++ b/src/ott/neural/models/models.py @@ -14,13 +14,14 @@ import functools from typing import Any, Callable, Dict, Optional, Sequence, Tuple -import flax.linen as nn import jax import jax.numpy as jnp +from jax.nn import initializers + +import flax.linen as nn import optax from flax.core import frozen_dict from flax.training import train_state -from jax.nn import initializers from ott import utils from ott.geometry import geometry diff --git a/src/ott/neural/solvers/base_solver.py b/src/ott/neural/solvers/base_solver.py index 
6a9ee84ea..bde81e9da 100644 --- a/src/ott/neural/solvers/base_solver.py +++ b/src/ott/neural/solvers/base_solver.py @@ -18,9 +18,9 @@ import jax import jax.numpy as jnp + import optax from flax.training import train_state -from jax import random from ott.geometry import costs, pointcloud from ott.geometry.pointcloud import PointCloud @@ -89,7 +89,7 @@ def _resample_data( ) -> Tuple[jnp.ndarray, ...]: """Resample a batch according to coupling `tmat`.""" tmat_flattened = tmat.flatten() - indices = random.choice(key, len(tmat_flattened), shape=[tmat.shape[0]]) + indices = jax.random.choice(key, len(tmat_flattened), shape=[tmat.shape[0]]) indices_source = indices // tmat.shape[1] indices_target = indices % tmat.shape[1] return tuple( diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/solvers/genot.py index fbfca23ce..0613ae53c 100644 --- a/src/ott/neural/solvers/genot.py +++ b/src/ott/neural/solvers/genot.py @@ -15,13 +15,13 @@ import types from typing import Any, Callable, Dict, Literal, Optional, Type, Union -import diffrax import jax import jax.numpy as jnp + +import diffrax import optax from flax.training import train_state from flax.training.train_state import TrainState -from jax import random from orbax import checkpoint from ott import utils @@ -134,7 +134,7 @@ def __init__( rng: Optional[jnp.ndarray] = None, ) -> None: rng = utils.default_prng_key(rng) - rng, rng_unbalanced = random.split(rng) + rng, rng_unbalanced = jax.random.split(rng) BaseNeuralSolver.__init__( self, iterations=iterations, valid_freq=valid_freq ) @@ -343,7 +343,7 @@ def step_fn( def loss_fn( params: jnp.ndarray, batch: Dict[str, jnp.array], - keys_model: random.PRNGKeyArray + keys_model: jax.random.PRNGKeyArray ): x_t = self.flow.compute_xt( batch["noise"], batch["time"], batch["latent"], batch["target"] @@ -366,7 +366,7 @@ def loss_fn( ) return jnp.mean((v_t - u_t) ** 2) - keys_model = random.split(key, len(batch["noise"])) + keys_model = jax.random.split(key, 
len(batch["noise"])) grad_fn = jax.value_and_grad(loss_fn, has_aux=False) loss, grads = grad_fn(state_neural_vector_field.params, batch, keys_model) @@ -472,7 +472,9 @@ def training_logs(self) -> Dict[str, Any]: """Logs of the training.""" raise NotImplementedError - def sample_noise(self, key: random.PRNGKey, batch_size: int) -> jnp.ndarray: + def sample_noise( + self, key: jax.random.PRNGKey, batch_size: int + ) -> jnp.ndarray: """Sample noise from a standard-normal distribution. Args: @@ -482,4 +484,4 @@ def sample_noise(self, key: random.PRNGKey, batch_size: int) -> jnp.ndarray: Returns: Samples from the standard normal distribution. """ - return random.normal(key, shape=(batch_size, self.output_dim)) + return jax.random.normal(key, shape=(batch_size, self.output_dim)) diff --git a/src/ott/neural/solvers/map_estimator.py b/src/ott/neural/solvers/map_estimator.py index b97f673b0..7eaffdfc8 100644 --- a/src/ott/neural/solvers/map_estimator.py +++ b/src/ott/neural/solvers/map_estimator.py @@ -26,6 +26,7 @@ import jax import jax.numpy as jnp + import optax from flax.core import frozen_dict from flax.training import train_state diff --git a/src/ott/neural/solvers/neuraldual.py b/src/ott/neural/solvers/neuraldual.py index e78666ec6..a7da8c3e7 100644 --- a/src/ott/neural/solvers/neuraldual.py +++ b/src/ott/neural/solvers/neuraldual.py @@ -27,9 +27,10 @@ import jax import jax.numpy as jnp + +import flax.linen as nn import optax from flax import core, struct -from flax import linen as nn from flax.core import frozen_dict from flax.training import train_state diff --git a/src/ott/neural/solvers/otfm.py b/src/ott/neural/solvers/otfm.py index 378d4f108..fb054e30a 100644 --- a/src/ott/neural/solvers/otfm.py +++ b/src/ott/neural/solvers/otfm.py @@ -26,12 +26,12 @@ Union, ) -import diffrax import jax import jax.numpy as jnp + +import diffrax import optax from flax.training import train_state -from jax import random from orbax import checkpoint from ott import utils @@ -42,10 
+42,7 @@ ResampleMixin, UnbalancednessMixin, ) -from ott.neural.solvers.flows import ( - BaseFlow, - BaseTimeSampler, -) +from ott.neural.solvers.flows import BaseFlow, BaseTimeSampler from ott.solvers import was_solver __all__ = ["OTFlowMatching"] @@ -122,7 +119,7 @@ def __init__( rng: Optional[jnp.ndarray] = None, ) -> None: rng = utils.default_prng_key(rng) - rng, rng_unbalanced = random.split(rng) + rng, rng_unbalanced = jax.random.split(rng) BaseNeuralSolver.__init__( self, iterations=iterations, valid_freq=valid_freq ) @@ -183,14 +180,14 @@ def _get_step_fn(self) -> Callable: @jax.jit def step_fn( - key: random.PRNGKeyArray, + key: jax.random.PRNGKeyArray, state_neural_vector_field: train_state.TrainState, batch: Dict[str, jnp.ndarray], ) -> Tuple[Any, Any]: def loss_fn( params: jnp.ndarray, t: jnp.ndarray, noise: jnp.ndarray, - batch: Dict[str, jnp.ndarray], keys_model: random.PRNGKeyArray + batch: Dict[str, jnp.ndarray], keys_model: jax.random.PRNGKeyArray ) -> jnp.ndarray: x_t = self.flow.compute_xt( @@ -209,8 +206,8 @@ def loss_fn( return jnp.mean((v_t - u_t) ** 2) batch_size = len(batch["source_lin"]) - key_noise, key_t, key_model = random.split(key, 3) - keys_model = random.split(key_model, batch_size) + key_noise, key_t, key_model = jax.random.split(key, 3) + keys_model = jax.random.split(key_model, batch_size) t = self.time_sampler(key_t, batch_size) noise = self.sample_noise(key_noise, batch_size) grad_fn = jax.value_and_grad(loss_fn) @@ -235,7 +232,7 @@ def __call__(self, train_loader, valid_loader) -> None: curr_loss = 0.0 for iter in range(self.iterations): - rng_resample, rng_step_fn, self.rng = random.split(self.rng, 3) + rng_resample, rng_step_fn, self.rng = jax.random.split(self.rng, 3) batch = next(train_loader) if self.ot_solver is not None: tmat = self.match_fn(batch["source_lin"], batch["target_lin"]) @@ -366,7 +363,9 @@ def training_logs(self) -> Dict[str, Any]: """Logs of the training.""" raise NotImplementedError - def 
sample_noise(self, key: random.PRNGKey, batch_size: int) -> jnp.ndarray: + def sample_noise( + self, key: jax.random.PRNGKey, batch_size: int + ) -> jnp.ndarray: """Sample noise from a standard-normal distribution. Args: @@ -376,4 +375,4 @@ def sample_noise(self, key: random.PRNGKey, batch_size: int) -> jnp.ndarray: Returns: Samples from the standard normal distribution. """ - return random.normal(key, shape=(batch_size, self.input_dim)) + return jax.random.normal(key, shape=(batch_size, self.input_dim)) diff --git a/src/ott/problems/linear/potentials.py b/src/ott/problems/linear/potentials.py index 7ab226072..a91cf5038 100644 --- a/src/ott/problems/linear/potentials.py +++ b/src/ott/problems/linear/potentials.py @@ -11,15 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import ( - Any, - Callable, - Dict, - Literal, - Optional, - Sequence, - Tuple, -) +from typing import Any, Callable, Dict, Literal, Optional, Sequence, Tuple import jax import jax.numpy as jnp diff --git a/src/ott/solvers/linear/lineax_implicit.py b/src/ott/solvers/linear/lineax_implicit.py index 79b9e7c95..30200b073 100644 --- a/src/ott/solvers/linear/lineax_implicit.py +++ b/src/ott/solvers/linear/lineax_implicit.py @@ -14,11 +14,12 @@ from typing import Any, Callable, Optional, TypeVar import equinox as eqx +import lineax as lx +from jaxtyping import Array, Float, PyTree + import jax import jax.numpy as jnp import jax.tree_util as jtu -import lineax as lx -from jaxtyping import Array, Float, PyTree _T = TypeVar("_T") _FlatPyTree = tuple[list[_T], jtu.PyTreeDef] diff --git a/tests/conftest.py b/tests/conftest.py index bc4570343..da7e6a3dc 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -15,11 +15,13 @@ import itertools from typing import Any, Mapping, Optional, Sequence +from _pytest.python import Metafunc + +import pytest + import jax 
import jax.experimental import jax.numpy as jnp -import pytest -from _pytest.python import Metafunc def pytest_generate_tests(metafunc: Metafunc) -> None: diff --git a/tests/geometry/costs_test.py b/tests/geometry/costs_test.py index b23e79071..02d9976da 100644 --- a/tests/geometry/costs_test.py +++ b/tests/geometry/costs_test.py @@ -13,10 +13,11 @@ # limitations under the License. from typing import Type +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.geometry import costs, pointcloud from ott.solvers import linear diff --git a/tests/geometry/geodesic_test.py b/tests/geometry/geodesic_test.py index 3891ac144..986246dfd 100644 --- a/tests/geometry/geodesic_test.py +++ b/tests/geometry/geodesic_test.py @@ -13,14 +13,16 @@ # limitations under the License. from typing import Optional, Union -import jax -import jax.numpy as jnp import networkx as nx -import numpy as np -import pytest from networkx.algorithms import shortest_paths from networkx.generators import balanced_tree, random_graphs +import pytest + +import jax +import jax.numpy as jnp +import numpy as np + from ott.geometry import geodesic, geometry, graph from ott.problems.linear import linear_problem from ott.solvers.linear import sinkhorn diff --git a/tests/geometry/graph_test.py b/tests/geometry/graph_test.py index 35dde4c4b..9c79d2b42 100644 --- a/tests/geometry/graph_test.py +++ b/tests/geometry/graph_test.py @@ -13,14 +13,16 @@ # limitations under the License. 
from typing import Literal, Optional, Tuple, Union +import networkx as nx +from networkx.algorithms import shortest_paths +from networkx.generators import balanced_tree, random_graphs + +import pytest + import jax import jax.numpy as jnp -import networkx as nx import numpy as np -import pytest from jax.experimental import sparse -from networkx.algorithms import shortest_paths -from networkx.generators import balanced_tree, random_graphs from ott.geometry import geometry, graph from ott.problems.linear import linear_problem diff --git a/tests/geometry/low_rank_test.py b/tests/geometry/low_rank_test.py index b3cda89cf..3e068f8e2 100644 --- a/tests/geometry/low_rank_test.py +++ b/tests/geometry/low_rank_test.py @@ -13,10 +13,11 @@ # limitations under the License. from typing import Callable, Optional, Tuple, Union +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.geometry import costs, geometry, grid, low_rank, pointcloud diff --git a/tests/geometry/pointcloud_test.py b/tests/geometry/pointcloud_test.py index ff32789fe..1a952132f 100644 --- a/tests/geometry/pointcloud_test.py +++ b/tests/geometry/pointcloud_test.py @@ -13,10 +13,11 @@ # limitations under the License. from typing import Union +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.geometry import costs, geometry, pointcloud diff --git a/tests/geometry/scaling_cost_test.py b/tests/geometry/scaling_cost_test.py index 9f4ad1d57..e321b8524 100644 --- a/tests/geometry/scaling_cost_test.py +++ b/tests/geometry/scaling_cost_test.py @@ -13,10 +13,11 @@ # limitations under the License. 
from typing import Optional, Union +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.geometry import geometry, low_rank, pointcloud from ott.problems.linear import linear_problem diff --git a/tests/geometry/subsetting_test.py b/tests/geometry/subsetting_test.py index 5d7306682..7298001cc 100644 --- a/tests/geometry/subsetting_test.py +++ b/tests/geometry/subsetting_test.py @@ -13,10 +13,11 @@ # limitations under the License. from typing import Optional, Sequence, Tuple, Type, Union +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.geometry import geometry, low_rank, pointcloud diff --git a/tests/initializers/linear/sinkhorn_init_test.py b/tests/initializers/linear/sinkhorn_init_test.py index 7686ddfa9..8cc20f4c0 100644 --- a/tests/initializers/linear/sinkhorn_init_test.py +++ b/tests/initializers/linear/sinkhorn_init_test.py @@ -13,10 +13,11 @@ # limitations under the License. from typing import Literal, Optional +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.geometry import geometry, pointcloud from ott.initializers.linear import initializers as linear_init diff --git a/tests/initializers/linear/sinkhorn_lr_init_test.py b/tests/initializers/linear/sinkhorn_lr_init_test.py index e954fec76..0b67d2286 100644 --- a/tests/initializers/linear/sinkhorn_lr_init_test.py +++ b/tests/initializers/linear/sinkhorn_lr_init_test.py @@ -11,10 +11,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.geometry import geometry, pointcloud from ott.initializers.linear import initializers_lr diff --git a/tests/initializers/quadratic/gw_init_test.py b/tests/initializers/quadratic/gw_init_test.py index e680e9c01..8ab6cc4e5 100644 --- a/tests/initializers/quadratic/gw_init_test.py +++ b/tests/initializers/quadratic/gw_init_test.py @@ -11,10 +11,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.geometry import pointcloud from ott.initializers.linear import initializers as lin_init diff --git a/tests/math/lse_test.py b/tests/math/lse_test.py index 36e7eba7f..3ff28eada 100644 --- a/tests/math/lse_test.py +++ b/tests/math/lse_test.py @@ -11,10 +11,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.math import utils as mu diff --git a/tests/math/math_utils_test.py b/tests/math/math_utils_test.py index 7848bc2a9..3bd4c8114 100644 --- a/tests/math/math_utils_test.py +++ b/tests/math/math_utils_test.py @@ -13,10 +13,11 @@ # limitations under the License. import functools +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.math import utils as mu diff --git a/tests/math/matrix_square_root_test.py b/tests/math/matrix_square_root_test.py index 7c8a1e7d5..3f4aee25b 100644 --- a/tests/math/matrix_square_root_test.py +++ b/tests/math/matrix_square_root_test.py @@ -13,10 +13,11 @@ # limitations under the License. 
from typing import Any, Callable +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.math import matrix_square_root diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index c6d25b128..74d66dea3 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -1,6 +1,7 @@ -import numpy as np import pytest +import numpy as np + from ott.neural.data.dataloaders import ConditionalDataLoader, OTDataLoader diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 794b5d44e..fddc4fc3c 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -13,9 +13,11 @@ # limitations under the License. from typing import Iterator, Optional +import pytest + import jax.numpy as jnp + import optax -import pytest from ott.geometry import costs from ott.neural.models.models import NeuralVectorField, RescalingMLP diff --git a/tests/neural/icnn_test.py b/tests/neural/icnn_test.py index c52eac675..f710bdcbc 100644 --- a/tests/neural/icnn_test.py +++ b/tests/neural/icnn_test.py @@ -11,10 +11,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.neural import models diff --git a/tests/neural/losses_test.py b/tests/neural/losses_test.py index 4569b04d1..8e4a2f96c 100644 --- a/tests/neural/losses_test.py +++ b/tests/neural/losses_test.py @@ -12,10 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.geometry import costs from ott.neural.models import losses, models diff --git a/tests/neural/map_estimator_test.py b/tests/neural/map_estimator_test.py index 96f9a9797..b5df51170 100644 --- a/tests/neural/map_estimator_test.py +++ b/tests/neural/map_estimator_test.py @@ -13,9 +13,10 @@ # limitations under the License. from typing import Optional -import jax.numpy as jnp import pytest +import jax.numpy as jnp + from ott import datasets from ott.geometry import pointcloud from ott.neural.models import losses, models diff --git a/tests/neural/meta_initializer_test.py b/tests/neural/meta_initializer_test.py index 98aa4f4d0..92f0c0b40 100644 --- a/tests/neural/meta_initializer_test.py +++ b/tests/neural/meta_initializer_test.py @@ -13,9 +13,11 @@ # limitations under the License. from typing import Optional +import pytest + import jax import jax.numpy as jnp -import pytest + from flax import linen as nn from ott.geometry import pointcloud diff --git a/tests/neural/neuraldual_test.py b/tests/neural/neuraldual_test.py index 1b7818163..b31ba9b6a 100644 --- a/tests/neural/neuraldual_test.py +++ b/tests/neural/neuraldual_test.py @@ -13,9 +13,10 @@ # limitations under the License. from typing import Optional, Sequence, Tuple +import pytest + import jax import numpy as np -import pytest from ott import datasets from ott.neural import models diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index e77789938..a57588a43 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -13,9 +13,11 @@ # limitations under the License. 
from typing import Iterator, Type +import pytest + import jax.numpy as jnp + import optax -import pytest from ott.neural.models.models import NeuralVectorField, RescalingMLP from ott.neural.solvers.flows import ( diff --git a/tests/problems/linear/potentials_test.py b/tests/problems/linear/potentials_test.py index dd5d4bbd6..a13211119 100644 --- a/tests/problems/linear/potentials_test.py +++ b/tests/problems/linear/potentials_test.py @@ -12,11 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. +import pytest + import jax import jax.numpy as jnp -import matplotlib.pyplot as plt import numpy as np -import pytest + +import matplotlib.pyplot as plt from ott.geometry import costs, pointcloud from ott.problems.linear import linear_problem, potentials diff --git a/tests/solvers/linear/continuous_barycenter_test.py b/tests/solvers/linear/continuous_barycenter_test.py index 4989cc1db..730b529d3 100644 --- a/tests/solvers/linear/continuous_barycenter_test.py +++ b/tests/solvers/linear/continuous_barycenter_test.py @@ -14,10 +14,11 @@ import functools from typing import Tuple +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.geometry import costs, segment from ott.problems.linear import barycenter_problem diff --git a/tests/solvers/linear/discrete_barycenter_test.py b/tests/solvers/linear/discrete_barycenter_test.py index dc90e15c0..56784fb07 100644 --- a/tests/solvers/linear/discrete_barycenter_test.py +++ b/tests/solvers/linear/discrete_barycenter_test.py @@ -11,9 +11,10 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-import jax.numpy as jnp import pytest +import jax.numpy as jnp + from ott.geometry import grid, pointcloud from ott.problems.linear import barycenter_problem as bp from ott.solvers.linear import discrete_barycenter as db diff --git a/tests/solvers/linear/sinkhorn_diff_test.py b/tests/solvers/linear/sinkhorn_diff_test.py index 944534e14..04de4dca9 100644 --- a/tests/solvers/linear/sinkhorn_diff_test.py +++ b/tests/solvers/linear/sinkhorn_diff_test.py @@ -14,10 +14,11 @@ import functools from typing import Callable, List, Optional, Tuple +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.geometry import costs, geometry, grid, pointcloud from ott.problems.linear import linear_problem diff --git a/tests/solvers/linear/sinkhorn_grid_test.py b/tests/solvers/linear/sinkhorn_grid_test.py index dd22f63b7..e7c116c8d 100644 --- a/tests/solvers/linear/sinkhorn_grid_test.py +++ b/tests/solvers/linear/sinkhorn_grid_test.py @@ -11,10 +11,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.geometry import grid, pointcloud from ott.problems.linear import linear_problem diff --git a/tests/solvers/linear/sinkhorn_lr_test.py b/tests/solvers/linear/sinkhorn_lr_test.py index 90b149ea8..0ce5a2307 100644 --- a/tests/solvers/linear/sinkhorn_lr_test.py +++ b/tests/solvers/linear/sinkhorn_lr_test.py @@ -13,10 +13,11 @@ # limitations under the License. 
from typing import Any, Tuple +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.geometry import low_rank, pointcloud from ott.problems.linear import linear_problem diff --git a/tests/solvers/linear/sinkhorn_misc_test.py b/tests/solvers/linear/sinkhorn_misc_test.py index e97a34228..d9d6d616c 100644 --- a/tests/solvers/linear/sinkhorn_misc_test.py +++ b/tests/solvers/linear/sinkhorn_misc_test.py @@ -14,16 +14,19 @@ from typing import Optional import chex + +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.geometry import costs, geometry, pointcloud from ott.problems.linear import linear_problem from ott.solvers import linear -from ott.solvers.linear import acceleration, sinkhorn +from ott.solvers.linear import acceleration from ott.solvers.linear import implicit_differentiation as implicit_lib +from ott.solvers.linear import sinkhorn class TestSinkhornAnderson: diff --git a/tests/solvers/linear/sinkhorn_test.py b/tests/solvers/linear/sinkhorn_test.py index c7475c4f3..0437a4efa 100644 --- a/tests/solvers/linear/sinkhorn_test.py +++ b/tests/solvers/linear/sinkhorn_test.py @@ -15,10 +15,11 @@ import sys from typing import Optional, Tuple +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott import utils from ott.geometry import costs, epsilon_scheduler, geometry, grid, pointcloud diff --git a/tests/solvers/linear/univariate_test.py b/tests/solvers/linear/univariate_test.py index 166da36bc..a002882fb 100644 --- a/tests/solvers/linear/univariate_test.py +++ b/tests/solvers/linear/univariate_test.py @@ -13,10 +13,11 @@ # limitations under the License. 
import functools +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest import scipy as sp from ott.geometry import costs, pointcloud diff --git a/tests/solvers/quadratic/fgw_test.py b/tests/solvers/quadratic/fgw_test.py index 10361d088..f998e802e 100644 --- a/tests/solvers/quadratic/fgw_test.py +++ b/tests/solvers/quadratic/fgw_test.py @@ -13,10 +13,11 @@ # limitations under the License. from typing import Literal, Tuple, Union +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.geometry import geometry, low_rank, pointcloud from ott.problems.quadratic import quadratic_problem diff --git a/tests/solvers/quadratic/gw_barycenter_test.py b/tests/solvers/quadratic/gw_barycenter_test.py index a157f27e5..eba4e3054 100644 --- a/tests/solvers/quadratic/gw_barycenter_test.py +++ b/tests/solvers/quadratic/gw_barycenter_test.py @@ -13,10 +13,11 @@ # limitations under the License. from typing import Any, Optional, Sequence, Tuple +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.geometry import pointcloud from ott.problems.quadratic import gw_barycenter as gwb diff --git a/tests/solvers/quadratic/gw_test.py b/tests/solvers/quadratic/gw_test.py index e7ef7b558..816f7fcd6 100644 --- a/tests/solvers/quadratic/gw_test.py +++ b/tests/solvers/quadratic/gw_test.py @@ -13,10 +13,11 @@ # limitations under the License. 
from typing import Tuple, Union +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.geometry import geometry, low_rank, pointcloud from ott.problems.quadratic import quadratic_problem diff --git a/tests/solvers/quadratic/lower_bound_test.py b/tests/solvers/quadratic/lower_bound_test.py index ad65be477..2e30a1bbe 100644 --- a/tests/solvers/quadratic/lower_bound_test.py +++ b/tests/solvers/quadratic/lower_bound_test.py @@ -15,10 +15,11 @@ import functools from typing import Callable +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.geometry import costs, distrib_costs, pointcloud from ott.initializers.linear import initializers diff --git a/tests/tools/gaussian_mixture/fit_gmm_pair_test.py b/tests/tools/gaussian_mixture/fit_gmm_pair_test.py index 75fc3bef5..20fe4ef4a 100644 --- a/tests/tools/gaussian_mixture/fit_gmm_pair_test.py +++ b/tests/tools/gaussian_mixture/fit_gmm_pair_test.py @@ -11,9 +11,10 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import pytest + import jax import jax.numpy as jnp -import pytest from ott.tools.gaussian_mixture import ( fit_gmm, diff --git a/tests/tools/gaussian_mixture/fit_gmm_test.py b/tests/tools/gaussian_mixture/fit_gmm_test.py index 1cfb4f95e..648e9a287 100644 --- a/tests/tools/gaussian_mixture/fit_gmm_test.py +++ b/tests/tools/gaussian_mixture/fit_gmm_test.py @@ -11,10 +11,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import pytest + import jax import jax.numpy as jnp import jax.test_util -import pytest from ott.tools.gaussian_mixture import fit_gmm, gaussian_mixture diff --git a/tests/tools/gaussian_mixture/gaussian_mixture_pair_test.py b/tests/tools/gaussian_mixture/gaussian_mixture_pair_test.py index bf2b01699..b11431d8c 100644 --- a/tests/tools/gaussian_mixture/gaussian_mixture_pair_test.py +++ b/tests/tools/gaussian_mixture/gaussian_mixture_pair_test.py @@ -11,10 +11,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.tools.gaussian_mixture import gaussian_mixture, gaussian_mixture_pair diff --git a/tests/tools/gaussian_mixture/gaussian_mixture_test.py b/tests/tools/gaussian_mixture/gaussian_mixture_test.py index fd7675d51..540ebe980 100644 --- a/tests/tools/gaussian_mixture/gaussian_mixture_test.py +++ b/tests/tools/gaussian_mixture/gaussian_mixture_test.py @@ -11,10 +11,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.tools.gaussian_mixture import gaussian_mixture, linalg diff --git a/tests/tools/gaussian_mixture/gaussian_test.py b/tests/tools/gaussian_mixture/gaussian_test.py index 0eac630e3..23deff00d 100644 --- a/tests/tools/gaussian_mixture/gaussian_test.py +++ b/tests/tools/gaussian_mixture/gaussian_test.py @@ -11,10 +11,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.tools.gaussian_mixture import gaussian, scale_tril diff --git a/tests/tools/gaussian_mixture/linalg_test.py b/tests/tools/gaussian_mixture/linalg_test.py index 6fedb13ae..4529364dc 100644 --- a/tests/tools/gaussian_mixture/linalg_test.py +++ b/tests/tools/gaussian_mixture/linalg_test.py @@ -11,10 +11,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.tools.gaussian_mixture import linalg diff --git a/tests/tools/gaussian_mixture/probabilities_test.py b/tests/tools/gaussian_mixture/probabilities_test.py index 5d28a52aa..4fce8186f 100644 --- a/tests/tools/gaussian_mixture/probabilities_test.py +++ b/tests/tools/gaussian_mixture/probabilities_test.py @@ -11,10 +11,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.tools.gaussian_mixture import probabilities diff --git a/tests/tools/gaussian_mixture/scale_tril_test.py b/tests/tools/gaussian_mixture/scale_tril_test.py index 36643b6d7..f7bbe9293 100644 --- a/tests/tools/gaussian_mixture/scale_tril_test.py +++ b/tests/tools/gaussian_mixture/scale_tril_test.py @@ -11,10 +11,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.math import matrix_square_root from ott.tools.gaussian_mixture import scale_tril diff --git a/tests/tools/k_means_test.py b/tests/tools/k_means_test.py index 9b504a82d..c00288cec 100644 --- a/tests/tools/k_means_test.py +++ b/tests/tools/k_means_test.py @@ -15,10 +15,11 @@ import sys from typing import Any, Literal, Optional, Tuple, Union +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from sklearn import datasets from sklearn.cluster import KMeans, kmeans_plusplus from sklearn.cluster._k_means_common import _is_same_clustering diff --git a/tests/tools/plot_test.py b/tests/tools/plot_test.py index 8c8b81a1c..1f9f9ba01 100644 --- a/tests/tools/plot_test.py +++ b/tests/tools/plot_test.py @@ -13,6 +13,7 @@ # limitations under the License. import jax + import matplotlib.pyplot as plt import ott diff --git a/tests/tools/segment_sinkhorn_test.py b/tests/tools/segment_sinkhorn_test.py index 6e8a8fb8c..f98c164bf 100644 --- a/tests/tools/segment_sinkhorn_test.py +++ b/tests/tools/segment_sinkhorn_test.py @@ -11,10 +11,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.geometry import costs, pointcloud from ott.problems.linear import linear_problem diff --git a/tests/tools/sinkhorn_divergence_test.py b/tests/tools/sinkhorn_divergence_test.py index 0f3e56bfc..e3eab9912 100644 --- a/tests/tools/sinkhorn_divergence_test.py +++ b/tests/tools/sinkhorn_divergence_test.py @@ -13,10 +13,11 @@ # limitations under the License. 
from typing import Any, Dict, Optional +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.geometry import costs, geometry, pointcloud from ott.solvers import linear diff --git a/tests/tools/soft_sort_test.py b/tests/tools/soft_sort_test.py index 2432a2dee..c84680e9e 100644 --- a/tests/tools/soft_sort_test.py +++ b/tests/tools/soft_sort_test.py @@ -14,10 +14,11 @@ import functools from typing import Tuple +import pytest + import jax import jax.numpy as jnp import numpy as np -import pytest from ott.solvers.linear import acceleration from ott.solvers.linear import implicit_differentiation as implicit_lib From 4371e74949e3d252de1839afe10639bf01fcdaf0 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Wed, 29 Nov 2023 14:40:42 +0100 Subject: [PATCH 049/186] replace rng jnp.ndarray type by jax.array --- docs/tutorials/GWLRSinkhorn.ipynb | 2 +- src/ott/datasets.py | 2 +- .../initializers/linear/initializers_lr.py | 28 ++++++++-------- src/ott/neural/solvers/base_solver.py | 2 +- src/ott/neural/solvers/flows.py | 6 ++-- src/ott/neural/solvers/neuraldual.py | 4 +-- .../solvers/linear/continuous_barycenter.py | 2 +- .../solvers/quadratic/gromov_wasserstein.py | 4 +-- src/ott/solvers/quadratic/gw_barycenter.py | 2 +- src/ott/tools/gaussian_mixture/fit_gmm.py | 6 ++-- src/ott/tools/gaussian_mixture/gaussian.py | 4 +-- .../gaussian_mixture/gaussian_mixture.py | 4 +-- src/ott/tools/gaussian_mixture/linalg.py | 4 +-- .../tools/gaussian_mixture/probabilities.py | 4 +-- src/ott/tools/gaussian_mixture/scale_tril.py | 2 +- src/ott/tools/k_means.py | 10 +++--- tests/geometry/costs_test.py | 20 ++++++------ tests/geometry/graph_test.py | 10 +++--- tests/geometry/low_rank_test.py | 26 +++++++-------- tests/geometry/pointcloud_test.py | 10 +++--- tests/geometry/scaling_cost_test.py | 2 +- tests/geometry/subsetting_test.py | 8 ++--- .../initializers/linear/sinkhorn_init_test.py | 18 +++++------ .../linear/sinkhorn_lr_init_test.py | 8 ++--- 
tests/initializers/quadratic/gw_init_test.py | 3 +- tests/math/lse_test.py | 2 +- tests/math/math_utils_test.py | 2 +- tests/math/matrix_square_root_test.py | 6 ++-- tests/neural/icnn_test.py | 4 +-- tests/neural/losses_test.py | 7 ++-- tests/neural/meta_initializer_test.py | 4 +-- tests/problems/linear/potentials_test.py | 14 ++++---- .../linear/continuous_barycenter_test.py | 8 ++--- tests/solvers/linear/sinkhorn_diff_test.py | 24 +++++++------- tests/solvers/linear/sinkhorn_grid_test.py | 8 ++--- tests/solvers/linear/sinkhorn_lr_test.py | 2 +- tests/solvers/linear/sinkhorn_misc_test.py | 10 +++--- tests/solvers/linear/sinkhorn_test.py | 2 +- tests/solvers/linear/univariate_test.py | 4 +-- tests/solvers/quadratic/fgw_test.py | 6 ++-- tests/solvers/quadratic/gw_barycenter_test.py | 6 ++-- tests/solvers/quadratic/gw_test.py | 12 +++---- tests/solvers/quadratic/lower_bound_test.py | 4 +-- .../gaussian_mixture/fit_gmm_pair_test.py | 2 +- tests/tools/gaussian_mixture/fit_gmm_test.py | 2 +- .../gaussian_mixture_pair_test.py | 2 +- .../gaussian_mixture/gaussian_mixture_test.py | 16 +++++----- tests/tools/gaussian_mixture/gaussian_test.py | 18 +++++------ tests/tools/gaussian_mixture/linalg_test.py | 20 ++++++------ .../gaussian_mixture/probabilities_test.py | 4 +-- .../tools/gaussian_mixture/scale_tril_test.py | 12 +++---- tests/tools/k_means_test.py | 32 +++++++++---------- tests/tools/segment_sinkhorn_test.py | 2 +- tests/tools/sinkhorn_divergence_test.py | 4 +-- tests/tools/soft_sort_test.py | 24 +++++++------- 55 files changed, 222 insertions(+), 232 deletions(-) diff --git a/docs/tutorials/GWLRSinkhorn.ipynb b/docs/tutorials/GWLRSinkhorn.ipynb index 590671428..ace06be8f 100644 --- a/docs/tutorials/GWLRSinkhorn.ipynb +++ b/docs/tutorials/GWLRSinkhorn.ipynb @@ -66,7 +66,7 @@ }, "outputs": [], "source": [ - "def create_points(rng: jnp.ndarray, n: int, m: int, d1: int, d2: int):\n", + "def create_points(rng: jax.Array, n: int, m: int, d1: int, d2: int):\n", " rngs = 
jax.random.split(rng, 5)\n", " x = jax.random.uniform(rngs[0], (n, d1))\n", " y = jax.random.uniform(rngs[1], (m, d2))\n", diff --git a/src/ott/datasets.py b/src/ott/datasets.py index 1946bdcdd..3507c3418 100644 --- a/src/ott/datasets.py +++ b/src/ott/datasets.py @@ -57,7 +57,7 @@ class GaussianMixture: """ name: Name_t batch_size: int - rng: jnp.ndarray + rng: jax.Array scale: float = 5.0 std: float = 0.5 diff --git a/src/ott/initializers/linear/initializers_lr.py b/src/ott/initializers/linear/initializers_lr.py index b1f70d912..a3f615846 100644 --- a/src/ott/initializers/linear/initializers_lr.py +++ b/src/ott/initializers/linear/initializers_lr.py @@ -67,7 +67,7 @@ def __init__(self, rank: int, **kwargs: Any): def init_q( self, ot_prob: Problem_t, - rng: jnp.ndarray, + rng: jax.Array, *, init_g: jnp.ndarray, **kwargs: Any, @@ -88,7 +88,7 @@ def init_q( def init_r( self, ot_prob: Problem_t, - rng: jnp.ndarray, + rng: jax.Array, *, init_g: jnp.ndarray, **kwargs: Any, @@ -109,7 +109,7 @@ def init_r( def init_g( self, ot_prob: Problem_t, - rng: jnp.ndarray, + rng: jax.Array, **kwargs: Any, ) -> jnp.ndarray: """Initialize the low-rank factor :math:`g`. 
@@ -232,7 +232,7 @@ class RandomInitializer(LRInitializer): def init_q( # noqa: D102 self, ot_prob: Problem_t, - rng: jnp.ndarray, + rng: jax.Array, *, init_g: jnp.ndarray, **kwargs: Any, @@ -245,7 +245,7 @@ def init_q( # noqa: D102 def init_r( # noqa: D102 self, ot_prob: Problem_t, - rng: jnp.ndarray, + rng: jax.Array, *, init_g: jnp.ndarray, **kwargs: Any, @@ -258,7 +258,7 @@ def init_r( # noqa: D102 def init_g( # noqa: D102 self, ot_prob: Problem_t, - rng: jnp.ndarray, + rng: jax.Array, **kwargs: Any, ) -> jnp.ndarray: del kwargs @@ -305,7 +305,7 @@ def _compute_factor( def init_q( # noqa: D102 self, ot_prob: Problem_t, - rng: jnp.ndarray, + rng: jax.Array, *, init_g: jnp.ndarray, **kwargs: Any, @@ -316,7 +316,7 @@ def init_q( # noqa: D102 def init_r( # noqa: D102 self, ot_prob: Problem_t, - rng: jnp.ndarray, + rng: jax.Array, *, init_g: jnp.ndarray, **kwargs: Any, @@ -327,7 +327,7 @@ def init_r( # noqa: D102 def init_g( # noqa: D102 self, ot_prob: Problem_t, - rng: jnp.ndarray, + rng: jax.Array, **kwargs: Any, ) -> jnp.ndarray: del rng, kwargs @@ -376,7 +376,7 @@ def _extract_array(geom: geometry.Geometry, *, first: bool) -> jnp.ndarray: def _compute_factor( self, ot_prob: Problem_t, - rng: jnp.ndarray, + rng: jax.Array, *, init_g: jnp.ndarray, which: Literal["q", "r"], @@ -418,7 +418,7 @@ def _compute_factor( def init_q( # noqa: D102 self, ot_prob: Problem_t, - rng: jnp.ndarray, + rng: jax.Array, *, init_g: jnp.ndarray, **kwargs: Any, @@ -430,7 +430,7 @@ def init_q( # noqa: D102 def init_r( # noqa: D102 self, ot_prob: Problem_t, - rng: jnp.ndarray, + rng: jax.Array, *, init_g: jnp.ndarray, **kwargs: Any, @@ -442,7 +442,7 @@ def init_r( # noqa: D102 def init_g( # noqa: D102 self, ot_prob: Problem_t, - rng: jnp.ndarray, + rng: jax.Array, **kwargs: Any, ) -> jnp.ndarray: del rng, kwargs @@ -511,7 +511,7 @@ class State(NamedTuple): # noqa: D106 def _compute_factor( self, ot_prob: Problem_t, - rng: jnp.ndarray, + rng: jax.Array, *, init_g: jnp.ndarray, which: 
Literal["q", "r"], diff --git a/src/ott/neural/solvers/base_solver.py b/src/ott/neural/solvers/base_solver.py index bde81e9da..fe0ea6f3d 100644 --- a/src/ott/neural/solvers/base_solver.py +++ b/src/ott/neural/solvers/base_solver.py @@ -258,7 +258,7 @@ class UnbalancednessMixin: def __init__( self, - rng: jnp.ndarray, + rng: jax.Array, source_dim: int, target_dim: int, cond_dim: Optional[int], diff --git a/src/ott/neural/solvers/flows.py b/src/ott/neural/solvers/flows.py index b61ff08d1..47be01fc5 100644 --- a/src/ott/neural/solvers/flows.py +++ b/src/ott/neural/solvers/flows.py @@ -180,7 +180,7 @@ def __init__(self, low: float, high: float) -> None: self.high = high @abc.abstractmethod - def __call__(self, rng: jnp.ndarray, num_samples: int) -> jnp.ndarray: + def __call__(self, rng: jax.Array, num_samples: int) -> jnp.ndarray: """Generate `num_samples` samples of the time `math`:t:. Args: @@ -201,7 +201,7 @@ class UniformSampler(BaseTimeSampler): def __init__(self, low: float = 0.0, high: float = 1.0) -> None: super().__init__(low=low, high=high) - def __call__(self, rng: jnp.ndarray, num_samples: int) -> jnp.ndarray: + def __call__(self, rng: jax.Array, num_samples: int) -> jnp.ndarray: """Generate `num_samples` samples of the time `math`:t:. Args: @@ -234,7 +234,7 @@ def __init__( super().__init__(low=low, high=high) self.offset = offset - def __call__(self, rng: jnp.ndarray, num_samples: int) -> jnp.ndarray: + def __call__(self, rng: jax.Array, num_samples: int) -> jnp.ndarray: """Generate `num_samples` samples of the time `math`:t:. 
Args: diff --git a/src/ott/neural/solvers/neuraldual.py b/src/ott/neural/solvers/neuraldual.py index a7da8c3e7..0d9e215bb 100644 --- a/src/ott/neural/solvers/neuraldual.py +++ b/src/ott/neural/solvers/neuraldual.py @@ -149,7 +149,7 @@ def potential_gradient_fn( def create_train_state( self, - rng: jnp.ndarray, + rng: jax.Array, optimizer: optax.OptState, input: Union[int, Tuple[int, ...]], **kwargs: Any, @@ -289,7 +289,7 @@ def __init__( def setup( self, - rng: jnp.ndarray, + rng: jax.Array, neural_f: BaseW2NeuralDual, neural_g: BaseW2NeuralDual, dim_data: int, diff --git a/src/ott/solvers/linear/continuous_barycenter.py b/src/ott/solvers/linear/continuous_barycenter.py index b93c14032..e1477e60f 100644 --- a/src/ott/solvers/linear/continuous_barycenter.py +++ b/src/ott/solvers/linear/continuous_barycenter.py @@ -196,7 +196,7 @@ def output_from_state( # noqa: D102 def iterations( solver: FreeWassersteinBarycenter, bar_size: int, bar_prob: barycenter_problem.FreeBarycenterProblem, x_init: jnp.ndarray, - rng: jnp.ndarray + rng: jax.Array ) -> FreeBarycenterState: """Jittable Wasserstein barycenter outer loop.""" diff --git a/src/ott/solvers/quadratic/gromov_wasserstein.py b/src/ott/solvers/quadratic/gromov_wasserstein.py index 862b91999..6180db73f 100644 --- a/src/ott/solvers/quadratic/gromov_wasserstein.py +++ b/src/ott/solvers/quadratic/gromov_wasserstein.py @@ -272,7 +272,7 @@ def init_state( self, prob: quadratic_problem.QuadraticProblem, init: linear_problem.LinearProblem, - rng: jnp.ndarray, + rng: jax.Array, ) -> GWState: """Initialize the state of the Gromov-Wasserstein iterations. 
@@ -361,7 +361,7 @@ def iterations( solver: GromovWasserstein, prob: quadratic_problem.QuadraticProblem, init: linear_problem.LinearProblem, - rng: jnp.ndarray, + rng: jax.Array, ) -> GWOutput: """Jittable Gromov-Wasserstein outer loop.""" diff --git a/src/ott/solvers/quadratic/gw_barycenter.py b/src/ott/solvers/quadratic/gw_barycenter.py index 8816c5ada..ea14880fe 100644 --- a/src/ott/solvers/quadratic/gw_barycenter.py +++ b/src/ott/solvers/quadratic/gw_barycenter.py @@ -282,7 +282,7 @@ def tree_unflatten( # noqa: D102 @partial(jax.vmap, in_axes=[None, 0, None, 0, None]) def init_transports( - solver, rng: jnp.ndarray, a: jnp.ndarray, b: jnp.ndarray, + solver, rng: jax.Array, a: jnp.ndarray, b: jnp.ndarray, epsilon: Optional[float] ) -> jnp.ndarray: """Initialize random 2D point cloud and solve the linear OT problem. diff --git a/src/ott/tools/gaussian_mixture/fit_gmm.py b/src/ott/tools/gaussian_mixture/fit_gmm.py index 0e3fbc4e8..4c62bded7 100644 --- a/src/ott/tools/gaussian_mixture/fit_gmm.py +++ b/src/ott/tools/gaussian_mixture/fit_gmm.py @@ -195,7 +195,7 @@ def _dist_sq_one_loc(points: jnp.ndarray, loc: jnp.ndarray) -> jnp.ndarray: def _get_locs( - rng: jnp.ndarray, points: jnp.ndarray, n_components: int + rng: jax.Array, points: jnp.ndarray, n_components: int ) -> jnp.ndarray: """Get the initial component means. 
@@ -229,7 +229,7 @@ def _get_locs( def from_kmeans_plusplus( - rng: jnp.ndarray, + rng: jax.Array, points: jnp.ndarray, point_weights: Optional[jnp.ndarray], n_components: int, @@ -265,7 +265,7 @@ def from_kmeans_plusplus( def initialize( - rng: jnp.ndarray, + rng: jax.Array, points: jnp.ndarray, point_weights: Optional[jnp.ndarray], n_components: int, diff --git a/src/ott/tools/gaussian_mixture/gaussian.py b/src/ott/tools/gaussian_mixture/gaussian.py index 70ac505f2..6e0a8ccb7 100644 --- a/src/ott/tools/gaussian_mixture/gaussian.py +++ b/src/ott/tools/gaussian_mixture/gaussian.py @@ -63,7 +63,7 @@ def from_samples( @classmethod def from_random( cls, - rng: jnp.ndarray, + rng: jax.Array, n_dimensions: int, stdev_mean: float = 0.1, stdev_cov: float = 0.1, @@ -138,7 +138,7 @@ def log_prob( -0.5 * (d * LOG2PI + log_det[None] + jnp.sum(z ** 2., axis=-1)) ) # (?, k) - def sample(self, rng: jnp.ndarray, size: int) -> jnp.ndarray: + def sample(self, rng: jax.Array, size: int) -> jnp.ndarray: """Generate samples from the distribution.""" std_samples_t = jax.random.normal(key=rng, shape=(self.n_dimensions, size)) return self.loc[None] + ( diff --git a/src/ott/tools/gaussian_mixture/gaussian_mixture.py b/src/ott/tools/gaussian_mixture/gaussian_mixture.py index 5d40a870d..313689939 100644 --- a/src/ott/tools/gaussian_mixture/gaussian_mixture.py +++ b/src/ott/tools/gaussian_mixture/gaussian_mixture.py @@ -78,7 +78,7 @@ def __init__( @classmethod def from_random( cls, - rng: jnp.ndarray, + rng: jax.Array, n_components: int, n_dimensions: int, stdev_mean: float = 0.1, @@ -219,7 +219,7 @@ def components(self) -> List[gaussian.Gaussian]: """List of all GMM components.""" return [self.get_component(i) for i in range(self.n_components)] - def sample(self, rng: jnp.ndarray, size: int) -> jnp.ndarray: + def sample(self, rng: jax.Array, size: int) -> jnp.ndarray: """Generate samples from the distribution.""" subrng0, subrng1 = jax.random.split(rng) component = 
self.component_weight_ob.sample(rng=subrng0, size=size) diff --git a/src/ott/tools/gaussian_mixture/linalg.py b/src/ott/tools/gaussian_mixture/linalg.py index 9c88df0cc..8e71369f3 100644 --- a/src/ott/tools/gaussian_mixture/linalg.py +++ b/src/ott/tools/gaussian_mixture/linalg.py @@ -132,9 +132,7 @@ def invmatvectril( def get_random_orthogonal( - rng: jnp.ndarray, - dim: int, - dtype: Optional[jnp.dtype] = None + rng: jax.Array, dim: int, dtype: Optional[jnp.dtype] = None ) -> jnp.ndarray: """Get a random orthogonal matrix with the specified dimension.""" m = jax.random.normal(key=rng, shape=[dim, dim], dtype=dtype) diff --git a/src/ott/tools/gaussian_mixture/probabilities.py b/src/ott/tools/gaussian_mixture/probabilities.py index 66a90c1a7..6df3bb023 100644 --- a/src/ott/tools/gaussian_mixture/probabilities.py +++ b/src/ott/tools/gaussian_mixture/probabilities.py @@ -35,7 +35,7 @@ def __init__(self, params): @classmethod def from_random( cls, - rng: jnp.ndarray, + rng: jax.Array, n_dimensions: int, stdev: Optional[float] = 0.1, dtype: Optional[jnp.dtype] = None @@ -76,7 +76,7 @@ def probs(self) -> jnp.ndarray: """Get the probabilities.""" return jax.nn.softmax(self.unnormalized_log_probs()) - def sample(self, rng: jnp.ndarray, size: int) -> jnp.ndarray: + def sample(self, rng: jax.Array, size: int) -> jnp.ndarray: """Sample from the distribution.""" return jax.random.categorical( key=rng, logits=self.unnormalized_log_probs(), shape=(size,) diff --git a/src/ott/tools/gaussian_mixture/scale_tril.py b/src/ott/tools/gaussian_mixture/scale_tril.py index 95b812d99..b286cc74e 100644 --- a/src/ott/tools/gaussian_mixture/scale_tril.py +++ b/src/ott/tools/gaussian_mixture/scale_tril.py @@ -44,7 +44,7 @@ def from_points_and_weights( @classmethod def from_random( cls, - rng: jnp.ndarray, + rng: jax.Array, n_dimensions: int, stdev: Optional[float] = 0.1, dtype: jnp.dtype = jnp.float32, diff --git a/src/ott/tools/k_means.py b/src/ott/tools/k_means.py index 9175abe2c..abbe99f34 
100644 --- a/src/ott/tools/k_means.py +++ b/src/ott/tools/k_means.py @@ -29,7 +29,7 @@ class KPPState(NamedTuple): # noqa: D101 - rng: jnp.ndarray + rng: jax.Array centroids: jnp.ndarray centroid_dists: jnp.ndarray @@ -109,7 +109,7 @@ def _from_state( def _random_init( - geom: pointcloud.PointCloud, k: int, rng: jnp.ndarray + geom: pointcloud.PointCloud, k: int, rng: jax.Array ) -> jnp.ndarray: ixs = jnp.arange(geom.shape[0]) ixs = jax.random.choice(rng, ixs, shape=(k,), replace=False) @@ -119,11 +119,11 @@ def _random_init( def _k_means_plus_plus( geom: pointcloud.PointCloud, k: int, - rng: jnp.ndarray, + rng: jax.Array, n_local_trials: Optional[int] = None, ) -> jnp.ndarray: - def init_fn(geom: pointcloud.PointCloud, rng: jnp.ndarray) -> KPPState: + def init_fn(geom: pointcloud.PointCloud, rng: jax.Array) -> KPPState: rng, next_rng = jax.random.split(rng, 2) ix = jax.random.choice(rng, jnp.arange(geom.shape[0]), shape=()) centroids = jnp.full((k, geom.cost_rank), jnp.inf).at[0].set(geom.x[ix]) @@ -224,7 +224,7 @@ def _update_centroids( @functools.partial(jax.vmap, in_axes=[0] + [None] * 9) def _k_means( - rng: jnp.ndarray, + rng: jax.Array, geom: pointcloud.PointCloud, k: int, weights: Optional[jnp.ndarray] = None, diff --git a/tests/geometry/costs_test.py b/tests/geometry/costs_test.py index 02d9976da..0a7bead17 100644 --- a/tests/geometry/costs_test.py +++ b/tests/geometry/costs_test.py @@ -36,7 +36,7 @@ def _proj(matrix: jnp.ndarray) -> jnp.ndarray: @pytest.mark.fast() class TestCostFn: - def test_cosine(self, rng: jnp.ndarray): + def test_cosine(self, rng: jax.Array): """Test the cosine cost function.""" x = jnp.array([0, 0]) y = jnp.array([0, 0]) @@ -85,7 +85,7 @@ def test_cosine(self, rng: jnp.ndarray): @pytest.mark.fast() class TestBuresBarycenter: - def test_bures(self, rng: jnp.ndarray): + def test_bures(self, rng: jax.Array): d = 3 r = jnp.array([1.2036, 0.2825, 0.013]) Sigma1 = r * jnp.eye(d) @@ -142,7 +142,7 @@ class TestRegTICost: ) def 
test_reg_cost_legendre( self, - rng: jnp.ndarray, + rng: jax.Array, scaling_reg: float, cost_fn_t: Type[costs.RegTICost], use_mat: bool, @@ -164,7 +164,7 @@ def test_reg_cost_legendre( @pytest.mark.parametrize("k", [1, 3, 10]) @pytest.mark.parametrize("d", [10, 50]) - def test_elastic_sq_k_overlap(self, rng: jnp.ndarray, k: int, d: int): + def test_elastic_sq_k_overlap(self, rng: jax.Array, k: int, d: int): expected = jax.random.normal(rng, (d,)) cost_fn = costs.ElasticSqKOverlap(k=k, scaling_reg=1e-2) @@ -179,9 +179,7 @@ def test_elastic_sq_k_overlap(self, rng: jnp.ndarray, k: int, d: int): costs.ElasticSqKOverlap(k=3, scaling_reg=17) ] ) - def test_sparse_displacement( - self, rng: jnp.ndarray, cost_fn: costs.RegTICost - ): + def test_sparse_displacement(self, rng: jax.Array, cost_fn: costs.RegTICost): frac_sparse = 0.7 rng1, rng2 = jax.random.split(rng, 2) d = 17 @@ -197,7 +195,7 @@ def test_sparse_displacement( @pytest.mark.parametrize("cost_type_t", [costs.ElasticL1, costs.ElasticSTVS]) def test_stronger_regularization_increases_sparsity( - self, rng: jnp.ndarray, cost_type_t: Type[costs.RegTICost] + self, rng: jax.Array, cost_type_t: Type[costs.RegTICost] ): d, rngs = 17, jax.random.split(rng, 4) x = jax.random.normal(rngs[0], (50, d)) @@ -226,7 +224,7 @@ class TestSoftDTW: @pytest.mark.parametrize("n", [7, 10]) @pytest.mark.parametrize("m", [9, 10]) @pytest.mark.parametrize("gamma", [1e-3, 5]) - def test_soft_dtw(self, rng: jnp.ndarray, n: int, m: int, gamma: float): + def test_soft_dtw(self, rng: jax.Array, n: int, m: int, gamma: float): rng1, rng2 = jax.random.split(rng, 2) t1 = jax.random.normal(rng1, (n,)) t2 = jax.random.normal(rng2, (m,)) @@ -239,7 +237,7 @@ def test_soft_dtw(self, rng: jnp.ndarray, n: int, m: int, gamma: float): @pytest.mark.parametrize(("debiased", "jit"), [(False, True), (True, False)]) def test_soft_dtw_debiased( self, - rng: jnp.ndarray, + rng: jax.Array, debiased: bool, jit: bool, ): @@ -266,7 +264,7 @@ def 
test_soft_dtw_debiased( @pytest.mark.parametrize(("debiased", "jit"), [(False, False), (True, True)]) @pytest.mark.parametrize("gamma", [1e-2, 1]) def test_soft_dtw_grad( - self, rng: jnp.ndarray, debiased: bool, jit: bool, gamma: float + self, rng: jax.Array, debiased: bool, jit: bool, gamma: float ): rngs = jax.random.split(rng, 4) eps, tol = 1e-3, 1e-5 diff --git a/tests/geometry/graph_test.py b/tests/geometry/graph_test.py index 9c79d2b42..a68179db3 100644 --- a/tests/geometry/graph_test.py +++ b/tests/geometry/graph_test.py @@ -78,7 +78,7 @@ def gt_geometry( class TestGraph: - def test_kernel_is_symmetric_positive_definite(self, rng: jnp.ndarray): + def test_kernel_is_symmetric_positive_definite(self, rng: jax.Array): n, tol = 65, 0.02 x = jax.random.normal(rng, (n,)) geom = graph.Graph.from_graph(random_graph(n), t=1e-3) @@ -115,7 +115,7 @@ def test_automatic_t(self): ) def test_approximates_ground_truth( self, - rng: jnp.ndarray, + rng: jax.Array, numerical_scheme: Literal["backward_euler", "crank_nicolson"], ): eps, n_steps = 1e-5, 20 @@ -209,7 +209,7 @@ def laplacian(G: jnp.ndarray) -> jnp.ndarray: np.testing.assert_allclose(actual, expected, rtol=1e-6, atol=1e-6) @pytest.mark.fast.with_args(jit=[False, True], only_fast=0) - def test_graph_sinkhorn(self, rng: jnp.ndarray, jit: bool): + def test_graph_sinkhorn(self, rng: jax.Array, jit: bool): def callback(geom: geometry.Geometry) -> sinkhorn.SinkhornOutput: solver = sinkhorn.Sinkhorn(lse_mode=False) @@ -252,7 +252,7 @@ def callback(geom: geometry.Geometry) -> sinkhorn.SinkhornOutput: ids=["not-implicit", "implicit"], ) def test_dense_graph_differentiability( - self, rng: jnp.ndarray, implicit_diff: bool + self, rng: jax.Array, implicit_diff: bool ): def callback( @@ -287,7 +287,7 @@ def callback( actual = 2 * jnp.vdot(v_w, grad_w) np.testing.assert_allclose(actual, expected, rtol=1e-4, atol=1e-4) - def test_tolerance_hilbert_metric(self, rng: jnp.ndarray): + def test_tolerance_hilbert_metric(self, rng: 
jax.Array): n, n_steps, t, tol = 256, 1000, 1e-4, 3e-4 G = random_graph(n, p=0.15) x = jnp.abs(jax.random.normal(rng, (n,))) diff --git a/tests/geometry/low_rank_test.py b/tests/geometry/low_rank_test.py index 3e068f8e2..507042f68 100644 --- a/tests/geometry/low_rank_test.py +++ b/tests/geometry/low_rank_test.py @@ -25,7 +25,7 @@ @pytest.mark.fast() class TestLRGeometry: - def test_apply(self, rng: jnp.ndarray): + def test_apply(self, rng: jax.Array): """Test application of cost to vec or matrix.""" n, m, r = 17, 11, 7 rngs = jax.random.split(rng, 5) @@ -46,7 +46,7 @@ def test_apply(self, rng: jnp.ndarray): @pytest.mark.parametrize("scale_cost", ["mean", "max_cost", "max_bound", 42.]) def test_conversion_pointcloud( - self, rng: jnp.ndarray, scale_cost: Union[str, float] + self, rng: jax.Array, scale_cost: Union[str, float] ): """Test conversion from PointCloud to LRCGeometry.""" n, m, d = 17, 11, 3 @@ -70,7 +70,7 @@ def test_conversion_pointcloud( rtol=1e-4 ) - def test_apply_squared(self, rng: jnp.ndarray): + def test_apply_squared(self, rng: jax.Array): """Test application of squared cost to vec or matrix.""" n, m = 27, 25 rngs = jax.random.split(rng, 5) @@ -95,7 +95,7 @@ def test_apply_squared(self, rng: jnp.ndarray): @pytest.mark.parametrize("bias", [(0, 0), (4, 5)]) @pytest.mark.parametrize("scale_factor", [(1, 1), (2, 3)]) def test_add_lr_geoms( - self, rng: jnp.ndarray, bias: Tuple[float, float], + self, rng: jax.Array, bias: Tuple[float, float], scale_factor: Tuple[float, float] ): """Test application of cost to vec or matrix.""" @@ -134,7 +134,7 @@ def test_add_lr_geoms( @pytest.mark.parametrize(("scale", "scale_cost", "epsilon"), [(0.1, "mean", None), (0.9, "max_cost", 1e-2)]) def test_add_lr_geoms_scale_factor( - self, rng: jnp.ndarray, scale: float, scale_cost: str, + self, rng: jax.Array, scale: float, scale_cost: str, epsilon: Optional[float] ): n, d = 71, 2 @@ -161,8 +161,7 @@ def test_add_lr_geoms_scale_factor( @pytest.mark.parametrize("axis", [0, 
1]) @pytest.mark.parametrize("fn", [lambda x: x + 10, lambda x: x * 2]) def test_apply_affine_function_efficient( - self, rng: jnp.ndarray, fn: Callable[[jnp.ndarray], jnp.ndarray], - axis: int + self, rng: jax.Array, fn: Callable[[jnp.ndarray], jnp.ndarray], axis: int ): n, m, d = 21, 13, 3 rngs = jax.random.split(rng, 3) @@ -182,7 +181,7 @@ def test_apply_affine_function_efficient( np.testing.assert_allclose(res_ineff, res_eff, rtol=1e-4, atol=1e-4) @pytest.mark.parametrize("rank", [5, 1000]) - def test_point_cloud_to_lr(self, rng: jnp.ndarray, rank: int): + def test_point_cloud_to_lr(self, rng: jax.Array, rank: int): n, m = 1500, 1000 scale = 2.0 rngs = jax.random.split(rng, 2) @@ -222,7 +221,7 @@ def assert_upper_bound( assert lhs <= rhs @pytest.mark.fast.with_args(rank=[2, 3], tol=[5e-1, 1e-2], only_fast=0) - def test_geometry_to_lr(self, rng: jnp.ndarray, rank: int, tol: float): + def test_geometry_to_lr(self, rng: jax.Array, rank: int, tol: float): rng1, rng2 = jax.random.split(rng, 2) x = jax.random.normal(rng1, shape=(370, 3)) y = jax.random.normal(rng2, shape=(460, 3)) @@ -243,8 +242,7 @@ def test_geometry_to_lr(self, rng: jnp.ndarray, rank: int, tol: float): only_fast=1 ) def test_point_cloud_to_lr( - self, rng: jnp.ndarray, batch_size: Optional[int], - scale_cost: Optional[str] + self, rng: jax.Array, batch_size: Optional[int], scale_cost: Optional[str] ): rank, tol = 7, 1e-1 rng1, rng2 = jax.random.split(rng, 2) @@ -268,7 +266,7 @@ def test_point_cloud_to_lr( assert geom_lr.cost_rank == rank self.assert_upper_bound(geom, geom_lr, rank=rank, tol=tol) - def test_to_lrc_geometry_noop(self, rng: jnp.ndarray): + def test_to_lrc_geometry_noop(self, rng: jax.Array): rng1, rng2 = jax.random.split(rng, 2) cost1 = jax.random.normal(rng1, shape=(32, 2)) cost2 = jax.random.normal(rng2, shape=(23, 2)) @@ -290,7 +288,7 @@ def test_apply_transport_from_potentials(self): np.testing.assert_allclose(res, 1.1253539e-07, rtol=1e-6, atol=1e-6) 
@pytest.mark.limit_memory("190 MB") - def test_large_scale_factorization(self, rng: jnp.ndarray): + def test_large_scale_factorization(self, rng: jax.Array): rank, tol = 4, 1e-2 rng1, rng2 = jax.random.split(rng, 2) x = jax.random.normal(rng1, shape=(10_000, 7)) @@ -321,7 +319,7 @@ def test_conversion_grid(self): cost_matrix, cost_matrix_lrc, rtol=1e-5, atol=1e-5 ) - def test_full_to_lrc_geometry(self, rng: jnp.ndarray): + def test_full_to_lrc_geometry(self, rng: jax.Array): rng1, rng2 = jax.random.split(rng, 2) x = jax.random.normal(rng1, shape=(13, 7)) y = jax.random.normal(rng2, shape=(29, 7)) diff --git a/tests/geometry/pointcloud_test.py b/tests/geometry/pointcloud_test.py index 1a952132f..cd7cb671c 100644 --- a/tests/geometry/pointcloud_test.py +++ b/tests/geometry/pointcloud_test.py @@ -25,7 +25,7 @@ @pytest.mark.fast() class TestPointCloudApply: - def test_apply_cost_and_kernel(self, rng: jnp.ndarray): + def test_apply_cost_and_kernel(self, rng: jax.Array): """Test consistency of cost/kernel apply to vec.""" n, m, p, b = 5, 8, 10, 7 rngs = jax.random.split(rng, 5) @@ -69,7 +69,7 @@ def test_apply_cost_and_kernel(self, rng: jnp.ndarray): np.testing.assert_allclose(prod0_online, prod0, rtol=1e-03, atol=1e-02) np.testing.assert_allclose(prod1_online, prod1, rtol=1e-03, atol=1e-02) - def test_general_cost_fn(self, rng: jnp.ndarray): + def test_general_cost_fn(self, rng: jax.Array): """Test non-vec cost apply to vec.""" n, m, p, b = 5, 8, 10, 7 rngs = jax.random.split(rng, 5) @@ -98,7 +98,7 @@ def test_correct_shape(self): np.testing.assert_array_equal(pc.shape, (n, m)) @pytest.mark.parametrize("axis", [0, 1]) - def test_apply_cost_without_norm(self, rng: jnp.ndarray, axis: 1): + def test_apply_cost_without_norm(self, rng: jax.Array, axis: 1): rng1, rng2 = jax.random.split(rng, 2) x = jax.random.normal(rng1, shape=(17, 3)) y = jax.random.normal(rng2, shape=(12, 3)) @@ -123,7 +123,7 @@ class TestPointCloudCosineConversion: "scale_cost", ["mean", "median", 
"max_cost", "max_norm", 41] ) def test_cosine_to_sqeucl_conversion( - self, rng: jnp.ndarray, scale_cost: Union[str, float] + self, rng: jax.Array, scale_cost: Union[str, float] ): rng1, rng2 = jax.random.split(rng, 2) x = jax.random.normal(rng1, shape=(101, 4)) @@ -156,7 +156,7 @@ def test_cosine_to_sqeucl_conversion( ) @pytest.mark.parametrize("axis", [0, 1]) def test_apply_cost_cosine_to_sqeucl( - self, rng: jnp.ndarray, axis: int, scale_cost: Union[str, float] + self, rng: jax.Array, axis: int, scale_cost: Union[str, float] ): rng1, rng2 = jax.random.split(rng, 2) x = jax.random.normal(rng1, shape=(17, 5)) diff --git a/tests/geometry/scaling_cost_test.py b/tests/geometry/scaling_cost_test.py index e321b8524..6cd5dcaa9 100644 --- a/tests/geometry/scaling_cost_test.py +++ b/tests/geometry/scaling_cost_test.py @@ -27,7 +27,7 @@ class TestScaleCost: @pytest.fixture(autouse=True) - def initialize(self, rng: jnp.ndarray): + def initialize(self, rng: jax.Array): self.dim = 4 self.n = 7 self.m = 9 diff --git a/tests/geometry/subsetting_test.py b/tests/geometry/subsetting_test.py index 7298001cc..ebaa6d4ac 100644 --- a/tests/geometry/subsetting_test.py +++ b/tests/geometry/subsetting_test.py @@ -26,7 +26,7 @@ @pytest.fixture() def pc_masked( - rng: jnp.ndarray + rng: jax.Array ) -> Tuple[pointcloud.PointCloud, pointcloud.PointCloud]: n, m = 20, 30 rng1, rng2 = jax.random.split(rng, 2) @@ -67,7 +67,7 @@ class TestMaskPointCloud: "clazz", [geometry.Geometry, pointcloud.PointCloud, low_rank.LRCGeometry] ) def test_mask( - self, rng: jnp.ndarray, clazz: Type[geometry.Geometry], + self, rng: jax.Array, clazz: Type[geometry.Geometry], src_ixs: Optional[Union[int, Sequence[int]]], tgt_ixs: Optional[Union[int, Sequence[int]]] ): @@ -141,7 +141,7 @@ def test_masked_summary( ) def test_mask_permutation( - self, geom_masked: Tuple[Geom_t, pointcloud.PointCloud], rng: jnp.ndarray + self, geom_masked: Tuple[Geom_t, pointcloud.PointCloud], rng: jax.Array ): rng1, rng2 = 
jax.random.split(rng) geom, _ = geom_masked @@ -163,7 +163,7 @@ def test_mask_permutation( ) def test_boolean_mask( - self, geom_masked: Tuple[Geom_t, pointcloud.PointCloud], rng: jnp.ndarray + self, geom_masked: Tuple[Geom_t, pointcloud.PointCloud], rng: jax.Array ): rng1, rng2 = jax.random.split(rng) p = jnp.array([0.5, 0.5]) diff --git a/tests/initializers/linear/sinkhorn_init_test.py b/tests/initializers/linear/sinkhorn_init_test.py index 8cc20f4c0..5af512a4a 100644 --- a/tests/initializers/linear/sinkhorn_init_test.py +++ b/tests/initializers/linear/sinkhorn_init_test.py @@ -26,7 +26,7 @@ def create_sorting_problem( - rng: jnp.ndarray, + rng: jax.Array, n: int, epsilon: float = 1e-2, batch_size: Optional[int] = None @@ -56,7 +56,7 @@ def create_sorting_problem( def create_ot_problem( - rng: jnp.ndarray, + rng: jax.Array, n: int, m: int, d: int, @@ -133,9 +133,7 @@ def test_create_initializer(self, init: str): @pytest.mark.parametrize(("vector_min", "lse_mode"), [(True, True), (True, False), (False, True)]) - def test_sorting_init( - self, vector_min: bool, lse_mode: bool, rng: jnp.ndarray - ): + def test_sorting_init(self, vector_min: bool, lse_mode: bool, rng: jax.Array): """Tests sorting dual initializer.""" n = 50 epsilon = 1e-2 @@ -169,7 +167,7 @@ def test_sorting_init( assert sink_out_init.converged assert sink_out_base.n_iters > sink_out_init.n_iters - def test_sorting_init_online(self, rng: jnp.ndarray): + def test_sorting_init_online(self, rng: jax.Array): n = 10 epsilon = 1e-2 @@ -180,7 +178,7 @@ def test_sorting_init_online(self, rng: jnp.ndarray): with pytest.raises(AssertionError, match=r"online"): sort_init.init_dual_a(ot_problem, lse_mode=True) - def test_sorting_init_square_cost(self, rng: jnp.ndarray): + def test_sorting_init_square_cost(self, rng: jax.Array): n, m, d = 10, 15, 1 epsilon = 1e-2 @@ -189,7 +187,7 @@ def test_sorting_init_square_cost(self, rng: jnp.ndarray): with pytest.raises(AssertionError, match=r"square"): 
sort_init.init_dual_a(ot_problem, lse_mode=True) - def test_default_initializer(self, rng: jnp.ndarray): + def test_default_initializer(self, rng: jax.Array): """Tests default initializer""" n, m, d = 20, 20, 2 epsilon = 1e-2 @@ -207,7 +205,7 @@ def test_default_initializer(self, rng: jnp.ndarray): np.testing.assert_array_equal(0., default_potential_a) np.testing.assert_array_equal(0., default_potential_b) - def test_gauss_pointcloud_geom(self, rng: jnp.ndarray): + def test_gauss_pointcloud_geom(self, rng: jax.Array): n, m, d = 20, 20, 2 epsilon = 1e-2 @@ -228,7 +226,7 @@ def test_gauss_pointcloud_geom(self, rng: jnp.ndarray): @pytest.mark.parametrize("jit", [False, True]) @pytest.mark.parametrize("initializer", ["sorting", "gaussian", "subsample"]) def test_initializer_n_iter( - self, rng: jnp.ndarray, lse_mode: bool, jit: bool, + self, rng: jax.Array, lse_mode: bool, jit: bool, initializer: Literal["sorting", "gaussian", "subsample"] ): """Tests Gaussian initializer""" diff --git a/tests/initializers/linear/sinkhorn_lr_init_test.py b/tests/initializers/linear/sinkhorn_lr_init_test.py index 0b67d2286..1d2a0e01b 100644 --- a/tests/initializers/linear/sinkhorn_lr_init_test.py +++ b/tests/initializers/linear/sinkhorn_lr_init_test.py @@ -37,7 +37,7 @@ def test_explicit_initializer(self): ) @pytest.mark.parametrize("partial_init", ["q", "r", "g"]) def test_partial_initialization( - self, rng: jnp.ndarray, initializer: str, partial_init: str + self, rng: jax.Array, initializer: str, partial_init: str ): n, d, rank = 27, 5, 6 rng1, rng2, rng3, rng4 = jax.random.split(rng, 4) @@ -65,7 +65,7 @@ def test_partial_initialization( @pytest.mark.fast.with_args("rank", [2, 4, 10, 13], only_fast=True) def test_generalized_k_means_has_correct_rank( - self, rng: jnp.ndarray, rank: int + self, rng: jax.Array, rank: int ): n, d = 27, 5 x = jax.random.normal(rng, (n, d)) @@ -82,7 +82,7 @@ def test_generalized_k_means_has_correct_rank( assert jnp.linalg.matrix_rank(q) == rank assert 
jnp.linalg.matrix_rank(r) == rank - def test_generalized_k_means_matches_k_means(self, rng: jnp.ndarray): + def test_generalized_k_means_matches_k_means(self, rng: jax.Array): n, d, rank = 27, 7, 5 eps = 1e-1 rng1, rng2 = jax.random.split(rng, 2) @@ -112,7 +112,7 @@ def test_generalized_k_means_matches_k_means(self, rng: jnp.ndarray): ) @pytest.mark.parametrize("epsilon", [0., 1e-1]) - def test_better_initialization_helps(self, rng: jnp.ndarray, epsilon: float): + def test_better_initialization_helps(self, rng: jax.Array, epsilon: float): n, d, rank = 81, 13, 3 rng1, rng2 = jax.random.split(rng, 2) x = jax.random.normal(rng1, (n, d)) diff --git a/tests/initializers/quadratic/gw_init_test.py b/tests/initializers/quadratic/gw_init_test.py index 8ab6cc4e5..ea630f4a2 100644 --- a/tests/initializers/quadratic/gw_init_test.py +++ b/tests/initializers/quadratic/gw_init_test.py @@ -14,7 +14,6 @@ import pytest import jax -import jax.numpy as jnp import numpy as np from ott.geometry import pointcloud @@ -51,7 +50,7 @@ def test_explicit_initializer_lr(self): assert solver.initializer.rank == rank @pytest.mark.parametrize("eps", [0., 1e-2]) - def test_gw_better_initialization_helps(self, rng: jnp.ndarray, eps: float): + def test_gw_better_initialization_helps(self, rng: jax.Array, eps: float): n, m, d1, d2, rank = 83, 84, 8, 6, 4 rng1, rng2, rng3, rng4 = jax.random.split(rng, 4) diff --git a/tests/math/lse_test.py b/tests/math/lse_test.py index 3ff28eada..7a1c469be 100644 --- a/tests/math/lse_test.py +++ b/tests/math/lse_test.py @@ -23,7 +23,7 @@ @pytest.mark.fast() class TestGeometryLse: - def test_lse(self, rng: jnp.ndarray): + def test_lse(self, rng: jax.Array): """Test consistency of custom lse's jvp.""" n, m = 12, 8 rngs = jax.random.split(rng, 5) diff --git a/tests/math/math_utils_test.py b/tests/math/math_utils_test.py index 3bd4c8114..a3afb0dca 100644 --- a/tests/math/math_utils_test.py +++ b/tests/math/math_utils_test.py @@ -27,7 +27,7 @@ class TestNorm: 
@pytest.mark.parametrize("ord", [1.1, 2.0, jnp.inf]) def test_norm( self, - rng: jnp.ndarray, + rng: jax.Array, ord, ): d = 5 diff --git a/tests/math/matrix_square_root_test.py b/tests/math/matrix_square_root_test.py index 3f4aee25b..ddb25458b 100644 --- a/tests/math/matrix_square_root_test.py +++ b/tests/math/matrix_square_root_test.py @@ -22,7 +22,7 @@ from ott.math import matrix_square_root -def _get_random_spd_matrix(dim: int, rng: jnp.ndarray): +def _get_random_spd_matrix(dim: int, rng: jax.Array): # Get a random symmetric, positive definite matrix of a specified size. rng, subrng0, subrng1 = jax.random.split(rng, num=3) @@ -38,7 +38,7 @@ def _get_random_spd_matrix(dim: int, rng: jnp.ndarray): def _get_test_fn( - fn: Callable[[jnp.ndarray], jnp.ndarray], dim: int, rng: jnp.ndarray, + fn: Callable[[jnp.ndarray], jnp.ndarray], dim: int, rng: jax.Array, **kwargs: Any ) -> Callable[[jnp.ndarray], jnp.ndarray]: # We want to test gradients of a function fn that maps positive definite @@ -72,7 +72,7 @@ def _sqrt_plus_inv_sqrt(x: jnp.ndarray) -> jnp.ndarray: class TestMatrixSquareRoot: @pytest.fixture(autouse=True) - def initialize(self, rng: jnp.ndarray): + def initialize(self, rng: jax.Array): self.dim = 13 self.batch = 3 # Values for testing the Sylvester solver diff --git a/tests/neural/icnn_test.py b/tests/neural/icnn_test.py index d0d4e92b8..dba2f7b7c 100644 --- a/tests/neural/icnn_test.py +++ b/tests/neural/icnn_test.py @@ -23,7 +23,7 @@ @pytest.mark.fast() class TestICNN: - def test_icnn_convexity(self, rng: jnp.ndarray): + def test_icnn_convexity(self, rng: jax.Array): """Tests convexity of ICNN.""" n_samples, n_features = 10, 2 dim_hidden = (64, 64) @@ -49,7 +49,7 @@ def test_icnn_convexity(self, rng: jnp.ndarray): np.testing.assert_array_equal(jnp.asarray(out) >= 0, True) - def test_icnn_hessian(self, rng: jnp.ndarray): + def test_icnn_hessian(self, rng: jax.Array): """Tests if Hessian of ICNN is positive-semidefinite.""" # define icnn model diff --git 
a/tests/neural/losses_test.py b/tests/neural/losses_test.py index 8e4a2f96c..f18681c7a 100644 --- a/tests/neural/losses_test.py +++ b/tests/neural/losses_test.py @@ -15,7 +15,6 @@ import pytest import jax -import jax.numpy as jnp import numpy as np from ott.geometry import costs @@ -28,7 +27,7 @@ class TestMongeGap: @pytest.mark.parametrize("n_samples", [5, 25]) @pytest.mark.parametrize("n_features", [10, 50, 100]) def test_monge_gap_non_negativity( - self, rng: jnp.ndarray, n_samples: int, n_features: int + self, rng: jax.Array, n_samples: int, n_features: int ): # generate data @@ -54,7 +53,7 @@ def test_monge_gap_non_negativity( np.testing.assert_array_equal(monge_gap_value, monge_gap_from_samples_value) - def test_monge_gap_jit(self, rng: jnp.ndarray): + def test_monge_gap_jit(self, rng: jax.Array): n_samples, n_features = 31, 17 # generate data rng1, rng2 = jax.random.split(rng, 2) @@ -86,7 +85,7 @@ def test_monge_gap_jit(self, rng: jnp.ndarray): ], ) def test_monge_gap_from_samples_different_cost( - self, rng: jnp.ndarray, cost_fn: costs.CostFn, n_samples: int, + self, rng: jax.Array, cost_fn: costs.CostFn, n_samples: int, n_features: int ): """Test that the Monge gap for different costs. 
diff --git a/tests/neural/meta_initializer_test.py b/tests/neural/meta_initializer_test.py index 442fe9272..e84554940 100644 --- a/tests/neural/meta_initializer_test.py +++ b/tests/neural/meta_initializer_test.py @@ -42,7 +42,7 @@ def __call__(self, a: jnp.ndarray, b: jnp.ndarray) -> jnp.ndarray: def create_ot_problem( - rng: jnp.ndarray, + rng: jax.Array, n: int, m: int, d: int, @@ -88,7 +88,7 @@ def run_sinkhorn( class TestMetaInitializer: @pytest.mark.parametrize("lse_mode", [True, False]) - def test_meta_initializer(self, rng: jnp.ndarray, lse_mode: bool): + def test_meta_initializer(self, rng: jax.Array, lse_mode: bool): """Tests Meta initializer""" n, m, d = 20, 20, 2 epsilon = 1e-2 diff --git a/tests/problems/linear/potentials_test.py b/tests/problems/linear/potentials_test.py index a13211119..aa492c628 100644 --- a/tests/problems/linear/potentials_test.py +++ b/tests/problems/linear/potentials_test.py @@ -38,7 +38,7 @@ def test_device_put(self): class TestEntropicPotentials: - def test_device_put(self, rng: jnp.ndarray): + def test_device_put(self, rng: jax.Array): n = 10 device = jax.devices()[0] rngs = jax.random.split(rng, 5) @@ -55,7 +55,7 @@ def test_device_put(self, rng: jnp.ndarray): _ = jax.device_put(pot, device) @pytest.mark.fast.with_args(eps=[5e-2, 1e-1], only_fast=0) - def test_entropic_potentials_dist(self, rng: jnp.ndarray, eps: float): + def test_entropic_potentials_dist(self, rng: jax.Array, eps: float): n1, n2, d = 64, 96, 2 rng1, rng2, rng3, rng4 = jax.random.split(rng, 4) @@ -93,7 +93,7 @@ def test_entropic_potentials_dist(self, rng: jnp.ndarray, eps: float): @pytest.mark.fast.with_args(forward=[False, True], only_fast=0) def test_entropic_potentials_displacement( - self, rng: jnp.ndarray, forward: bool, monkeypatch + self, rng: jax.Array, forward: bool, monkeypatch ): """Tests entropic displacements, as well as their plots.""" n1, n2, d = 96, 128, 2 @@ -136,7 +136,7 @@ def test_entropic_potentials_displacement( p=[1.3, 2.2, 1.0], 
forward=[False, True], only_fast=0 ) def test_entropic_potentials_sqpnorm( - self, rng: jnp.ndarray, p: float, forward: bool + self, rng: jax.Array, p: float, forward: bool ): epsilon = None cost_fn = costs.SqPNorm(p=p) @@ -176,7 +176,7 @@ def test_entropic_potentials_sqpnorm( p=[1.45, 2.2, 1.0], forward=[False, True], only_fast=0 ) def test_entropic_potentials_pnorm( - self, rng: jnp.ndarray, p: float, forward: bool + self, rng: jax.Array, p: float, forward: bool ): epsilon = None cost_fn = costs.PNormP(p=p) @@ -218,7 +218,7 @@ def test_entropic_potentials_pnorm( assert div < .1 * div_0 @pytest.mark.parametrize("jit", [False, True]) - def test_distance_differentiability(self, rng: jnp.ndarray, jit: bool): + def test_distance_differentiability(self, rng: jax.Array, jit: bool): rng1, rng2, rng3 = jax.random.split(rng, 3) n, m, d = 18, 36, 5 @@ -240,7 +240,7 @@ def test_distance_differentiability(self, rng: jnp.ndarray, jit: bool): np.testing.assert_allclose(actual, expected, rtol=1e-4, atol=1e-4) @pytest.mark.parametrize("eps", [None, 1e-1, 1e1, 1e2, 1e3]) - def test_potentials_sinkhorn_divergence(self, rng: jnp.ndarray, eps: float): + def test_potentials_sinkhorn_divergence(self, rng: jax.Array, eps: float): rng1, rng2, rng3 = jax.random.split(rng, 3) n, m, d = 32, 36, 4 fwd = True diff --git a/tests/solvers/linear/continuous_barycenter_test.py b/tests/solvers/linear/continuous_barycenter_test.py index 730b529d3..48b9e7e0d 100644 --- a/tests/solvers/linear/continuous_barycenter_test.py +++ b/tests/solvers/linear/continuous_barycenter_test.py @@ -51,7 +51,7 @@ class TestBarycenter: }, ) def test_euclidean_barycenter( - self, rng: jnp.ndarray, rank: int, epsilon: float, init_random: bool, + self, rng: jax.Array, rank: int, epsilon: float, init_random: bool, jit: bool ): rngs = jax.random.split(rng, 20) @@ -116,7 +116,7 @@ def test_euclidean_barycenter( assert jnp.all(out.x.ravel() > .7) @pytest.mark.parametrize("segment_before", [False, True]) - def 
test_barycenter_jit(self, rng: jnp.ndarray, segment_before: bool): + def test_barycenter_jit(self, rng: jax.Array, segment_before: bool): @functools.partial(jax.jit, static_argnums=(2, 3)) def barycenter( @@ -171,7 +171,7 @@ def barycenter( @pytest.mark.fast() def test_bures_barycenter( self, - rng: jnp.ndarray, + rng: jax.Array, ): lse_mode = True, epsilon = 1e-1 @@ -257,7 +257,7 @@ def test_bures_barycenter( @pytest.mark.fast() def test_bures_barycenter_different_number_of_components( self, - rng: jnp.ndarray, + rng: jax.Array, ): alpha = 5. epsilon = 0.01 diff --git a/tests/solvers/linear/sinkhorn_diff_test.py b/tests/solvers/linear/sinkhorn_diff_test.py index 04de4dca9..69c01f9ad 100644 --- a/tests/solvers/linear/sinkhorn_diff_test.py +++ b/tests/solvers/linear/sinkhorn_diff_test.py @@ -30,7 +30,7 @@ class TestSinkhornImplicit: """Check implicit and autodiff match for Sinkhorn.""" @pytest.fixture(autouse=True) - def initialize(self, rng: jnp.ndarray): + def initialize(self, rng: jax.Array): self.dim = 3 self.n = 38 self.m = 73 @@ -138,7 +138,7 @@ class TestSinkhornJacobian: only_fast=0, ) def test_autograd_sinkhorn( - self, rng: jnp.ndarray, lse_mode: bool, shape_data: Tuple[int, int] + self, rng: jax.Array, lse_mode: bool, shape_data: Tuple[int, int] ): """Test gradient w.r.t. probability weights.""" n, m = shape_data @@ -181,7 +181,7 @@ def reg_ot(a: jnp.ndarray, b: jnp.ndarray) -> float: @pytest.mark.parametrize(("lse_mode", "shape_data"), [(True, (7, 9)), (False, (11, 5))]) def test_gradient_sinkhorn_geometry( - self, rng: jnp.ndarray, lse_mode: bool, shape_data: Tuple[int, int] + self, rng: jax.Array, lse_mode: bool, shape_data: Tuple[int, int] ): """Test gradient w.r.t. 
cost matrix.""" n, m = shape_data @@ -244,7 +244,7 @@ def loss_fn(cm: jnp.ndarray): only_fast=[0, 1], ) def test_gradient_sinkhorn_euclidean( - self, rng: jnp.ndarray, lse_mode: bool, implicit: bool, min_iter: int, + self, rng: jax.Array, lse_mode: bool, implicit: bool, min_iter: int, max_iter: int, epsilon: float, cost_fn: costs.CostFn ): """Test gradient w.r.t. locations x of reg-ot-cost.""" @@ -318,7 +318,7 @@ def loss_fn(x: jnp.ndarray, ) np.testing.assert_array_equal(jnp.isnan(custom_grad), False) - def test_autoepsilon_differentiability(self, rng: jnp.ndarray): + def test_autoepsilon_differentiability(self, rng: jax.Array): cost = jax.random.uniform(rng, (15, 17)) def reg_ot_cost(c: jnp.ndarray) -> float: @@ -330,7 +330,7 @@ def reg_ot_cost(c: jnp.ndarray) -> float: np.testing.assert_array_equal(jnp.isnan(gradient), False) @pytest.mark.fast() - def test_differentiability_with_jit(self, rng: jnp.ndarray): + def test_differentiability_with_jit(self, rng: jax.Array): def reg_ot_cost(c: jnp.ndarray) -> float: geom = geometry.Geometry(c, epsilon=1e-2) @@ -348,7 +348,7 @@ def reg_ot_cost(c: jnp.ndarray) -> float: only_fast=0 ) def test_apply_transport_jacobian( - self, rng: jnp.ndarray, lse_mode: bool, tau_a: float, tau_b: float, + self, rng: jax.Array, lse_mode: bool, tau_a: float, tau_b: float, arg: int, axis: int ): """Tests Jacobian of application of OT to vector, w.r.t. @@ -460,7 +460,7 @@ def apply_ot(a: jnp.ndarray, x: jnp.ndarray, implicit: bool) -> jnp.ndarray: only_fast=0, ) def test_potential_jacobian_sinkhorn( - self, rng: jnp.ndarray, lse_mode: bool, tau_a: float, tau_b: float, + self, rng: jax.Array, lse_mode: bool, tau_a: float, tau_b: float, shape: Tuple[int, int], arg: int ): """Test Jacobian of optimal potential w.r.t. 
weights and locations.""" @@ -542,7 +542,7 @@ class TestSinkhornGradGrid: @pytest.mark.parametrize("lse_mode", [False, True]) def test_diff_sinkhorn_x_grid_x_perturbation( - self, rng: jnp.ndarray, lse_mode: bool + self, rng: jax.Array, lse_mode: bool ): """Test gradient w.r.t. probability weights.""" eps = 1e-3 # perturbation magnitude @@ -587,7 +587,7 @@ def reg_ot(x: List[jnp.ndarray]) -> float: @pytest.mark.parametrize("lse_mode", [False, True]) def test_diff_sinkhorn_x_grid_weights_perturbation( - self, rng: jnp.ndarray, lse_mode: bool + self, rng: jax.Array, lse_mode: bool ): """Test gradient w.r.t. probability weights.""" eps = 1e-4 # perturbation magnitude @@ -638,7 +638,7 @@ class TestSinkhornJacobianPreconditioning: only_fast=[0, -1], ) def test_potential_jacobian_sinkhorn_precond( - self, rng: jnp.ndarray, lse_mode: bool, tau_a: float, tau_b: float, + self, rng: jax.Array, lse_mode: bool, tau_a: float, tau_b: float, shape: Tuple[int, int], arg: int ): """Test Jacobian of optimal potential works across 2 precond_fun.""" @@ -741,7 +741,7 @@ class TestSinkhornHessian: only_fast=-1 ) def test_hessian_sinkhorn( - self, rng: jnp.ndarray, lse_mode: bool, tau_a: float, tau_b: float, + self, rng: jax.Array, lse_mode: bool, tau_a: float, tau_b: float, arg: int, lineax_ridge: float ): """Test hessian w.r.t. 
weights and locations.""" diff --git a/tests/solvers/linear/sinkhorn_grid_test.py b/tests/solvers/linear/sinkhorn_grid_test.py index e7c116c8d..d73bc124b 100644 --- a/tests/solvers/linear/sinkhorn_grid_test.py +++ b/tests/solvers/linear/sinkhorn_grid_test.py @@ -26,7 +26,7 @@ class TestSinkhornGrid: @pytest.mark.parametrize("lse_mode", [False, True]) - def test_separable_grid(self, rng: jnp.ndarray, lse_mode: bool): + def test_separable_grid(self, rng: jax.Array, lse_mode: bool): """Two histograms in a grid of size 5 x 6 x 7 in the hypercube^3.""" grid_size = (5, 6, 7) rngs = jax.random.split(rng, 2) @@ -47,7 +47,7 @@ def test_separable_grid(self, rng: jnp.ndarray, lse_mode: bool): assert threshold > err @pytest.mark.fast.with_args("lse_mode", [False, True], only_fast=0) - def test_grid_vs_euclidean(self, rng: jnp.ndarray, lse_mode: bool): + def test_grid_vs_euclidean(self, rng: jax.Array, lse_mode: bool): grid_size = (5, 6, 7) rngs = jax.random.split(rng, 2) a = jax.random.uniform(rngs[0], grid_size) @@ -70,7 +70,7 @@ def test_grid_vs_euclidean(self, rng: jnp.ndarray, lse_mode: bool): ) @pytest.mark.fast.with_args("lse_mode", [False, True], only_fast=1) - def test_apply_transport_grid(self, rng: jnp.ndarray, lse_mode: bool): + def test_apply_transport_grid(self, rng: jax.Array, lse_mode: bool): grid_size = (5, 6, 7) rngs = jax.random.split(rng, 4) a = jax.random.uniform(rngs[0], grid_size) @@ -119,7 +119,7 @@ def test_apply_transport_grid(self, rng: jnp.ndarray, lse_mode: bool): np.testing.assert_array_equal(jnp.isnan(mat_transport_t_vec_a), False) @pytest.mark.fast() - def test_apply_cost(self, rng: jnp.ndarray): + def test_apply_cost(self, rng: jax.Array): grid_size = (5, 6, 7) geom_grid = grid.Grid(grid_size=grid_size, epsilon=0.1) diff --git a/tests/solvers/linear/sinkhorn_lr_test.py b/tests/solvers/linear/sinkhorn_lr_test.py index 0ce5a2307..1bfbd4843 100644 --- a/tests/solvers/linear/sinkhorn_lr_test.py +++ b/tests/solvers/linear/sinkhorn_lr_test.py @@ -27,7 
+27,7 @@ class TestLRSinkhorn: @pytest.fixture(autouse=True) - def initialize(self, rng: jnp.ndarray): + def initialize(self, rng: jax.Array): self.dim = 4 self.n = 23 self.m = 27 diff --git a/tests/solvers/linear/sinkhorn_misc_test.py b/tests/solvers/linear/sinkhorn_misc_test.py index d9d6d616c..9d45c518c 100644 --- a/tests/solvers/linear/sinkhorn_misc_test.py +++ b/tests/solvers/linear/sinkhorn_misc_test.py @@ -39,7 +39,7 @@ class TestSinkhornAnderson: only_fast=0, ) def test_anderson( - self, rng: jnp.ndarray, lse_mode: bool, tau_a: float, tau_b: float + self, rng: jax.Array, lse_mode: bool, tau_a: float, tau_b: float ): """Test efficiency of Anderson acceleration. @@ -131,7 +131,7 @@ def initialize(self): @pytest.mark.parametrize(("unbalanced", "thresh"), [(False, 1e-3), (True, 1e-4)]) def test_bures_point_cloud( - self, rng: jnp.ndarray, lse_mode: bool, unbalanced: bool, thresh: float + self, rng: jax.Array, lse_mode: bool, unbalanced: bool, thresh: float ): """Two point clouds of Gaussians, tested with various parameters.""" if unbalanced: @@ -172,7 +172,7 @@ def test_regularized_unbalanced_bures_cost(self): class TestSinkhornOnline: @pytest.fixture(autouse=True) - def initialize(self, rng: jnp.ndarray): + def initialize(self, rng: jax.Array): self.dim = 3 self.n = 100 self.m = 42 @@ -237,7 +237,7 @@ def callback(epsilon: float, batch_size: int) -> sinkhorn.SinkhornOutput: class TestSinkhornUnbalanced: @pytest.fixture(autouse=True) - def initialize(self, rng: jnp.ndarray): + def initialize(self, rng: jax.Array): self.dim = 4 self.n = 17 self.m = 23 @@ -318,7 +318,7 @@ class TestSinkhornJIT: """Check jitted and non jit match for Sinkhorn, and that everything jits.""" @pytest.fixture(autouse=True) - def initialize(self, rng: jnp.ndarray): + def initialize(self, rng: jax.Array): self.dim = 3 self.n = 10 self.m = 11 diff --git a/tests/solvers/linear/sinkhorn_test.py b/tests/solvers/linear/sinkhorn_test.py index 0437a4efa..2676e74af 100644 --- 
a/tests/solvers/linear/sinkhorn_test.py +++ b/tests/solvers/linear/sinkhorn_test.py @@ -31,7 +31,7 @@ class TestSinkhorn: @pytest.fixture(autouse=True) - def initialize(self, rng: jnp.ndarray): + def initialize(self, rng: jax.Array): self.rng = rng self.dim = 4 self.n = 17 diff --git a/tests/solvers/linear/univariate_test.py b/tests/solvers/linear/univariate_test.py index a002882fb..6e0263611 100644 --- a/tests/solvers/linear/univariate_test.py +++ b/tests/solvers/linear/univariate_test.py @@ -29,7 +29,7 @@ class TestUnivariate: @pytest.fixture(autouse=True) - def initialize(self, rng: jnp.ndarray): + def initialize(self, rng: jax.Array): self.rng = rng self.n = 7 self.m = 5 @@ -120,7 +120,7 @@ def test_cdf_distance_and_scipy(self): @pytest.mark.fast() def test_univariate_grad( self, - rng: jnp.ndarray, + rng: jax.Array, ): # TODO: Once a `check_grad` function is implemented, replace the code # blocks before with `check_grad`'s. diff --git a/tests/solvers/quadratic/fgw_test.py b/tests/solvers/quadratic/fgw_test.py index f998e802e..47810b16a 100644 --- a/tests/solvers/quadratic/fgw_test.py +++ b/tests/solvers/quadratic/fgw_test.py @@ -30,7 +30,7 @@ class TestFusedGromovWasserstein: # TODO(michalk8): refactor me in the future @pytest.fixture(autouse=True) - def initialize(self, rng: jnp.ndarray): + def initialize(self, rng: jax.Array): d_x = 2 d_y = 3 d_xy = 4 @@ -217,7 +217,7 @@ def reg_gw( @pytest.mark.limit_memory("200 MB") @pytest.mark.parametrize("jit", [False, True]) - def test_fgw_lr_memory(self, rng: jnp.ndarray, jit: bool): + def test_fgw_lr_memory(self, rng: jax.Array, jit: bool): rngs = jax.random.split(rng, 4) n, m, d1, d2 = 5_000, 2_500, 1, 2 x = jax.random.uniform(rngs[0], (n, d1)) @@ -244,7 +244,7 @@ def test_fgw_lr_memory(self, rng: jnp.ndarray, jit: bool): @pytest.mark.parametrize("cost_rank", [4, (2, 3, 4)]) def test_fgw_lr_generic_cost_matrix( - self, rng: jnp.ndarray, cost_rank: Union[int, Tuple[int, int, int]] + self, rng: jax.Array, cost_rank: 
Union[int, Tuple[int, int, int]] ): n, m = 20, 30 rng1, rng2, rng3, rng4 = jax.random.split(rng, 4) diff --git a/tests/solvers/quadratic/gw_barycenter_test.py b/tests/solvers/quadratic/gw_barycenter_test.py index eba4e3054..02ecc953b 100644 --- a/tests/solvers/quadratic/gw_barycenter_test.py +++ b/tests/solvers/quadratic/gw_barycenter_test.py @@ -32,7 +32,7 @@ class TestGWBarycenter: def random_pc( n: int, d: int, - rng: jnp.ndarray, + rng: jax.Array, m: Optional[int] = None, **kwargs: Any ) -> pointcloud.PointCloud: @@ -66,7 +66,7 @@ def pad_cost_matrices( [("sqeucl", 17, None)] # , ("kl", 22, 1e-2)] ) def test_gw_barycenter( - self, rng: jnp.ndarray, gw_loss: str, bar_size: int, + self, rng: jax.Array, gw_loss: str, bar_size: int, epsilon: Optional[float] ): tol = 1e-3 if gw_loss == "sqeucl" else 1e-1 @@ -127,7 +127,7 @@ def test_gw_barycenter( ) def test_fgw_barycenter( self, - rng: jnp.ndarray, + rng: jax.Array, jit: bool, fused_penalty: float, scale_cost: str, diff --git a/tests/solvers/quadratic/gw_test.py b/tests/solvers/quadratic/gw_test.py index 816f7fcd6..2e5573fbe 100644 --- a/tests/solvers/quadratic/gw_test.py +++ b/tests/solvers/quadratic/gw_test.py @@ -32,7 +32,7 @@ class TestQuadraticProblem: @pytest.mark.parametrize("as_pc", [False, True]) @pytest.mark.parametrize("rank", [-1, 5, (1, 2, 3), (2, 3, 5)]) def test_quad_to_low_rank( - self, rng: jnp.ndarray, as_pc: bool, rank: Union[int, Tuple[int, ...]] + self, rng: jax.Array, as_pc: bool, rank: Union[int, Tuple[int, ...]] ): n, m, d1, d2, d = 100, 120, 4, 6, 10 rng1, rng2, rng3, rng4 = jax.random.split(rng, 4) @@ -88,7 +88,7 @@ def test_quad_to_low_rank( assert lr_prob._is_low_rank_convertible assert lr_prob.to_low_rank() is lr_prob - def test_gw_implicit_conversion_mixed_input(self, rng: jnp.ndarray): + def test_gw_implicit_conversion_mixed_input(self, rng: jax.Array): n, m, d1, d2 = 13, 77, 3, 4 rng1, rng2 = jax.random.split(rng, 2) x = jax.random.normal(rng1, (n, d1)) @@ -108,7 +108,7 @@ def 
test_gw_implicit_conversion_mixed_input(self, rng: jnp.ndarray): class TestGromovWasserstein: @pytest.fixture(autouse=True) - def initialize(self, rng: jnp.ndarray): + def initialize(self, rng: jax.Array): d_x = 2 d_y = 3 self.n, self.m = 6, 7 @@ -311,7 +311,7 @@ def loss_thre(threshold: float) -> float: assert loss_thre(1e-3) >= loss_thre(1e-5) @pytest.mark.fast() - def test_gw_lr(self, rng: jnp.ndarray): + def test_gw_lr(self, rng: jax.Array): """Checking LR and Entropic have similar outputs on same problem.""" rngs = jax.random.split(rng, 4) n, m, d1, d2 = 24, 17, 2, 3 @@ -335,7 +335,7 @@ def test_gw_lr(self, rng: jnp.ndarray): ot_gwlr.primal_cost, ot_gw.primal_cost, rtol=5e-2 ) - def test_gw_lr_matches_fused(self, rng: jnp.ndarray): + def test_gw_lr_matches_fused(self, rng: jax.Array): """Checking LR and Entropic have similar outputs on same fused problem.""" rngs = jax.random.split(rng, 5) n, m, d1, d2 = 24, 17, 2, 3 @@ -386,7 +386,7 @@ def test_gw_lr_apply(self, axis: int): @pytest.mark.parametrize("scale_cost", [1.0, "mean"]) def test_relative_epsilon( self, - rng: jnp.ndarray, + rng: jax.Array, scale_cost: Union[float, str], ): eps = 1e-2 diff --git a/tests/solvers/quadratic/lower_bound_test.py b/tests/solvers/quadratic/lower_bound_test.py index 2e30a1bbe..37bf2a8b3 100644 --- a/tests/solvers/quadratic/lower_bound_test.py +++ b/tests/solvers/quadratic/lower_bound_test.py @@ -32,7 +32,7 @@ class TestLowerBoundSolver: @pytest.fixture(autouse=True) - def initialize(self, rng: jnp.ndarray): + def initialize(self, rng: jax.Array): d_x = 2 d_y = 3 self.n, self.m = 13, 15 @@ -95,7 +95,7 @@ def test_lb_pointcloud(self, ground_cost: costs.TICost): ] ) def test_lb_grad( - self, rng: jnp.ndarray, sort_fn: Callable[[jnp.ndarray], jnp.ndarray], + self, rng: jax.Array, sort_fn: Callable[[jnp.ndarray], jnp.ndarray], method: str ): diff --git a/tests/tools/gaussian_mixture/fit_gmm_pair_test.py b/tests/tools/gaussian_mixture/fit_gmm_pair_test.py index 20fe4ef4a..06103cf7f 
100644 --- a/tests/tools/gaussian_mixture/fit_gmm_pair_test.py +++ b/tests/tools/gaussian_mixture/fit_gmm_pair_test.py @@ -30,7 +30,7 @@ class TestFitGmmPair: @pytest.fixture(autouse=True) - def initialize(self, rng: jnp.ndarray): + def initialize(self, rng: jax.Array): mean_generator0 = jnp.array([[2., -1.], [-2., 0.], [4., 3.]]) cov_generator0 = jnp.array([[[0.2, 0.], [0., 0.1]], [[0.6, 0.], [0., 0.3]], [[0.5, 0.4], [0.4, 0.5]]]) diff --git a/tests/tools/gaussian_mixture/fit_gmm_test.py b/tests/tools/gaussian_mixture/fit_gmm_test.py index 648e9a287..82bbe3ec6 100644 --- a/tests/tools/gaussian_mixture/fit_gmm_test.py +++ b/tests/tools/gaussian_mixture/fit_gmm_test.py @@ -24,7 +24,7 @@ class TestFitGmm: @pytest.fixture(autouse=True) - def initialize(self, rng: jnp.ndarray): + def initialize(self, rng: jax.Array): mean_generator = jnp.array([[2., -1.], [-2., 0.], [4., 3.]]) cov_generator = jnp.array([[[0.2, 0.], [0., 0.1]], [[0.6, 0.], [0., 0.3]], [[0.5, 0.4], [0.4, 0.5]]]) diff --git a/tests/tools/gaussian_mixture/gaussian_mixture_pair_test.py b/tests/tools/gaussian_mixture/gaussian_mixture_pair_test.py index b11431d8c..690f07e33 100644 --- a/tests/tools/gaussian_mixture/gaussian_mixture_pair_test.py +++ b/tests/tools/gaussian_mixture/gaussian_mixture_pair_test.py @@ -23,7 +23,7 @@ class TestGaussianMixturePair: @pytest.fixture(autouse=True) - def initialize(self, rng: jnp.ndarray): + def initialize(self, rng: jax.Array): self.n_components = 3 self.n_dimensions = 2 self.epsilon = 1.e-3 diff --git a/tests/tools/gaussian_mixture/gaussian_mixture_test.py b/tests/tools/gaussian_mixture/gaussian_mixture_test.py index 540ebe980..864e11efc 100644 --- a/tests/tools/gaussian_mixture/gaussian_mixture_test.py +++ b/tests/tools/gaussian_mixture/gaussian_mixture_test.py @@ -24,7 +24,7 @@ class TestGaussianMixture: def test_get_summary_stats_from_points_and_assignment_probs( - self, rng: jnp.ndarray + self, rng: jax.Array ): n = 50 rng, subrng0, subrng1 = jax.random.split(rng, 
num=3) @@ -57,7 +57,7 @@ def test_get_summary_stats_from_points_and_assignment_probs( np.testing.assert_allclose(expected_cov, cov, atol=1e-4, rtol=1e-4) np.testing.assert_allclose(expected_wt, comp_wt, atol=1e-4, rtol=1e-4) - def test_from_random(self, rng: jnp.ndarray): + def test_from_random(self, rng: jax.Array): gmm = gaussian_mixture.GaussianMixture.from_random( rng=rng, n_components=3, n_dimensions=2 ) @@ -80,7 +80,7 @@ def test_from_mean_cov_component_weights(self,): comp_wts, gmm.component_weights, atol=1e-4, rtol=1e-4 ) - def test_covariance(self, rng: jnp.ndarray): + def test_covariance(self, rng: jax.Array): gmm = gaussian_mixture.GaussianMixture.from_random( rng=rng, n_components=3, n_dimensions=2 ) @@ -90,7 +90,7 @@ def test_covariance(self, rng: jnp.ndarray): cov[i], component.covariance(), atol=1e-4, rtol=1e-4 ) - def test_sample(self, rng: jnp.ndarray): + def test_sample(self, rng: jax.Array): gmm = gaussian_mixture.GaussianMixture.from_mean_cov_component_weights( mean=jnp.array([[-1., 0.], [1., 0.]]), cov=jnp.array([[[0.01, 0.], [0., 0.01]], [[0.01, 0.], [0., 0.01]]]), @@ -112,7 +112,7 @@ def test_sample(self, rng: jnp.ndarray): atol=1.e-1 ) - def test_log_prob(self, rng: jnp.ndarray): + def test_log_prob(self, rng: jax.Array): n_components = 3 size = 100 subrng0, subrng1 = jax.random.split(rng, num=2) @@ -136,7 +136,7 @@ def test_log_prob(self, rng: jnp.ndarray): np.testing.assert_allclose(expected, actual, atol=1e-4, rtol=1e-4) - def test_log_component_posterior(self, rng: jnp.ndarray): + def test_log_component_posterior(self, rng: jax.Array): gmm = gaussian_mixture.GaussianMixture.from_random( rng=rng, n_components=3, n_dimensions=2 ) @@ -150,7 +150,7 @@ def test_log_component_posterior(self, rng: jnp.ndarray): expected, gmm.get_log_component_posterior(x), atol=1e-4, rtol=1e-4 ) - def test_flatten_unflatten(self, rng: jnp.ndarray): + def test_flatten_unflatten(self, rng: jax.Array): gmm = gaussian_mixture.GaussianMixture.from_random( rng=rng, 
n_components=3, n_dimensions=2 ) @@ -159,7 +159,7 @@ def test_flatten_unflatten(self, rng: jnp.ndarray): assert gmm == gmm_new - def test_pytree_mapping(self, rng: jnp.ndarray): + def test_pytree_mapping(self, rng: jax.Array): gmm = gaussian_mixture.GaussianMixture.from_random( rng=rng, n_components=3, n_dimensions=2 ) diff --git a/tests/tools/gaussian_mixture/gaussian_test.py b/tests/tools/gaussian_mixture/gaussian_test.py index 23deff00d..b731c2a8f 100644 --- a/tests/tools/gaussian_mixture/gaussian_test.py +++ b/tests/tools/gaussian_mixture/gaussian_test.py @@ -23,7 +23,7 @@ @pytest.mark.fast() class TestGaussian: - def test_from_random(self, rng: jnp.ndarray): + def test_from_random(self, rng: jax.Array): g = gaussian.Gaussian.from_random(rng=rng, n_dimensions=3) np.testing.assert_array_equal(g.loc.shape, (3,)) @@ -37,7 +37,7 @@ def test_from_mean_and_cov(self): np.testing.assert_array_equal(mean, g.loc) np.testing.assert_allclose(cov, g.covariance(), atol=1e-4, rtol=1e-4) - def test_to_z(self, rng: jnp.ndarray): + def test_to_z(self, rng: jax.Array): g = gaussian.Gaussian( loc=jnp.array([1., 2.]), scale=scale_tril.ScaleTriL( @@ -53,7 +53,7 @@ def test_to_z(self, rng: jnp.ndarray): np.testing.assert_allclose(sample_mean, jnp.zeros(2), atol=0.1) np.testing.assert_allclose(sample_cov, jnp.eye(2), atol=0.1) - def test_from_z(self, rng: jnp.ndarray): + def test_from_z(self, rng: jax.Array): g = gaussian.Gaussian( loc=jnp.array([0., 0.]), scale=scale_tril.ScaleTriL( @@ -65,7 +65,7 @@ def test_from_z(self, rng: jnp.ndarray): xnew = g.from_z(z) np.testing.assert_allclose(x, xnew, atol=1e-4, rtol=1e-4) - def test_log_prob(self, rng: jnp.ndarray): + def test_log_prob(self, rng: jax.Array): g = gaussian.Gaussian( loc=jnp.array([0., 0.]), scale=scale_tril.ScaleTriL( @@ -79,7 +79,7 @@ def test_log_prob(self, rng: jnp.ndarray): ) np.testing.assert_allclose(expected, actual, atol=1e-5, rtol=1e-5) - def test_sample(self, rng: jnp.ndarray): + def test_sample(self, rng: 
jax.Array): mean = jnp.array([1., 2.]) cov = jnp.diag(jnp.array([1., 4.])) g = gaussian.Gaussian.from_mean_and_cov(mean, cov) @@ -90,7 +90,7 @@ def test_sample(self, rng: jnp.ndarray): np.testing.assert_allclose(sample_mean, mean, atol=3. * 2. / 100.) np.testing.assert_allclose(sample_cov, cov, atol=2e-1) - def test_w2_dist(self, rng: jnp.ndarray): + def test_w2_dist(self, rng: jax.Array): # make sure distance between a random normal and itself is 0 rng, subrng = jax.random.split(rng) n = gaussian.Gaussian.from_random(rng=subrng, n_dimensions=3) @@ -119,7 +119,7 @@ def test_w2_dist(self, rng: jnp.ndarray): expected = delta_mean + delta_sigma np.testing.assert_allclose(expected, w2, rtol=1e-6, atol=1e-6) - def test_transport(self, rng: jnp.ndarray): + def test_transport(self, rng: jax.Array): diag0 = jnp.array([1.]) diag1 = jnp.array([4.]) g0 = gaussian.Gaussian( @@ -135,14 +135,14 @@ def test_transport(self, rng: jnp.ndarray): expected = 2. * points + 1. np.testing.assert_allclose(expected, actual, atol=1e-5, rtol=1e-5) - def test_flatten_unflatten(self, rng: jnp.ndarray): + def test_flatten_unflatten(self, rng: jax.Array): g = gaussian.Gaussian.from_random(rng, n_dimensions=3) children, aux_data = jax.tree_util.tree_flatten(g) g_new = jax.tree_util.tree_unflatten(aux_data, children) assert g == g_new - def test_pytree_mapping(self, rng: jnp.ndarray): + def test_pytree_mapping(self, rng: jax.Array): g = gaussian.Gaussian.from_random(rng, n_dimensions=3) g_x_2 = jax.tree_map(lambda x: 2 * x, g) diff --git a/tests/tools/gaussian_mixture/linalg_test.py b/tests/tools/gaussian_mixture/linalg_test.py index 4529364dc..345f6bfa8 100644 --- a/tests/tools/gaussian_mixture/linalg_test.py +++ b/tests/tools/gaussian_mixture/linalg_test.py @@ -23,7 +23,7 @@ @pytest.mark.fast() class TestLinalg: - def test_get_mean_and_var(self, rng: jnp.ndarray): + def test_get_mean_and_var(self, rng: jax.Array): points = jax.random.normal(key=rng, shape=(10, 2)) weights = jnp.ones(10) 
expected_mean = jnp.mean(points, axis=0) @@ -34,7 +34,7 @@ def test_get_mean_and_var(self, rng: jnp.ndarray): np.testing.assert_allclose(expected_mean, actual_mean, atol=1E-5, rtol=1E-5) np.testing.assert_allclose(expected_var, actual_var, atol=1E-5, rtol=1E-5) - def test_get_mean_and_var_nonuniform_weights(self, rng: jnp.ndarray): + def test_get_mean_and_var_nonuniform_weights(self, rng: jax.Array): points = jax.random.normal(key=rng, shape=(10, 2)) weights = jnp.concatenate([jnp.ones(5), jnp.zeros(5)], axis=-1) expected_mean = jnp.mean(points[:5], axis=0) @@ -45,7 +45,7 @@ def test_get_mean_and_var_nonuniform_weights(self, rng: jnp.ndarray): np.testing.assert_allclose(expected_mean, actual_mean, rtol=1e-6, atol=1e-6) np.testing.assert_allclose(expected_var, actual_var, rtol=1e-6, atol=1e-6) - def test_get_mean_and_cov(self, rng: jnp.ndarray): + def test_get_mean_and_cov(self, rng: jax.Array): points = jax.random.normal(key=rng, shape=(10, 2)) weights = jnp.ones(10) expected_mean = jnp.mean(points, axis=0) @@ -56,7 +56,7 @@ def test_get_mean_and_cov(self, rng: jnp.ndarray): np.testing.assert_allclose(expected_mean, actual_mean, atol=1e-5, rtol=1e-5) np.testing.assert_allclose(expected_cov, actual_cov, atol=1e-5, rtol=1e-5) - def test_get_mean_and_cov_nonuniform_weights(self, rng: jnp.ndarray): + def test_get_mean_and_cov_nonuniform_weights(self, rng: jax.Array): points = jax.random.normal(key=rng, shape=(10, 2)) weights = jnp.concatenate([jnp.ones(5), jnp.zeros(5)], axis=-1) expected_mean = jnp.mean(points[:5], axis=0) @@ -67,7 +67,7 @@ def test_get_mean_and_cov_nonuniform_weights(self, rng: jnp.ndarray): np.testing.assert_allclose(expected_mean, actual_mean, rtol=1e-6, atol=1e-6) np.testing.assert_allclose(expected_cov, actual_cov, rtol=1e-6, atol=1e-6) - def test_flat_to_tril(self, rng: jnp.ndarray): + def test_flat_to_tril(self, rng: jax.Array): size = 3 x = jax.random.normal(key=rng, shape=(5, 4, size * (size + 1) // 2)) m = linalg.flat_to_tril(x, size) @@ 
-87,7 +87,7 @@ def test_flat_to_tril(self, rng: jnp.ndarray): actual = linalg.tril_to_flat(m) np.testing.assert_allclose(x, actual) - def test_tril_to_flat(self, rng: jnp.ndarray): + def test_tril_to_flat(self, rng: jax.Array): size = 3 m = jax.random.normal(key=rng, shape=(5, 4, size, size)) for i in range(size): @@ -104,7 +104,7 @@ def test_tril_to_flat(self, rng: jnp.ndarray): inverted = linalg.flat_to_tril(flat, size) np.testing.assert_allclose(m, inverted) - def test_apply_to_diag(self, rng: jnp.ndarray): + def test_apply_to_diag(self, rng: jax.Array): size = 3 m = jax.random.normal(key=rng, shape=(5, 4, size, size)) mnew = linalg.apply_to_diag(m, jnp.exp) @@ -115,7 +115,7 @@ def test_apply_to_diag(self, rng: jnp.ndarray): else: np.testing.assert_allclose(jnp.exp(m[..., i, j]), mnew[..., i, j]) - def test_matrix_powers(self, rng: jnp.ndarray): + def test_matrix_powers(self, rng: jax.Array): rng, subrng = jax.random.split(rng) m = jax.random.normal(key=subrng, shape=(4, 4)) m += jnp.swapaxes(m, axis1=-2, axis2=-1) # symmetric @@ -126,7 +126,7 @@ def test_matrix_powers(self, rng: jnp.ndarray): np.testing.assert_allclose(m, actual[0], rtol=1.e-5) np.testing.assert_allclose(inv_m, actual[1], rtol=1.e-4) - def test_invmatvectril(self, rng: jnp.ndarray): + def test_invmatvectril(self, rng: jax.Array): rng, subrng = jax.random.split(rng) m = jax.random.normal(key=subrng, shape=(2, 2)) m += jnp.swapaxes(m, axis1=-2, axis2=-1) # symmetric @@ -139,7 +139,7 @@ def test_invmatvectril(self, rng: jnp.ndarray): actual = linalg.invmatvectril(m=cholesky, x=x, lower=True) np.testing.assert_allclose(expected, actual, atol=1e-4, rtol=1.e-4) - def test_get_random_orthogonal(self, rng: jnp.ndarray): + def test_get_random_orthogonal(self, rng: jax.Array): rng, subrng = jax.random.split(rng) q = linalg.get_random_orthogonal(rng=subrng, dim=3) qt = jnp.transpose(q) diff --git a/tests/tools/gaussian_mixture/probabilities_test.py b/tests/tools/gaussian_mixture/probabilities_test.py 
index 4fce8186f..fa0753c9f 100644 --- a/tests/tools/gaussian_mixture/probabilities_test.py +++ b/tests/tools/gaussian_mixture/probabilities_test.py @@ -40,7 +40,7 @@ def test_log_probs(self): np.testing.assert_allclose(jnp.sum(probs), 1.0, rtol=1e-6, atol=1e-6) np.testing.assert_array_equal(probs > 0., True) - def test_from_random(self, rng: jnp.ndarray): + def test_from_random(self, rng: jax.Array): n_dimensions = 4 pp = probabilities.Probabilities.from_random( rng=rng, n_dimensions=n_dimensions, stdev=0.1 @@ -52,7 +52,7 @@ def test_from_probs(self): pp = probabilities.Probabilities.from_probs(probs) np.testing.assert_allclose(probs, pp.probs(), rtol=1e-6, atol=1e-6) - def test_sample(self, rng: jnp.ndarray): + def test_sample(self, rng: jax.Array): p = 0.4 probs = jnp.array([p, 1. - p]) pp = probabilities.Probabilities.from_probs(probs) diff --git a/tests/tools/gaussian_mixture/scale_tril_test.py b/tests/tools/gaussian_mixture/scale_tril_test.py index f7bbe9293..e8244590b 100644 --- a/tests/tools/gaussian_mixture/scale_tril_test.py +++ b/tests/tools/gaussian_mixture/scale_tril_test.py @@ -48,7 +48,7 @@ def test_log_det_covariance(self, chol: scale_tril.ScaleTriL): actual = chol.log_det_covariance() np.testing.assert_almost_equal(actual, expected) - def test_from_random(self, rng: jnp.ndarray): + def test_from_random(self, rng: jax.Array): n_dimensions = 4 cov = scale_tril.ScaleTriL.from_random( rng=rng, n_dimensions=n_dimensions, stdev=0.1 @@ -57,7 +57,7 @@ def test_from_random(self, rng: jnp.ndarray): cov.cholesky().shape, (n_dimensions, n_dimensions) ) - def test_from_cholesky(self, rng: jnp.ndarray): + def test_from_cholesky(self, rng: jax.Array): n_dimensions = 4 cholesky = scale_tril.ScaleTriL.from_random( rng=rng, n_dimensions=n_dimensions, stdev=1. 
@@ -65,7 +65,7 @@ def test_from_cholesky(self, rng: jnp.ndarray): scale = scale_tril.ScaleTriL.from_cholesky(cholesky) np.testing.assert_allclose(cholesky, scale.cholesky(), atol=1e-4, rtol=1e-4) - def test_w2_dist(self, rng: jnp.ndarray): + def test_w2_dist(self, rng: jax.Array): # make sure distance between a random normal and itself is 0 rng, subrng = jax.random.split(rng) s = scale_tril.ScaleTriL.from_random(rng=subrng, n_dimensions=3) @@ -86,7 +86,7 @@ def test_w2_dist(self, rng: jnp.ndarray): delta_sigma = jnp.sum((jnp.sqrt(diag0) - jnp.sqrt(diag1)) ** 2.) np.testing.assert_allclose(delta_sigma, w2, atol=1e-4, rtol=1e-4) - def test_transport(self, rng: jnp.ndarray): + def test_transport(self, rng: jax.Array): size = 4 rng, subrng0, subrng1 = jax.random.split(rng, num=3) diag0 = jnp.exp(jax.random.normal(key=subrng0, shape=(size,))) @@ -100,14 +100,14 @@ def test_transport(self, rng: jnp.ndarray): expected = x * jnp.sqrt(diag1)[None] / jnp.sqrt(diag0)[None] np.testing.assert_allclose(expected, transported, atol=1e-4, rtol=1e-4) - def test_flatten_unflatten(self, rng: jnp.ndarray): + def test_flatten_unflatten(self, rng: jax.Array): scale = scale_tril.ScaleTriL.from_random(rng=rng, n_dimensions=3) children, aux_data = jax.tree_util.tree_flatten(scale) scale_new = jax.tree_util.tree_unflatten(aux_data, children) np.testing.assert_array_equal(scale.params, scale_new.params) assert scale == scale_new - def test_pytree_mapping(self, rng: jnp.ndarray): + def test_pytree_mapping(self, rng: jax.Array): scale = scale_tril.ScaleTriL.from_random(rng=rng, n_dimensions=3) scale_x_2 = jax.tree_map(lambda x: 2 * x, scale) np.testing.assert_allclose(2. 
* scale.params, scale_x_2.params) diff --git a/tests/tools/k_means_test.py b/tests/tools/k_means_test.py index c00288cec..6fc0fd403 100644 --- a/tests/tools/k_means_test.py +++ b/tests/tools/k_means_test.py @@ -64,7 +64,7 @@ def compute_assignment( class TestKmeansPlusPlus: @pytest.mark.fast.with_args("n_local_trials", [None, 3], only_fast=-1) - def test_n_local_trials(self, rng: jnp.ndarray, n_local_trials): + def test_n_local_trials(self, rng: jax.Array, n_local_trials): n, k = 100, 4 rng1, rng2 = jax.random.split(rng) geom, _, c = make_blobs( @@ -79,7 +79,7 @@ def test_n_local_trials(self, rng: jnp.ndarray, n_local_trials): assert shift1 > shift2 @pytest.mark.fast.with_args("k", [3, 5], only_fast=0) - def test_matches_sklearn(self, rng: jnp.ndarray, k: int): + def test_matches_sklearn(self, rng: jax.Array, k: int): ndim = 2 geom, _, _ = make_blobs( n_samples=100, @@ -103,7 +103,7 @@ def test_matches_sklearn(self, rng: jnp.ndarray, k: int): ) assert jnp.abs(pred_inertia - gt_inertia) <= 200 - def test_initialization_differentiable(self, rng: jnp.ndarray): + def test_initialization_differentiable(self, rng: jax.Array): def callback(x: jnp.ndarray) -> float: geom = pointcloud.PointCloud(x) @@ -123,7 +123,7 @@ class TestKmeans: @pytest.mark.fast() @pytest.mark.parametrize("k", [1, 6]) - def test_k_means_output(self, rng: jnp.ndarray, k: int): + def test_k_means_output(self, rng: jax.Array, k: int): max_iter, ndim = 10, 4 geom, gt_assignment, _ = make_blobs( n_samples=50, n_features=ndim, centers=k, random_state=42 @@ -161,7 +161,7 @@ def test_k_means_simple_example(self): ["k-means++", "random", "callable", "wrong-callable"], only_fast=1, ) - def test_init_method(self, rng: jnp.ndarray, init: str): + def test_init_method(self, rng: jax.Array, init: str): if init == "callable": init_fn = lambda geom, k, _: geom.x[:k] elif init == "wrong-callable": @@ -177,7 +177,7 @@ def test_init_method(self, rng: jnp.ndarray, init: str): else: _ = k_means.k_means(geom, k, 
init=init_fn) - def test_k_means_plus_plus_better_than_random(self, rng: jnp.ndarray): + def test_k_means_plus_plus_better_than_random(self, rng: jax.Array): k = 5 rng1, rng2 = jax.random.split(rng, 2) geom, _, _ = make_blobs(n_samples=50, centers=k, random_state=10) @@ -190,7 +190,7 @@ def test_k_means_plus_plus_better_than_random(self, rng: jnp.ndarray): assert res_kpp.iteration < res_random.iteration assert res_kpp.error <= res_random.error - def test_larger_n_init_helps(self, rng: jnp.ndarray): + def test_larger_n_init_helps(self, rng: jax.Array): k = 10 geom, _, _ = make_blobs(n_samples=150, centers=k, random_state=0) @@ -200,7 +200,7 @@ def test_larger_n_init_helps(self, rng: jnp.ndarray): assert res_larger_n_init.error < res.error @pytest.mark.parametrize("max_iter", [8, 16]) - def test_store_inner_errors(self, rng: jnp.ndarray, max_iter: int): + def test_store_inner_errors(self, rng: jax.Array, max_iter: int): ndim, k = 10, 4 geom, _, _ = make_blobs( n_samples=40, n_features=ndim, centers=k, random_state=43 @@ -216,7 +216,7 @@ def test_store_inner_errors(self, rng: jnp.ndarray, max_iter: int): # check if error is decreasing np.testing.assert_array_equal(jnp.diff(errors[::-1]) >= 0., True) - def test_strict_tolerance(self, rng: jnp.ndarray): + def test_strict_tolerance(self, rng: jax.Array): k = 11 geom, _, _ = make_blobs(n_samples=200, centers=k, random_state=39) @@ -230,7 +230,7 @@ def test_strict_tolerance(self, rng: jnp.ndarray): @pytest.mark.parametrize( "tol", [1e-3, 0.], ids=["weak-convergence", "strict-convergence"] ) - def test_convergence_force_scan(self, rng: jnp.ndarray, tol: float): + def test_convergence_force_scan(self, rng: jax.Array, tol: float): k, n_iter = 9, 20 geom, _, _ = make_blobs(n_samples=100, centers=k, random_state=37) @@ -248,7 +248,7 @@ def test_convergence_force_scan(self, rng: jnp.ndarray, tol: float): assert res.iteration == n_iter np.testing.assert_array_equal(res.inner_errors == -1, False) - def 
test_k_means_min_iterations(self, rng: jnp.ndarray): + def test_k_means_min_iterations(self, rng: jax.Array): k, min_iter = 8, 12 geom, _, _ = make_blobs(n_samples=160, centers=k, random_state=38) @@ -265,7 +265,7 @@ def test_k_means_min_iterations(self, rng: jnp.ndarray): assert res.converged assert jnp.sum(res.inner_errors != -1) >= min_iter - def test_weight_scaling_effects_only_inertia(self, rng: jnp.ndarray): + def test_weight_scaling_effects_only_inertia(self, rng: jax.Array): k = 10 rng1, rng2 = jax.random.split(rng) geom, _, _ = make_blobs(n_samples=130, centers=k, random_state=3) @@ -286,7 +286,7 @@ def test_weight_scaling_effects_only_inertia(self, rng: jnp.ndarray): ) @pytest.mark.fast() - def test_empty_weights(self, rng: jnp.ndarray): + def test_empty_weights(self, rng: jax.Array): n, ndim, k, d = 20, 2, 3, 5. gen = np.random.RandomState(0) x = gen.normal(size=(n, ndim)) @@ -334,7 +334,7 @@ def test_cosine_cost_fn(self): @pytest.mark.fast.with_args("init", ["k-means++", "random"], only_fast=0) def test_k_means_jitting( - self, rng: jnp.ndarray, init: Literal["k-means++", "random"] + self, rng: jax.Array, init: Literal["k-means++", "random"] ): def callback(x: jnp.ndarray) -> k_means.KMeansOutput: @@ -366,7 +366,7 @@ def callback(x: jnp.ndarray) -> k_means.KMeansOutput: (False, True)], ids=["jit-while-loop", "nojit-for-loop"]) def test_k_means_differentiability( - self, rng: jnp.ndarray, jit: bool, force_scan: bool + self, rng: jax.Array, jit: bool, force_scan: bool ): def inertia(x: jnp.ndarray, w: jnp.ndarray) -> float: @@ -405,7 +405,7 @@ def inertia(x: jnp.ndarray, w: jnp.ndarray) -> float: @pytest.mark.parametrize("tol", [1e-3, 0.]) @pytest.mark.parametrize(("n", "k"), [(37, 4), (128, 6)]) def test_clustering_matches_sklearn( - self, rng: jnp.ndarray, n: int, k: int, tol: float + self, rng: jax.Array, n: int, k: int, tol: float ): x, _, _ = make_blobs(n_samples=n, centers=k, random_state=41) diff --git a/tests/tools/segment_sinkhorn_test.py 
b/tests/tools/segment_sinkhorn_test.py index f98c164bf..53fb4ae85 100644 --- a/tests/tools/segment_sinkhorn_test.py +++ b/tests/tools/segment_sinkhorn_test.py @@ -27,7 +27,7 @@ class TestSegmentSinkhorn: @pytest.fixture(autouse=True) - def setUp(self, rng: jnp.ndarray): + def setUp(self, rng: jax.Array): self._dim = 4 self._num_points = 13, 17 self._max_measure_size = 20 diff --git a/tests/tools/sinkhorn_divergence_test.py b/tests/tools/sinkhorn_divergence_test.py index e3eab9912..040a04e00 100644 --- a/tests/tools/sinkhorn_divergence_test.py +++ b/tests/tools/sinkhorn_divergence_test.py @@ -29,7 +29,7 @@ class TestSinkhornDivergence: @pytest.fixture(autouse=True) - def setUp(self, rng: jnp.ndarray): + def setUp(self, rng: jax.Array): self._dim = 4 self._num_points = 13, 17 self.rng, *rngs = jax.random.split(rng, 3) @@ -390,7 +390,7 @@ def test_euclidean_momentum_params( class TestSinkhornDivergenceGrad: @pytest.fixture(autouse=True) - def initialize(self, rng: jnp.ndarray): + def initialize(self, rng: jax.Array): self._dim = 3 self._num_points = 13, 12 self.rng, *rngs = jax.random.split(rng, 3) diff --git a/tests/tools/soft_sort_test.py b/tests/tools/soft_sort_test.py index c84680e9e..b4fa68ddf 100644 --- a/tests/tools/soft_sort_test.py +++ b/tests/tools/soft_sort_test.py @@ -28,14 +28,14 @@ class TestSoftSort: @pytest.mark.parametrize("shape", [(20,), (20, 1)]) - def test_sort_one_array(self, rng: jnp.ndarray, shape: Tuple[int, ...]): + def test_sort_one_array(self, rng: jax.Array, shape: Tuple[int, ...]): x = jax.random.uniform(rng, shape) xs = soft_sort.sort(x, axis=0) np.testing.assert_array_equal(x.shape, xs.shape) np.testing.assert_array_equal(jnp.diff(xs, axis=0) >= 0.0, True) - def test_sort_array_squashing_momentum(self, rng: jnp.ndarray): + def test_sort_array_squashing_momentum(self, rng: jax.Array): shape = (33, 1) x = jax.random.uniform(rng, shape) xs_lin = soft_sort.sort( @@ -62,7 +62,7 @@ def test_sort_array_squashing_momentum(self, rng: 
jnp.ndarray): @pytest.mark.fast() @pytest.mark.parametrize("k", [-1, 4, 100]) - def test_topk_one_array(self, rng: jnp.ndarray, k: int): + def test_topk_one_array(self, rng: jax.Array, k: int): n = 20 x = jax.random.uniform(rng, (n,)) axis = 0 @@ -76,7 +76,7 @@ def test_topk_one_array(self, rng: jnp.ndarray, k: int): np.testing.assert_allclose(xs, jnp.sort(x, axis=axis)[-outsize:], atol=0.01) @pytest.mark.fast.with_args("topk", [-1, 2, 11], only_fast=-1) - def test_sort_batch(self, rng: jnp.ndarray, topk: int): + def test_sort_batch(self, rng: jax.Array, topk: int): x = jax.random.uniform(rng, (32, 10, 6, 4)) axis = 1 xs = soft_sort.sort(x, axis=axis, topk=topk) @@ -86,7 +86,7 @@ def test_sort_batch(self, rng: jnp.ndarray, topk: int): np.testing.assert_array_equal(xs.shape, expected_shape) np.testing.assert_array_equal(jnp.diff(xs, axis=axis) >= 0.0, True) - def test_multivariate_cdf_quantiles(self, rng: jnp.ndarray): + def test_multivariate_cdf_quantiles(self, rng: jax.Array): n, d = 512, 3 key1, key2, key3 = jax.random.split(rng, 3) @@ -129,7 +129,7 @@ def mv_c_q(inputs, num_target_samples, rng, epsilon): np.testing.assert_allclose(z, qua(q), atol=atol) @pytest.mark.fast.with_args("axis,jit", [(0, False), (1, True)], only_fast=0) - def test_ranks(self, axis, rng: jnp.ndarray, jit: bool): + def test_ranks(self, axis, rng: jax.Array, jit: bool): rng1, rng2 = jax.random.split(rng, 2) num_targets = 13 x = jax.random.uniform(rng1, (8, 5, 2)) @@ -164,7 +164,7 @@ def test_ranks(self, axis, rng: jnp.ndarray, jit: bool): np.testing.assert_allclose(ranks, expected_ranks, atol=0.3, rtol=0.1) @pytest.mark.fast.with_args("axis,jit", [(0, False), (1, True)], only_fast=0) - def test_topk_mask(self, axis, rng: jnp.ndarray, jit: bool): + def test_topk_mask(self, axis, rng: jax.Array, jit: bool): def boolean_topk_mask(u, k): return u >= jnp.flip(jax.numpy.sort(u))[k - 1] @@ -195,7 +195,7 @@ def test_quantile(self, q: float): np.testing.assert_allclose(x_q, q, atol=1e-3, rtol=1e-2) 
- def test_quantile_on_several_axes(self, rng: jnp.ndarray): + def test_quantile_on_several_axes(self, rng: jax.Array): batch, height, width, channels = 4, 47, 45, 3 x = jax.random.uniform(rng, shape=(batch, height, width, channels)) q = soft_sort.quantile( @@ -209,7 +209,7 @@ def test_quantile_on_several_axes(self, rng: jnp.ndarray): @pytest.mark.fast() @pytest.mark.parametrize("jit", [False, True]) - def test_quantiles(self, rng: jnp.ndarray, jit: bool): + def test_quantiles(self, rng: jax.Array, jit: bool): inputs = jax.random.uniform(rng, (100, 2, 3)) q = jnp.array([.1, .8, .4]) quantile_fn = soft_sort.quantile @@ -221,7 +221,7 @@ def test_quantiles(self, rng: jnp.ndarray, jit: bool): np.testing.assert_allclose(m1.mean(axis=[1, 2]), q, atol=5e-2) @pytest.mark.parametrize("jit", [False, True]) - def test_soft_quantile_normalization(self, rng: jnp.ndarray, jit: bool): + def test_soft_quantile_normalization(self, rng: jax.Array, jit: bool): rngs = jax.random.split(rng, 2) x = jax.random.uniform(rngs[0], shape=(100,)) mu, sigma = 2.0, 1.2 @@ -238,7 +238,7 @@ def test_soft_quantile_normalization(self, rng: jnp.ndarray, jit: bool): [mu_target, sigma_target], rtol=0.05) - def test_sort_with(self, rng: jnp.ndarray): + def test_sort_with(self, rng: jax.Array): n, d = 20, 4 inputs = jax.random.uniform(rng, shape=(n, d)) criterion = jnp.linspace(0.1, 1.2, n) @@ -270,7 +270,7 @@ def test_quantize(self, jit: bool): np.testing.assert_allclose(min_distances, min_distances, atol=0.05) @pytest.mark.parametrize("implicit", [False, True]) - def test_soft_sort_jacobian(self, rng: jnp.ndarray, implicit: bool): + def test_soft_sort_jacobian(self, rng: jax.Array, implicit: bool): # Add a ridge when using JAX solvers. 
try: from ott.solvers.linear import lineax_implicit # noqa: F401 From 2bc683a2bf39f80708603848ec4be3bd20ff2290 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Wed, 29 Nov 2023 16:00:43 +0100 Subject: [PATCH 050/186] replace rng jnp.ndarray type by jax.array --- docs/tutorials/Monge_Gap.ipynb | 6 +++--- src/ott/datasets.py | 2 +- src/ott/geometry/geometry.py | 2 +- src/ott/geometry/low_rank.py | 2 +- src/ott/initializers/linear/initializers.py | 16 ++++++++-------- src/ott/initializers/linear/initializers_lr.py | 2 +- src/ott/neural/models/models.py | 4 ++-- src/ott/neural/solvers/genot.py | 4 ++-- src/ott/neural/solvers/map_estimator.py | 2 +- src/ott/neural/solvers/neuraldual.py | 2 +- src/ott/neural/solvers/otfm.py | 2 +- src/ott/problems/quadratic/quadratic_problem.py | 2 +- src/ott/solvers/linear/continuous_barycenter.py | 4 ++-- src/ott/solvers/linear/sinkhorn.py | 2 +- src/ott/solvers/linear/sinkhorn_lr.py | 2 +- src/ott/solvers/quadratic/gromov_wasserstein.py | 2 +- .../solvers/quadratic/gromov_wasserstein_lr.py | 2 +- src/ott/solvers/quadratic/gw_barycenter.py | 2 +- src/ott/tools/k_means.py | 2 +- src/ott/tools/soft_sort.py | 2 +- src/ott/utils.py | 2 +- tests/neural/neuraldual_test.py | 2 +- 22 files changed, 34 insertions(+), 34 deletions(-) diff --git a/docs/tutorials/Monge_Gap.ipynb b/docs/tutorials/Monge_Gap.ipynb index 2fde4f923..78b4ce602 100644 --- a/docs/tutorials/Monge_Gap.ipynb +++ b/docs/tutorials/Monge_Gap.ipynb @@ -99,7 +99,7 @@ " noise: float = 0.01\n", " scale: float = 1.0\n", " batch_size: int = 1024\n", - " rng: Optional[jnp.ndarray] = (None,)\n", + " rng: Optional[jax.Array] = (None,)\n", "\n", " def __iter__(self) -> Iterator[jnp.ndarray]:\n", " \"\"\"Random sample generator from Gaussian mixture.\n", @@ -152,7 +152,7 @@ " target_kwargs: Mapping[str, Any] = MappingProxyType({}),\n", " train_batch_size: int = 256,\n", " valid_batch_size: int = 256,\n", - " rng: Optional[jnp.ndarray] = None,\n", + " rng: Optional[jax.Array] = None,\n", 
") -> Tuple[dataset.Dataset, dataset.Dataset, int]:\n", " \"\"\"Samplers from ``SklearnDistribution``.\"\"\"\n", " rng = jax.random.PRNGKey(0) if rng is None else rng\n", @@ -203,7 +203,7 @@ " num_points: Optional[int] = None,\n", " title: Optional[str] = None,\n", " figsize: Tuple[int, int] = (8, 6),\n", - " rng: Optional[jnp.ndarray] = None,\n", + " rng: Optional[jax.Array] = None,\n", "):\n", " \"\"\"Plot samples from the source and target measures.\n", "\n", diff --git a/src/ott/datasets.py b/src/ott/datasets.py index 3507c3418..e5077c87c 100644 --- a/src/ott/datasets.py +++ b/src/ott/datasets.py @@ -110,7 +110,7 @@ def create_gaussian_mixture_samplers( name_target: Name_t, train_batch_size: int = 2048, valid_batch_size: int = 2048, - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, ) -> Tuple[Dataset, Dataset, int]: """Gaussian samplers. diff --git a/src/ott/geometry/geometry.py b/src/ott/geometry/geometry.py index 6894176a6..766c5e618 100644 --- a/src/ott/geometry/geometry.py +++ b/src/ott/geometry/geometry.py @@ -625,7 +625,7 @@ def to_LRCGeometry( self, rank: int = 0, tol: float = 1e-2, - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, scale: float = 1. ) -> "low_rank.LRCGeometry": r"""Factorize the cost matrix using either SVD (full) or :cite:`indyk:19`. 
diff --git a/src/ott/geometry/low_rank.py b/src/ott/geometry/low_rank.py index 1bfaeae0a..966db28d4 100644 --- a/src/ott/geometry/low_rank.py +++ b/src/ott/geometry/low_rank.py @@ -229,7 +229,7 @@ def to_LRCGeometry( self, rank: int = 0, tol: float = 1e-2, - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, scale: float = 1.0, ) -> "LRCGeometry": """Return self.""" diff --git a/src/ott/initializers/linear/initializers.py b/src/ott/initializers/linear/initializers.py index bc4871841..f3ba93321 100644 --- a/src/ott/initializers/linear/initializers.py +++ b/src/ott/initializers/linear/initializers.py @@ -36,7 +36,7 @@ def init_dual_a( self, ot_prob: linear_problem.LinearProblem, lse_mode: bool, - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, ) -> jnp.ndarray: """Initialize Sinkhorn potential/scaling f_u. @@ -54,7 +54,7 @@ def init_dual_b( self, ot_prob: linear_problem.LinearProblem, lse_mode: bool, - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, ) -> jnp.ndarray: """Initialize Sinkhorn potential/scaling g_v. @@ -73,7 +73,7 @@ def __call__( a: Optional[jnp.ndarray], b: Optional[jnp.ndarray], lse_mode: bool, - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, ) -> Tuple[jnp.ndarray, jnp.ndarray]: """Initialize Sinkhorn potentials/scalings f_u and g_v. 
@@ -128,7 +128,7 @@ def init_dual_a( # noqa: D102 self, ot_prob: linear_problem.LinearProblem, lse_mode: bool, - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, ) -> jnp.ndarray: del rng return jnp.zeros_like(ot_prob.a) if lse_mode else jnp.ones_like(ot_prob.a) @@ -137,7 +137,7 @@ def init_dual_b( # noqa: D102 self, ot_prob: linear_problem.LinearProblem, lse_mode: bool, - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, ) -> jnp.ndarray: del rng return jnp.zeros_like(ot_prob.b) if lse_mode else jnp.ones_like(ot_prob.b) @@ -158,7 +158,7 @@ def init_dual_a( # noqa: D102 self, ot_prob: linear_problem.LinearProblem, lse_mode: bool, - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, ) -> jnp.ndarray: # import Gaussian here due to circular imports from ott.tools.gaussian_mixture import gaussian @@ -245,7 +245,7 @@ def init_dual_a( self, ot_prob: linear_problem.LinearProblem, lse_mode: bool, - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, init_f: Optional[jnp.ndarray] = None, ) -> jnp.ndarray: """Apply DualSort algorithm. @@ -324,7 +324,7 @@ def init_dual_a( # noqa: D102 self, ot_prob: linear_problem.LinearProblem, lse_mode: bool, - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, ) -> jnp.ndarray: from ott.solvers import linear diff --git a/src/ott/initializers/linear/initializers_lr.py b/src/ott/initializers/linear/initializers_lr.py index a3f615846..5c2302156 100644 --- a/src/ott/initializers/linear/initializers_lr.py +++ b/src/ott/initializers/linear/initializers_lr.py @@ -169,7 +169,7 @@ def __call__( r: Optional[jnp.ndarray] = None, g: Optional[jnp.ndarray] = None, *, - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, **kwargs: Any ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]: """Initialize the factors :math:`Q`, :math:`R` and :math:`g`. 
diff --git a/src/ott/neural/models/models.py b/src/ott/neural/models/models.py index c65cbbaf3..8e2562e97 100644 --- a/src/ott/neural/models/models.py +++ b/src/ott/neural/models/models.py @@ -268,7 +268,7 @@ def __init__( meta_model: nn.Module, opt: Optional[optax.GradientTransformation ] = optax.adam(learning_rate=1e-3), # noqa: B008 - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, state: Optional[train_state.TrainState] = None ): self.geom = geom @@ -334,7 +334,7 @@ def init_dual_a( # noqa: D102 self, ot_prob: "linear_problem.LinearProblem", lse_mode: bool, - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, ) -> jnp.ndarray: del rng # Detect if the problem is batched. diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/solvers/genot.py index 0613ae53c..61b368a67 100644 --- a/src/ott/neural/solvers/genot.py +++ b/src/ott/neural/solvers/genot.py @@ -131,7 +131,7 @@ def __init__( unbalanced_kwargs: Dict[str, Any] = types.MappingProxyType({}), callback_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray, jnp.ndarray], Any]] = None, - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, ) -> None: rng = utils.default_prng_key(rng) rng, rng_unbalanced = jax.random.split(rng) @@ -379,7 +379,7 @@ def transport( self, source: jnp.ndarray, condition: Optional[jnp.ndarray], - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, forward: bool = True, diffeqsolve_kwargs: Dict[str, Any] = types.MappingProxyType({}), ) -> Union[jnp.array, diffrax.Solution, Optional[jnp.ndarray]]: diff --git a/src/ott/neural/solvers/map_estimator.py b/src/ott/neural/solvers/map_estimator.py index 7eaffdfc8..fb65917c7 100644 --- a/src/ott/neural/solvers/map_estimator.py +++ b/src/ott/neural/solvers/map_estimator.py @@ -88,7 +88,7 @@ def __init__( num_train_iters: int = 10_000, logging: bool = False, valid_freq: int = 500, - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, ): 
self._fitting_loss = fitting_loss self._regularizer = regularizer diff --git a/src/ott/neural/solvers/neuraldual.py b/src/ott/neural/solvers/neuraldual.py index 0d9e215bb..019fb836f 100644 --- a/src/ott/neural/solvers/neuraldual.py +++ b/src/ott/neural/solvers/neuraldual.py @@ -243,7 +243,7 @@ def __init__( valid_freq: int = 1000, log_freq: int = 1000, logging: bool = False, - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, pos_weights: bool = True, beta: float = 1.0, conjugate_solver: Optional[conjugate.FenchelConjugateSolver diff --git a/src/ott/neural/solvers/otfm.py b/src/ott/neural/solvers/otfm.py index fb054e30a..d145c4128 100644 --- a/src/ott/neural/solvers/otfm.py +++ b/src/ott/neural/solvers/otfm.py @@ -116,7 +116,7 @@ def __init__( logging_freq: int = 100, valid_freq: int = 5000, num_eval_samples: int = 1000, - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, ) -> None: rng = utils.default_prng_key(rng) rng, rng_unbalanced = jax.random.split(rng) diff --git a/src/ott/problems/quadratic/quadratic_problem.py b/src/ott/problems/quadratic/quadratic_problem.py index a17aaf9fb..5deb4558c 100644 --- a/src/ott/problems/quadratic/quadratic_problem.py +++ b/src/ott/problems/quadratic/quadratic_problem.py @@ -382,7 +382,7 @@ def convertible(geom: geometry.Geometry) -> bool: def to_low_rank( self, - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, ) -> "QuadraticProblem": """Convert geometries to low-rank. 
diff --git a/src/ott/solvers/linear/continuous_barycenter.py b/src/ott/solvers/linear/continuous_barycenter.py index e1477e60f..2d89a74ea 100644 --- a/src/ott/solvers/linear/continuous_barycenter.py +++ b/src/ott/solvers/linear/continuous_barycenter.py @@ -130,7 +130,7 @@ def __call__( # noqa: D102 bar_prob: barycenter_problem.FreeBarycenterProblem, bar_size: int = 100, x_init: Optional[jnp.ndarray] = None, - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, ) -> FreeBarycenterState: # TODO(michalk8): no reason for iterations to be outside this class rng = utils.default_prng_key(rng) @@ -141,7 +141,7 @@ def init_state( bar_prob: barycenter_problem.FreeBarycenterProblem, bar_size: int, x_init: Optional[jnp.ndarray] = None, - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, ) -> FreeBarycenterState: """Initialize the state of the Wasserstein barycenter iterations. diff --git a/src/ott/solvers/linear/sinkhorn.py b/src/ott/solvers/linear/sinkhorn.py index 56c718c1f..d9ab53f3a 100644 --- a/src/ott/solvers/linear/sinkhorn.py +++ b/src/ott/solvers/linear/sinkhorn.py @@ -843,7 +843,7 @@ def __call__( self, ot_prob: linear_problem.LinearProblem, init: Tuple[Optional[jnp.ndarray], Optional[jnp.ndarray]] = (None, None), - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, ) -> SinkhornOutput: """Run Sinkhorn algorithm. diff --git a/src/ott/solvers/linear/sinkhorn_lr.py b/src/ott/solvers/linear/sinkhorn_lr.py index ba83aeb99..db948cf8b 100644 --- a/src/ott/solvers/linear/sinkhorn_lr.py +++ b/src/ott/solvers/linear/sinkhorn_lr.py @@ -343,7 +343,7 @@ def __call__( ot_prob: linear_problem.LinearProblem, init: Tuple[Optional[jnp.ndarray], Optional[jnp.ndarray], Optional[jnp.ndarray]] = (None, None, None), - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, **kwargs: Any, ) -> LRSinkhornOutput: """Run low-rank Sinkhorn. 
diff --git a/src/ott/solvers/quadratic/gromov_wasserstein.py b/src/ott/solvers/quadratic/gromov_wasserstein.py index 6180db73f..5e23d88e6 100644 --- a/src/ott/solvers/quadratic/gromov_wasserstein.py +++ b/src/ott/solvers/quadratic/gromov_wasserstein.py @@ -213,7 +213,7 @@ def __call__( self, prob: quadratic_problem.QuadraticProblem, init: Optional[linear_problem.LinearProblem] = None, - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, **kwargs: Any, ) -> GWOutput: """Run the Gromov-Wasserstein solver. diff --git a/src/ott/solvers/quadratic/gromov_wasserstein_lr.py b/src/ott/solvers/quadratic/gromov_wasserstein_lr.py index 710d8f617..214853f4c 100644 --- a/src/ott/solvers/quadratic/gromov_wasserstein_lr.py +++ b/src/ott/solvers/quadratic/gromov_wasserstein_lr.py @@ -336,7 +336,7 @@ def __call__( ot_prob: quadratic_problem.QuadraticProblem, init: Tuple[Optional[jnp.ndarray], Optional[jnp.ndarray], Optional[jnp.ndarray]] = (None, None, None), - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, **kwargs: Any, ) -> LRGWOutput: """Run low-rank Gromov-Wasserstein solver. diff --git a/src/ott/solvers/quadratic/gw_barycenter.py b/src/ott/solvers/quadratic/gw_barycenter.py index ea14880fe..f0d350b08 100644 --- a/src/ott/solvers/quadratic/gw_barycenter.py +++ b/src/ott/solvers/quadratic/gw_barycenter.py @@ -136,7 +136,7 @@ def init_state( bar_init: Optional[Union[jnp.ndarray, Tuple[jnp.ndarray, jnp.ndarray]]] = None, a: Optional[jnp.ndarray] = None, - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, ) -> GWBarycenterState: """Initialize the (fused) Gromov-Wasserstein barycenter state. 
diff --git a/src/ott/tools/k_means.py b/src/ott/tools/k_means.py index abbe99f34..986b919d0 100644 --- a/src/ott/tools/k_means.py +++ b/src/ott/tools/k_means.py @@ -352,7 +352,7 @@ def k_means( min_iterations: int = 0, max_iterations: int = 300, store_inner_errors: bool = False, - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, ) -> KMeansOutput: r"""K-means clustering using Lloyd's algorithm :cite:`lloyd:82`. diff --git a/src/ott/tools/soft_sort.py b/src/ott/tools/soft_sort.py index beb88365f..ccde3bd2c 100644 --- a/src/ott/tools/soft_sort.py +++ b/src/ott/tools/soft_sort.py @@ -459,7 +459,7 @@ def multivariate_cdf_quantile_maps( inputs: jnp.ndarray, target_sampler: Optional[Callable[[jnp.ndarray, Tuple[int, int]], jnp.ndarray]] = None, - rng: Optional[jnp.ndarray] = None, + rng: Optional[jax.Array] = None, num_target_samples: Optional[int] = None, cost_fn: Optional[costs.CostFn] = None, epsilon: Optional[float] = None, diff --git a/src/ott/utils.py b/src/ott/utils.py index 2acfd8420..63a36f2b4 100644 --- a/src/ott/utils.py +++ b/src/ott/utils.py @@ -69,7 +69,7 @@ def wrapper(*args: Any, **kwargs: Any) -> Any: return functools.wraps(func)(wrapper) -def default_prng_key(rng: Optional[jnp.ndarray] = None) -> jnp.ndarray: +def default_prng_key(rng: Optional[jax.Array] = None) -> jnp.ndarray: """Get the default PRNG key. 
Args: diff --git a/tests/neural/neuraldual_test.py b/tests/neural/neuraldual_test.py index b31ba9b6a..8a362affa 100644 --- a/tests/neural/neuraldual_test.py +++ b/tests/neural/neuraldual_test.py @@ -19,7 +19,7 @@ import numpy as np from ott import datasets -from ott.neural import models +from ott.neural.models import models from ott.neural.solvers import conjugate, neuraldual ModelPair_t = Tuple[neuraldual.BaseW2NeuralDual, neuraldual.BaseW2NeuralDual] From 542dd0a7b6d509622afdcf2b6bb455198f1669d8 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Wed, 29 Nov 2023 17:28:40 +0100 Subject: [PATCH 051/186] fix import error --- src/ott/neural/solvers/neuraldual.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ott/neural/solvers/neuraldual.py b/src/ott/neural/solvers/neuraldual.py index 019fb836f..455d6b50e 100644 --- a/src/ott/neural/solvers/neuraldual.py +++ b/src/ott/neural/solvers/neuraldual.py @@ -36,7 +36,7 @@ from ott import utils from ott.geometry import costs -from ott.neural import models +from ott.neural.models import models from ott.neural.solvers import conjugate from ott.problems.linear import potentials From f585c247d1fe5a07d12cd9d52247ebb7157d1e5f Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Fri, 1 Dec 2023 18:01:55 +0100 Subject: [PATCH 052/186] [ci skip] start to incorporate feedback --- docs/tutorials/point_clouds.ipynb | 2 +- src/ott/geometry/geometry.py | 2 +- src/ott/geometry/low_rank.py | 2 +- src/ott/geometry/pointcloud.py | 2 +- src/ott/neural/data/dataloaders.py | 9 ++-- src/ott/neural/models/base_models.py | 61 --------------------------- src/ott/neural/models/layers.py | 18 +++++++- src/ott/neural/models/models.py | 46 ++++++-------------- src/ott/neural/solvers/base_solver.py | 27 ++++-------- src/ott/neural/solvers/flows.py | 40 ++++++------------ src/ott/neural/solvers/genot.py | 15 ++++--- src/ott/neural/solvers/neuraldual.py | 8 +--- src/ott/neural/solvers/otfm.py | 15 ++++--- tests/neural/genot_test.py | 16 
+++---- tests/neural/otfm_test.py | 10 ++--- 15 files changed, 89 insertions(+), 184 deletions(-) delete mode 100644 src/ott/neural/models/base_models.py diff --git a/docs/tutorials/point_clouds.ipynb b/docs/tutorials/point_clouds.ipynb index c01b51cfd..156bafaa9 100644 --- a/docs/tutorials/point_clouds.ipynb +++ b/docs/tutorials/point_clouds.ipynb @@ -64,7 +64,7 @@ }, "outputs": [], "source": [ - "def create_points(rng: jax.random.PRNGKeyArray, n: int, m: int, d: int):\n", + "def create_points(rng: jax.Array, n: int, m: int, d: int):\n", " rngs = jax.random.split(rng, 3)\n", " x = jax.random.normal(rngs[0], (n, d)) + 1\n", " y = jax.random.uniform(rngs[1], (m, d))\n", diff --git a/src/ott/geometry/geometry.py b/src/ott/geometry/geometry.py index 766c5e618..f953bf38c 100644 --- a/src/ott/geometry/geometry.py +++ b/src/ott/geometry/geometry.py @@ -201,7 +201,7 @@ def is_symmetric(self) -> bool: @property def inv_scale_cost(self) -> float: """Compute and return inverse of scaling factor for cost matrix.""" - if isinstance(self._scale_cost, (int, float, np.number, jnp.ndarray)): + if isinstance(self._scale_cost, (int, float, np.number, jax.Array)): return 1.0 / self._scale_cost self = self._masked_geom(mask_value=jnp.nan) if self._scale_cost == "max_cost": diff --git a/src/ott/geometry/low_rank.py b/src/ott/geometry/low_rank.py index 966db28d4..e759b4cb9 100644 --- a/src/ott/geometry/low_rank.py +++ b/src/ott/geometry/low_rank.py @@ -107,7 +107,7 @@ def is_symmetric(self) -> bool: # noqa: D102 @property def inv_scale_cost(self) -> float: # noqa: D102 - if isinstance(self._scale_cost, (int, float, jnp.ndarray)): + if isinstance(self._scale_cost, (int, float, jax.Array)): return 1.0 / self._scale_cost self = self._masked_geom() if self._scale_cost == "max_bound": diff --git a/src/ott/geometry/pointcloud.py b/src/ott/geometry/pointcloud.py index 2050e1562..e7f46a020 100644 --- a/src/ott/geometry/pointcloud.py +++ b/src/ott/geometry/pointcloud.py @@ -141,7 +141,7 @@ def 
cost_rank(self) -> int: # noqa: D102 @property def inv_scale_cost(self) -> float: # noqa: D102 - if isinstance(self._scale_cost, (int, float, jnp.ndarray)): + if isinstance(self._scale_cost, (int, float, jax.Array)): return 1.0 / self._scale_cost self = self._masked_geom() if self._scale_cost == "max_cost": diff --git a/src/ott/neural/data/dataloaders.py b/src/ott/neural/data/dataloaders.py index 4fe8a9a8c..832117013 100644 --- a/src/ott/neural/data/dataloaders.py +++ b/src/ott/neural/data/dataloaders.py @@ -43,7 +43,7 @@ def __init__( source_conditions: Optional[np.ndarray] = None, target_conditions: Optional[np.ndarray] = None, seed: int = 0, - ) -> None: + ): super().__init__() if source_lin is not None: if source_quad is not None: @@ -115,11 +115,8 @@ class ConditionalDataLoader: """ def __init__( - self, - dataloaders: Dict[str, Iterator], - p: np.ndarray, - seed: int = 0 - ) -> None: + self, dataloaders: Dict[str, Iterator], p: np.ndarray, seed: int = 0 + ): super().__init__() self.dataloaders = dataloaders self.conditions = list(dataloaders.keys()) diff --git a/src/ott/neural/models/base_models.py b/src/ott/neural/models/base_models.py deleted file mode 100644 index d3ac7526a..000000000 --- a/src/ott/neural/models/base_models.py +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright OTT-JAX -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import abc -from typing import Optional - -import jax.numpy as jnp - -import flax.linen as nn - -__all__ = ["BaseNeuralVectorField", "BaseRescalingNet"] - - -class BaseNeuralVectorField(nn.Module, abc.ABC): - """Base class for neural vector field models.""" - - @abc.abstractmethod - def __call__( - self, - t: jnp.ndarray, - x: jnp.ndarray, - condition: Optional[jnp.ndarray] = None, - keys_model: Optional[jnp.ndarray] = None - ) -> jnp.ndarray: - """"Evaluate the vector field. - - Args: - t: Time. - x: Input data. - condition: Condition. - keys_model: Random keys for the model. - """ - pass - - -class BaseRescalingNet(nn.Module, abc.ABC): - """Base class for models to learn distributional rescaling factors.""" - - @abc.abstractmethod - def __call__( - self, - x: jnp.ndarray, - condition: Optional[jnp.ndarray] = None - ) -> jnp.ndarray: - """Evaluate the model. - - Args: - x: Input data. - condition: Condition. - """ - pass diff --git a/src/ott/neural/models/layers.py b/src/ott/neural/models/layers.py index 79e6394bc..50c2c6301 100644 --- a/src/ott/neural/models/layers.py +++ b/src/ott/neural/models/layers.py @@ -18,14 +18,28 @@ import flax.linen as nn -__all__ = ["PositiveDense", "PosDefPotentials"] +__all__ = ["PositiveDense", "PosDefPotentials", "MLPBlock"] -PRNGKey = jnp.ndarray +PRNGKey = jax.Array Shape = Tuple[int, ...] Dtype = Any Array = Any +class MLPBlock(nn.Module): + dim: int = 128 + num_layers: int = 3 + act_fn: Any = nn.silu + out_dim: int = 32 + + @nn.compact + def __call__(self, x): + for _ in range(self.num_layers): + x = nn.Dense(self.dim)(x) + x = self.act_fn(x) + return nn.Dense(self.out_dim)(x) + + class PositiveDense(nn.Module): """A linear transformation using a weight matrix with all entries positive. 
diff --git a/src/ott/neural/models/models.py b/src/ott/neural/models/models.py index 8e2562e97..0fc7d4f30 100644 --- a/src/ott/neural/models/models.py +++ b/src/ott/neural/models/models.py @@ -28,16 +28,10 @@ from ott.initializers.linear import initializers as lin_init from ott.math import matrix_square_root from ott.neural.models import layers -from ott.neural.models.base_models import ( - BaseNeuralVectorField, - BaseRescalingNet, -) from ott.neural.solvers import neuraldual from ott.problems.linear import linear_problem -__all__ = [ - "ICNN", "MLP", "MetaInitializer", "NeuralVectorField", "RescalingMLP" -] +__all__ = ["ICNN", "MLP", "MetaInitializer", "VelocityField", "RescalingMLP"] class ICNN(neuraldual.BaseW2NeuralDual): @@ -76,7 +70,7 @@ class ICNN(neuraldual.BaseW2NeuralDual): def is_potential(self) -> bool: # noqa: D102 return True - def setup(self) -> None: # noqa: D102 + def setup(self): # noqa: D102 self.num_hidden = len(self.dim_hidden) if self.pos_weights: @@ -410,21 +404,7 @@ def tree_flatten(self) -> Tuple[Sequence[Any], Dict[str, Any]]: # noqa: D102 } -class Block(nn.Module): - dim: int = 128 - num_layers: int = 3 - act_fn: Any = nn.silu - out_dim: int = 32 - - @nn.compact - def __call__(self, x): - for _ in range(self.num_layers): - x = nn.Dense(self.dim)(x) - x = self.act_fn(x) - return nn.Dense(self.out_dim)(x) - - -class NeuralVectorField(BaseNeuralVectorField): +class VelocityField(nn.Module): """Parameterized neural vector field. Each of the input, condition, and time embeddings are passed through a block @@ -515,7 +495,7 @@ def __call__( Output of the neural vector field. 
""" t = self.time_encoder(t) - t = Block( + t = layers.MLPBlock( dim=self.t_embed_dim, out_dim=self.t_embed_dim, num_layers=self.num_layers_per_block, @@ -524,7 +504,7 @@ def __call__( t ) - x = Block( + x = layers.MLPBlock( dim=self.latent_embed_dim, out_dim=self.latent_embed_dim, num_layers=self.num_layers_per_block, @@ -534,7 +514,7 @@ def __call__( ) if self.condition_dim > 0: - condition = Block( + condition = layers.MLPBlock( dim=self.condition_embed_dim, out_dim=self.condition_embed_dim, num_layers=self.num_layers_per_block, @@ -546,7 +526,7 @@ def __call__( else: concatenated = jnp.concatenate((t, x), axis=-1) - out = Block( + out = layers.MLPBlock( dim=self.joint_hidden_dim, out_dim=self.joint_hidden_dim, num_layers=self.num_layers_per_block, @@ -564,7 +544,7 @@ def __call__( def create_train_state( self, - rng: jax.random.PRNGKeyArray, + rng: jax.Array, optimizer: optax.OptState, input_dim: int, ) -> train_state.TrainState: @@ -587,7 +567,7 @@ def create_train_state( ) -class RescalingMLP(BaseRescalingNet): +class RescalingMLP(nn.Module): """Network to learn distributional rescaling factors based on a MLP. The input is passed through a block consisting of ``num_layers_per_block`` @@ -626,7 +606,7 @@ def __call__( Returns: Estimated rescaling factors. 
""" - x = Block( + x = layers.MLPBlock( dim=self.hidden_dim, out_dim=self.hidden_dim, num_layers=self.num_layers_per_block, @@ -636,7 +616,7 @@ def __call__( ) if self.condition_dim > 0: condition = jnp.atleast_1d(condition) - condition = Block( + condition = layers.MLPBlock( dim=self.hidden_dim, out_dim=self.hidden_dim, num_layers=self.num_layers_per_block, @@ -648,7 +628,7 @@ def __call__( else: concatenated = x - out = Block( + out = layers.MLPBlock( dim=self.hidden_dim, out_dim=self.hidden_dim, num_layers=self.num_layers_per_block, @@ -661,7 +641,7 @@ def __call__( def create_train_state( self, - rng: jax.random.PRNGKeyArray, + rng: jax.Array, optimizer: optax.OptState, input_dim: int, ) -> train_state.TrainState: diff --git a/src/ott/neural/solvers/base_solver.py b/src/ott/neural/solvers/base_solver.py index fe0ea6f3d..780bf61ad 100644 --- a/src/ott/neural/solvers/base_solver.py +++ b/src/ott/neural/solvers/base_solver.py @@ -23,8 +23,6 @@ from flax.training import train_state from ott.geometry import costs, pointcloud -from ott.geometry.pointcloud import PointCloud -from ott.neural.models import models from ott.problems.linear import linear_problem from ott.problems.quadratic import quadratic_problem from ott.solvers.linear import sinkhorn @@ -38,48 +36,39 @@ class BaseNeuralSolver(ABC): valid_freq: Frequency at which to run validation. 
""" - def __init__(self, iterations: int, valid_freq: int, **_: Any) -> None: + def __init__(self, iterations: int, valid_freq: int, **_: Any): self.iterations = iterations self.valid_freq = valid_freq @abstractmethod - def setup(self, *args: Any, **kwargs: Any) -> None: + def setup(self, *args: Any, **kwargs: Any): """Setup the model.""" - pass @abstractmethod - def __call__(self, *args: Any, **kwargs: Any) -> None: + def __call__(self, *args: Any, **kwargs: Any): """Train the model.""" - pass @abstractmethod def transport(self, *args: Any, forward: bool, **kwargs: Any) -> Any: """Transport.""" - pass @abstractmethod def save(self, path: Path): """Save the model.""" - pass @abstractmethod def load(self, path: Path): """Load the model.""" - pass @property @abstractmethod def training_logs(self) -> Dict[str, Any]: """Return the training logs.""" - pass class ResampleMixin: """Mixin class for mini-batch OT in neural optimal transport solvers.""" - def __init__(*args, **kwargs): - pass - def _resample_data( self, key: jax.random.KeyArray, @@ -264,8 +253,10 @@ def __init__( cond_dim: Optional[int], tau_a: float = 1.0, tau_b: float = 1.0, - mlp_eta: Optional[models.BaseRescalingNet] = None, - mlp_xi: Optional[models.BaseRescalingNet] = None, + mlp_eta: Optional[Callable[[jnp.ndarray, Optional[jnp.ndarray]], + jnp.ndarray]] = None, + mlp_xi: Optional[Callable[[jnp.ndarray, Optional[jnp.ndarray]], + jnp.ndarray]] = None, seed: Optional[int] = None, opt_eta: Optional[optax.GradientTransformation] = None, opt_xi: Optional[optax.GradientTransformation] = None, @@ -274,7 +265,7 @@ def __init__( "median"]] = "mean", sinkhorn_kwargs: Mapping[str, Any] = MappingProxyType({}), **_: Any, - ) -> None: + ): self.rng_unbalanced = rng self.source_dim = source_dim self.target_dim = target_dim @@ -313,7 +304,7 @@ def _get_compute_unbalanced_marginals( def compute_unbalanced_marginals( batch_source: jnp.ndarray, batch_target: jnp.ndarray ) -> Tuple[jnp.ndarray, jnp.ndarray]: - geom = 
PointCloud( + geom = pointcloud.PointCloud( batch_source, batch_target, epsilon=resample_epsilon, diff --git a/src/ott/neural/solvers/flows.py b/src/ott/neural/solvers/flows.py index 47be01fc5..0ff81a560 100644 --- a/src/ott/neural/solvers/flows.py +++ b/src/ott/neural/solvers/flows.py @@ -29,11 +29,13 @@ class BaseFlow(abc.ABC): sigma: Constant noise used for computing time-dependent noise schedule. """ - def __init__(self, sigma: float) -> None: + def __init__(self, sigma: float): self.sigma = sigma @abc.abstractmethod - def compute_mu_t(self, t: jnp.ndarray, x_0: jnp.ndarray, x_1: jnp.ndarray): + def compute_mu_t( + self, t: jnp.ndarray, x_0: jnp.ndarray, x_1: jnp.ndarray + ) -> jnp.ndarray: """Compute the mean of the probablitiy path. Compute the mean of the probablitiy path between :math:`x` and :math:`y` @@ -44,7 +46,6 @@ def compute_mu_t(self, t: jnp.ndarray, x_0: jnp.ndarray, x_1: jnp.ndarray): x_0: Sample from the source distribution. x_1: Sample from the target distribution. """ - pass @abc.abstractmethod def compute_sigma_t(self, t: jnp.ndarray): @@ -53,7 +54,6 @@ def compute_sigma_t(self, t: jnp.ndarray): Args: t: Time :math:`t`. """ - pass @abc.abstractmethod def compute_ut( @@ -61,15 +61,14 @@ def compute_ut( ) -> jnp.ndarray: """Evaluate the conditional vector field. - Evaluate the conditional vector field defined between :math:`x_0` and - :math:`x_1` at time :math:`t`. + Evaluate the conditional vector field defined between :math:`x_0` and + :math:`x_1` at time :math:`t`. Args: t: Time :math:`t`. x_0: Sample from the source distribution. x_1: Sample from the target distribution. """ - pass def compute_xt( self, noise: jnp.ndarray, t: jnp.ndarray, x_0: jnp.ndarray, @@ -77,8 +76,8 @@ def compute_xt( ) -> jnp.ndarray: """Sample from the probability path. - Sample from the probability path between :math:`x_0` and :math:`x_1` at - time :math:`t`. + Sample from the probability path between :math:`x_0` and :math:`x_1` at + time :math:`t`. 
Args: noise: Noise sampled from a standard normal distribution. @@ -88,7 +87,7 @@ def compute_xt( Returns: Samples from the probability path between :math:`x_0` and :math:`x_1` - at time :math:`t`. + at time :math:`t`. """ mu_t = self.compute_mu_t(t, x_0, x_1) sigma_t = self.compute_sigma_t(t) @@ -101,16 +100,6 @@ class StraightFlow(BaseFlow, abc.ABC): def compute_mu_t( self, t: jnp.ndarray, x_0: jnp.ndarray, x_1: jnp.ndarray ) -> jnp.ndarray: - """Compute the mean of the probablitiy path. - - Compute the mean of the probablitiy path between :math:`x` and :math:`y` - at time :math:`t`. - - Args: - t: Time :math:`t`. - x_0: Sample from the source distribution. - x_1: Sample from the target distribution. - """ return t * x_0 + (1 - t) * x_1 def compute_ut( @@ -119,7 +108,7 @@ def compute_ut( """Evaluate the conditional vector field. Evaluate the conditional vector field defined between :math:`x_0` and - :math:`x_1` at time :math:`t`. + :math:`x_1` at time :math:`t`. Args: t: Time :math:`t`. @@ -175,7 +164,7 @@ class BaseTimeSampler(abc.ABC): high: Upper bound of the distribution to sample from . """ - def __init__(self, low: float, high: float) -> None: + def __init__(self, low: float, high: float): self.low = low self.high = high @@ -187,7 +176,6 @@ def __call__(self, rng: jax.Array, num_samples: int) -> jnp.ndarray: rng: Random number generator. num_samples: Number of samples to generate. """ - pass class UniformSampler(BaseTimeSampler): @@ -198,7 +186,7 @@ class UniformSampler(BaseTimeSampler): high: Upper bound of the uniform distribution. """ - def __init__(self, low: float = 0.0, high: float = 1.0) -> None: + def __init__(self, low: float = 0.0, high: float = 1.0): super().__init__(low=low, high=high) def __call__(self, rng: jax.Array, num_samples: int) -> jnp.ndarray: @@ -228,9 +216,7 @@ class OffsetUniformSampler(BaseTimeSampler): high: Upper bound of the uniform distribution. 
""" - def __init__( - self, offset: float, low: float = 0.0, high: float = 1.0 - ) -> None: + def __init__(self, offset: float, low: float = 0.0, high: float = 1.0): super().__init__(low=low, high=high) self.offset = offset diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/solvers/genot.py index 61b368a67..fb76ded77 100644 --- a/src/ott/neural/solvers/genot.py +++ b/src/ott/neural/solvers/genot.py @@ -26,7 +26,6 @@ from ott import utils from ott.geometry import costs -from ott.neural.models.models import BaseNeuralVectorField from ott.neural.solvers.base_solver import ( BaseNeuralSolver, ResampleMixin, @@ -100,7 +99,9 @@ class GENOT(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): def __init__( self, - neural_vector_field: Type[BaseNeuralVectorField], + neural_vector_field: Callable[[ + jnp.ndarray, jnp.ndarray, Optional[jnp.ndarray], Optional[jnp.ndarray] + ], jnp.ndarray], input_dim: int, output_dim: int, cond_dim: int, @@ -132,7 +133,7 @@ def __init__( callback_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray, jnp.ndarray], Any]] = None, rng: Optional[jax.Array] = None, - ) -> None: + ): rng = utils.default_prng_key(rng) rng, rng_unbalanced = jax.random.split(rng) BaseNeuralSolver.__init__( @@ -192,7 +193,7 @@ def __init__( self.callback_fn = callback_fn self.setup() - def setup(self) -> None: + def setup(self): """Set up the model. 
Parameters @@ -230,7 +231,7 @@ def setup(self) -> None: self.fused_penalty ) - def __call__(self, train_loader, valid_loader) -> None: + def __call__(self, train_loader, valid_loader): """Train GENOT.""" batch: Dict[str, jnp.array] = {} for iteration in range(self.iterations): @@ -439,7 +440,7 @@ def solve_ode(input: jnp.ndarray, cond: jnp.ndarray): return jax.vmap(solve_ode)(latent_batch, cond_input) - def _valid_step(self, valid_loader, iter) -> None: + def _valid_step(self, valid_loader, iter): """TODO.""" next(valid_loader) @@ -448,7 +449,7 @@ def learn_rescaling(self) -> bool: """Whether to learn at least one rescaling factor.""" return self.mlp_eta is not None or self.mlp_xi is not None - def save(self, path: str) -> None: + def save(self, path: str): """Save the model. Args: diff --git a/src/ott/neural/solvers/neuraldual.py b/src/ott/neural/solvers/neuraldual.py index 455d6b50e..ade11a085 100644 --- a/src/ott/neural/solvers/neuraldual.py +++ b/src/ott/neural/solvers/neuraldual.py @@ -69,10 +69,6 @@ class W2NeuralTrainState(train_state.TrainState): ) -class BaseNeuralVectorField(nn.Module): - pass - - class BaseW2NeuralDual(abc.ABC, nn.Module): """Base class for the neural solver models.""" @@ -295,7 +291,7 @@ def setup( dim_data: int, optimizer_f: optax.OptState, optimizer_g: optax.OptState, - ) -> None: + ): """Setup all components required to train the network.""" # split random number generator rng, rng_f, rng_g = jax.random.split(rng, 3) @@ -700,7 +696,7 @@ def _update_logs( loss_f: jnp.ndarray, loss_g: jnp.ndarray, w_dist: jnp.ndarray, - ) -> None: + ): logs["loss_f"].append(float(loss_f)) logs["loss_g"].append(float(loss_g)) logs["w_dist"].append(float(w_dist)) diff --git a/src/ott/neural/solvers/otfm.py b/src/ott/neural/solvers/otfm.py index d145c4128..b7885c1d5 100644 --- a/src/ott/neural/solvers/otfm.py +++ b/src/ott/neural/solvers/otfm.py @@ -36,7 +36,6 @@ from ott import utils from ott.geometry import costs -from ott.neural.models.models import 
BaseNeuralVectorField from ott.neural.solvers.base_solver import ( BaseNeuralSolver, ResampleMixin, @@ -92,7 +91,9 @@ class OTFlowMatching(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): def __init__( self, - neural_vector_field: Type[BaseNeuralVectorField], + neural_vector_field: Callable[[ + jnp.ndarray, jnp.ndarray, Optional[jnp.ndarray], Optional[jnp.ndarray] + ], jnp.ndarray], input_dim: int, cond_dim: int, iterations: int, @@ -117,7 +118,7 @@ def __init__( valid_freq: int = 5000, num_eval_samples: int = 1000, rng: Optional[jax.Array] = None, - ) -> None: + ): rng = utils.default_prng_key(rng) rng, rng_unbalanced = jax.random.split(rng) BaseNeuralSolver.__init__( @@ -155,7 +156,7 @@ def __init__( self.setup() - def setup(self) -> None: + def setup(self): """Setup :class:`OTFlowMatching`.""" self.state_neural_vector_field = ( self.neural_vector_field.create_train_state( @@ -218,7 +219,7 @@ def loss_fn( return step_fn - def __call__(self, train_loader, valid_loader) -> None: + def __call__(self, train_loader, valid_loader): """Train :class:`OTFlowMatching`. Args; @@ -330,7 +331,7 @@ def solve_ode(input: jnp.ndarray, cond: jnp.ndarray): return jax.vmap(solve_ode)(data, condition) - def _valid_step(self, valid_loader, iter) -> None: + def _valid_step(self, valid_loader, iter): next(valid_loader) # TODO: add callback and logging @@ -339,7 +340,7 @@ def learn_rescaling(self) -> bool: """Whether to learn at least one rescaling factor.""" return self.mlp_eta is not None or self.mlp_xi is not None - def save(self, path: str) -> None: + def save(self, path: str): """Save the model. 
Args: diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index fddc4fc3c..92a929154 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -20,7 +20,7 @@ import optax from ott.geometry import costs -from ott.neural.models.models import NeuralVectorField, RescalingMLP +from ott.neural.models.models import RescalingMLP, VelocityField from ott.neural.solvers.flows import OffsetUniformSampler, UniformSampler from ott.neural.solvers.genot import GENOT from ott.solvers.linear import sinkhorn @@ -47,7 +47,7 @@ def test_genot_linear_unconditional( target_dim = target_lin.shape[1] condition_dim = 0 - neural_vf = NeuralVectorField( + neural_vf = VelocityField( output_dim=target_dim, condition_dim=source_dim + condition_dim, latent_embed_dim=5, @@ -97,7 +97,7 @@ def test_genot_quad_unconditional( source_dim = source_quad.shape[1] target_dim = target_quad.shape[1] condition_dim = 0 - neural_vf = NeuralVectorField( + neural_vf = VelocityField( output_dim=target_dim, condition_dim=source_dim + condition_dim, latent_embed_dim=5, @@ -143,7 +143,7 @@ def test_genot_fused_unconditional( source_dim = source_lin.shape[1] + source_quad.shape[1] target_dim = target_lin.shape[1] + target_quad.shape[1] condition_dim = 0 - neural_vf = NeuralVectorField( + neural_vf = VelocityField( output_dim=target_dim, condition_dim=source_dim + condition_dim, latent_embed_dim=5, @@ -190,7 +190,7 @@ def test_genot_linear_conditional( target_dim = target_lin.shape[1] condition_dim = source_condition.shape[1] - neural_vf = NeuralVectorField( + neural_vf = VelocityField( output_dim=target_dim, condition_dim=source_dim + condition_dim, latent_embed_dim=5, @@ -237,7 +237,7 @@ def test_genot_quad_conditional( source_dim = source_quad.shape[1] target_dim = target_quad.shape[1] condition_dim = source_condition.shape[1] - neural_vf = NeuralVectorField( + neural_vf = VelocityField( output_dim=target_dim, condition_dim=source_dim + condition_dim, latent_embed_dim=5, @@ -284,7 
+284,7 @@ def test_genot_fused_conditional( source_dim = source_lin.shape[1] + source_quad.shape[1] target_dim = target_lin.shape[1] + target_quad.shape[1] condition_dim = source_condition.shape[1] - neural_vf = NeuralVectorField( + neural_vf = VelocityField( output_dim=target_dim, condition_dim=source_dim + condition_dim, latent_embed_dim=5, @@ -340,7 +340,7 @@ def test_genot_linear_learn_rescaling( target_dim = target_lin.shape[1] condition_dim = source_condition.shape[1] if conditional else 0 - neural_vf = NeuralVectorField( + neural_vf = VelocityField( output_dim=target_dim, condition_dim=source_dim + condition_dim, latent_embed_dim=5, diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index a57588a43..b38fceb74 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -19,7 +19,7 @@ import optax -from ott.neural.models.models import NeuralVectorField, RescalingMLP +from ott.neural.models.models import RescalingMLP, VelocityField from ott.neural.solvers.flows import ( BaseFlow, BrownianNoiseFlow, @@ -40,7 +40,7 @@ class TestOTFlowMatching: BrownianNoiseFlow(0.2)] ) def test_flow_matching(self, data_loader_gaussian, flow: Type[BaseFlow]): - neural_vf = NeuralVectorField( + neural_vf = VelocityField( output_dim=2, condition_dim=0, latent_embed_dim=5, @@ -85,7 +85,7 @@ def test_flow_matching(self, data_loader_gaussian, flow: Type[BaseFlow]): def test_flow_matching_with_conditions( self, data_loader_gaussian_with_conditions, flow: Type[BaseFlow] ): - neural_vf = NeuralVectorField( + neural_vf = VelocityField( output_dim=2, condition_dim=1, latent_embed_dim=5, @@ -133,7 +133,7 @@ def test_flow_matching_with_conditions( def test_flow_matching_conditional( self, data_loader_gaussian_conditional, flow: Type[BaseFlow] ): - neural_vf = NeuralVectorField( + neural_vf = VelocityField( output_dim=2, condition_dim=0, latent_embed_dim=5, @@ -181,7 +181,7 @@ def test_flow_matching_learn_rescaling( batch = next(data_loader) source_dim = 
batch["source_lin"].shape[1] condition_dim = batch["source_conditions"].shape[1] if conditional else 0 - neural_vf = NeuralVectorField( + neural_vf = VelocityField( output_dim=2, condition_dim=0, latent_embed_dim=5, From 3c0700973881763f25aa8d8f712c3954f8389d40 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Mon, 4 Dec 2023 11:02:17 +0100 Subject: [PATCH 053/186] restructure neural module --- src/ott/neural/__init__.py | 2 +- src/ott/neural/data/dataloaders.py | 1 - src/ott/neural/duality/__init__.py | 14 + .../neural/{solvers => duality}/conjugate.py | 0 src/ott/neural/duality/layers.py | 140 +++++ src/ott/neural/duality/models.py | 362 +++++++++++++ .../neural/{solvers => duality}/neuraldual.py | 2 +- src/ott/neural/{solvers => flows}/__init__.py | 2 +- src/ott/neural/{solvers => flows}/flows.py | 2 +- src/ott/neural/{solvers => flows}/genot.py | 12 +- src/ott/neural/flows/models.py | 188 +++++++ src/ott/neural/{solvers => flows}/otfm.py | 4 +- src/ott/neural/gaps/__init__.py | 14 + .../neural/{solvers => gaps}/map_estimator.py | 2 +- .../{models/losses.py => gaps/monge_gap.py} | 0 src/ott/neural/models/__init__.py | 2 +- .../neural/{solvers => models}/base_solver.py | 2 + src/ott/neural/models/layers.py | 128 +---- src/ott/neural/models/models.py | 506 +----------------- tests/neural/genot_test.py | 7 +- tests/neural/losses_test.py | 15 +- tests/neural/map_estimator_test.py | 7 +- tests/neural/neuraldual_test.py | 2 +- tests/neural/otfm_test.py | 7 +- 24 files changed, 768 insertions(+), 653 deletions(-) create mode 100644 src/ott/neural/duality/__init__.py rename src/ott/neural/{solvers => duality}/conjugate.py (100%) create mode 100644 src/ott/neural/duality/layers.py create mode 100644 src/ott/neural/duality/models.py rename src/ott/neural/{solvers => duality}/neuraldual.py (99%) rename src/ott/neural/{solvers => flows}/__init__.py (91%) rename src/ott/neural/{solvers => flows}/flows.py (99%) rename src/ott/neural/{solvers => flows}/genot.py (99%) create 
mode 100644 src/ott/neural/flows/models.py rename src/ott/neural/{solvers => flows}/otfm.py (99%) create mode 100644 src/ott/neural/gaps/__init__.py rename src/ott/neural/{solvers => gaps}/map_estimator.py (99%) rename src/ott/neural/{models/losses.py => gaps/monge_gap.py} (100%) rename src/ott/neural/{solvers => models}/base_solver.py (99%) diff --git a/src/ott/neural/__init__.py b/src/ott/neural/__init__.py index 326fae432..2a61ca021 100644 --- a/src/ott/neural/__init__.py +++ b/src/ott/neural/__init__.py @@ -11,4 +11,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from . import data, models, solvers +from . import data, duality, flows, gaps, models diff --git a/src/ott/neural/data/dataloaders.py b/src/ott/neural/data/dataloaders.py index 832117013..9c09ce08c 100644 --- a/src/ott/neural/data/dataloaders.py +++ b/src/ott/neural/data/dataloaders.py @@ -111,7 +111,6 @@ class ConditionalDataLoader: conditions. p: Probability of sampling from each data loader. seed: Random seed. - """ def __init__( diff --git a/src/ott/neural/duality/__init__.py b/src/ott/neural/duality/__init__.py new file mode 100644 index 000000000..ef76b42fa --- /dev/null +++ b/src/ott/neural/duality/__init__.py @@ -0,0 +1,14 @@ +# Copyright OTT-JAX +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from . 
import conjugate, layers, models, neuraldual diff --git a/src/ott/neural/solvers/conjugate.py b/src/ott/neural/duality/conjugate.py similarity index 100% rename from src/ott/neural/solvers/conjugate.py rename to src/ott/neural/duality/conjugate.py diff --git a/src/ott/neural/duality/layers.py b/src/ott/neural/duality/layers.py new file mode 100644 index 000000000..4b85972f3 --- /dev/null +++ b/src/ott/neural/duality/layers.py @@ -0,0 +1,140 @@ +# Copyright OTT-JAX +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import Any, Callable, Optional, Tuple + +import jax +import jax.numpy as jnp + +import flax.linen as nn + +__all__ = ["PositiveDense", "PosDefPotentials"] + +PRNGKey = jax.Array +Shape = Tuple[int, ...] +Dtype = Any +Array = Any + + +class PositiveDense(nn.Module): + """A linear transformation using a weight matrix with all entries positive. + + Args: + dim_hidden: the number of output dim_hidden. + rectifier_fn: choice of rectifier function (default: softplus function). + inv_rectifier_fn: choice of inverse rectifier function + (default: inverse softplus function). + dtype: the dtype of the computation (default: float32). + precision: numerical precision of computation see `jax.lax.Precision` + for details. + kernel_init: initializer function for the weight matrix. + bias_init: initializer function for the bias. 
+ """ + dim_hidden: int + rectifier_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.softplus + inv_rectifier_fn: Callable[[jnp.ndarray], + jnp.ndarray] = lambda x: jnp.log(jnp.exp(x) - 1) + use_bias: bool = True + dtype: Any = jnp.float32 + precision: Any = None + kernel_init: Optional[Callable[[PRNGKey, Shape, Dtype], Array]] = None, + bias_init: Callable[[PRNGKey, Shape, Dtype], Array] = nn.initializers.zeros + + @nn.compact + def __call__(self, inputs: jnp.ndarray) -> jnp.ndarray: + """Applies a linear transformation to inputs along the last dimension. + + Args: + inputs: Array to be transformed. + + Returns: + The transformed input. + """ + kernel_init = nn.initializers.lecun_normal( + ) if self.kernel_init is None else self.kernel_init + + inputs = jnp.asarray(inputs, self.dtype) + kernel = self.param( + "kernel", kernel_init, (inputs.shape[-1], self.dim_hidden) + ) + kernel = self.rectifier_fn(kernel) + kernel = jnp.asarray(kernel, self.dtype) + y = jax.lax.dot_general( + inputs, + kernel, (((inputs.ndim - 1,), (0,)), ((), ())), + precision=self.precision + ) + if self.use_bias: + bias = self.param("bias", self.bias_init, (self.dim_hidden,)) + bias = jnp.asarray(bias, self.dtype) + return y + bias + return y + + +class PosDefPotentials(nn.Module): + r"""A layer to output :math:`\frac{1}{2} ||A_i^T (x - b_i)||^2_i` potentials. + + Args: + use_bias: whether to add a bias to the output. + dtype: the dtype of the computation. + precision: numerical precision of computation see `jax.lax.Precision` + for details. + kernel_init: initializer function for the weight matrix. + bias_init: initializer function for the bias. 
+ """ + dim_data: int + num_potentials: int + use_bias: bool = True + dtype: Any = jnp.float32 + precision: Any = None + kernel_init: Optional[Callable[[PRNGKey, Shape, Dtype], Array]] = None + bias_init: Callable[[PRNGKey, Shape, Dtype], Array] = nn.initializers.zeros + + @nn.compact + def __call__(self, inputs: jnp.ndarray) -> jnp.ndarray: + """Apply a few quadratic forms. + + Args: + inputs: Array to be transformed (possibly batched). + + Returns: + The transformed input. + """ + kernel_init = nn.initializers.lecun_normal( + ) if self.kernel_init is None else self.kernel_init + inputs = jnp.asarray(inputs, self.dtype) + kernel = self.param( + "kernel", kernel_init, + (self.num_potentials, inputs.shape[-1], inputs.shape[-1]) + ) + + if self.use_bias: + bias = self.param( + "bias", self.bias_init, (self.num_potentials, self.dim_data) + ) + bias = jnp.asarray(bias, self.dtype) + + y = inputs.reshape((-1, inputs.shape[-1])) if inputs.ndim == 1 else inputs + y = y[..., None] - bias.T[None, ...] + y = jax.lax.dot_general( + y, kernel, (((1,), (1,)), ((2,), (0,))), precision=self.precision + ) + else: + y = jax.lax.dot_general( + inputs, + kernel, (((inputs.ndim - 1,), (0,)), ((), ())), + precision=self.precision + ) + + y = 0.5 * y * y + return jnp.sum(y.reshape((-1, self.num_potentials, self.dim_data)), axis=2) diff --git a/src/ott/neural/duality/models.py b/src/ott/neural/duality/models.py new file mode 100644 index 000000000..2b51c60cf --- /dev/null +++ b/src/ott/neural/duality/models.py @@ -0,0 +1,362 @@ +# Copyright OTT-JAX +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import functools +from typing import Any, Callable, Dict, Optional, Sequence, Tuple + +import jax +import jax.numpy as jnp +from jax.nn import initializers + +import flax.linen as nn +import optax +from flax.core import frozen_dict +from flax.training import train_state + +from ott import utils +from ott.geometry import geometry +from ott.initializers.linear import initializers as lin_init +from ott.math import matrix_square_root +from ott.neural.duality import neuraldual +from ott.neural.models import layers +from ott.problems.linear import linear_problem + +__all__ = ["ICNN", "MetaInitializer"] + + +class ICNN(neuraldual.BaseW2NeuralDual): + """Input convex neural network (ICNN) architecture with initialization. + + Implementation of input convex neural networks as introduced in + :cite:`amos:17` with initialization schemes proposed by :cite:`bunne:22`. + + Args: + dim_data: data dimensionality. + dim_hidden: sequence specifying size of hidden dimensions. The + output dimension of the last layer is 1 by default. + init_std: value of standard deviation of weight initialization method. + init_fn: choice of initialization method for weight matrices (default: + :func:`jax.nn.initializers.normal`). + act_fn: choice of activation function used in network architecture + (needs to be convex, default: :obj:`jax.nn.relu`). + pos_weights: Enforce positive weights with a projection. + If ``False``, the positive weights should be enforced with clipping + or regularization in the loss. 
+ gaussian_map_samples: Tuple of source and target points, used to initialize + the ICNN to mimic the linear Bures map that morphs the (Gaussian + approximation) of the input measure to that of the target measure. If + ``None``, the identity initialization is used, and ICNN mimics half the + squared Euclidean norm. + """ + dim_data: int + dim_hidden: Sequence[int] + init_std: float = 1e-2 + init_fn: Callable = jax.nn.initializers.normal + act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.relu + pos_weights: bool = True + gaussian_map_samples: Optional[Tuple[jnp.ndarray, jnp.ndarray]] = None + + @property + def is_potential(self) -> bool: # noqa: D102 + return True + + def setup(self): # noqa: D102 + self.num_hidden = len(self.dim_hidden) + + if self.pos_weights: + hid_dense = layers.PositiveDense + # this function needs to be the inverse map of function + # used in PositiveDense layers + rescale = hid_dense.inv_rectifier_fn + else: + hid_dense = nn.Dense + rescale = lambda x: x + self.use_init = False + # check if Gaussian map was provided + if self.gaussian_map_samples is not None: + factor, mean = self._compute_gaussian_map_params( + self.gaussian_map_samples + ) + else: + factor, mean = self._compute_identity_map_params(self.dim_data) + + w_zs = [] + # keep track of previous size to normalize accordingly + normalization = 1 + + for i in range(1, self.num_hidden): + w_zs.append( + hid_dense( + self.dim_hidden[i], + kernel_init=initializers.constant(rescale(1.0 / normalization)), + use_bias=False, + ) + ) + normalization = self.dim_hidden[i] + # final layer computes average, still with normalized rescaling + w_zs.append( + hid_dense( + 1, + kernel_init=initializers.constant(rescale(1.0 / normalization)), + use_bias=False, + ) + ) + self.w_zs = w_zs + + # positive definite potential (the identity mapping or linear OT) + self.pos_def_potential = layers.PosDefPotentials( + self.dim_data, + num_potentials=1, + kernel_init=lambda *_: factor, + bias_init=lambda *_: 
mean, + use_bias=True, + ) + + # subsequent layers re-injected into convex functions + w_xs = [] + for i in range(self.num_hidden): + w_xs.append( + nn.Dense( + self.dim_hidden[i], + kernel_init=self.init_fn(self.init_std), + bias_init=initializers.constant(0.), + use_bias=True, + ) + ) + # final layer, to output number + w_xs.append( + nn.Dense( + 1, + kernel_init=self.init_fn(self.init_std), + bias_init=initializers.constant(0.), + use_bias=True, + ) + ) + self.w_xs = w_xs + + @staticmethod + def _compute_gaussian_map_params( + samples: Tuple[jnp.ndarray, jnp.ndarray] + ) -> Tuple[jnp.ndarray, jnp.ndarray]: + from ott.tools.gaussian_mixture import gaussian + source, target = samples + g_s = gaussian.Gaussian.from_samples(source) + g_t = gaussian.Gaussian.from_samples(target) + lin_op = g_s.scale.gaussian_map(g_t.scale) + b = jnp.squeeze(g_t.loc) - jnp.linalg.solve(lin_op, jnp.squeeze(g_t.loc)) + lin_op = matrix_square_root.sqrtm_only(lin_op) + return jnp.expand_dims(lin_op, 0), jnp.expand_dims(b, 0) + + @staticmethod + def _compute_identity_map_params( + input_dim: int + ) -> Tuple[jnp.ndarray, jnp.ndarray]: + A = jnp.eye(input_dim).reshape((1, input_dim, input_dim)) + b = jnp.zeros((1, input_dim)) + return A, b + + @nn.compact + def __call__(self, x: jnp.ndarray) -> float: # noqa: D102 + z = self.act_fn(self.w_xs[0](x)) + for i in range(self.num_hidden): + z = jnp.add(self.w_zs[i](z), self.w_xs[i + 1](x)) + z = self.act_fn(z) + z += self.pos_def_potential(x) + return z.squeeze() + + +@jax.tree_util.register_pytree_node_class +class MetaInitializer(lin_init.DefaultInitializer): + """Meta OT Initializer with a fixed geometry :cite:`amos:22`. + + This initializer consists of a predictive model that outputs the + :math:`f` duals to solve the entropy-regularized OT problem given + input probability weights ``a`` and ``b``, and a given (assumed to be + fixed) geometry ``geom``. 
+ + The model's parameters are learned using a training set of OT + instances (multiple pairs of probability weights), that assume the + **same** geometry ``geom`` is used throughout, both for training and + evaluation. + + Args: + geom: The fixed geometry of the problem instances. + meta_model: The model to predict the potential :math:`f` from the measures. + TODO(marcocuturi): add explanation here what arguments to expect. + opt: The optimizer to update the parameters. If ``None``, use + :func:`optax.adam` with :math:`0.001` learning rate. + rng: The PRNG key to use for initializing the model. + state: The training state of the model to start from. + + Examples: + The following code shows a simple + example of using ``update`` to train the model, where + ``a`` and ``b`` are the weights of the measures and + ``geom`` is the fixed geometry. + + .. code-block:: python + + meta_initializer = init_lib.MetaInitializer(geom) + while training(): + a, b = sample_batch() + loss, init_f, meta_initializer.state = meta_initializer.update( + meta_initializer.state, a=a, b=b + ) + """ + + def __init__( + self, + geom: geometry.Geometry, + meta_model: nn.Module, + opt: Optional[optax.GradientTransformation + ] = optax.adam(learning_rate=1e-3), # noqa: B008 + rng: Optional[jax.Array] = None, + state: Optional[train_state.TrainState] = None + ): + self.geom = geom + self.dtype = geom.x.dtype + self.opt = opt + self.rng = utils.default_prng_key(rng) + + na, nb = geom.shape + # TODO(michalk8): add again some default MLP + self.meta_model = meta_model + + if state is None: + # Initialize the model's training state. 
+ a_placeholder = jnp.zeros(na, dtype=self.dtype) + b_placeholder = jnp.zeros(nb, dtype=self.dtype) + params = self.meta_model.init(self.rng, a_placeholder, + b_placeholder)["params"] + self.state = train_state.TrainState.create( + apply_fn=self.meta_model.apply, params=params, tx=opt + ) + else: + self.state = state + + self.update_impl = self._get_update_fn() + + def update( + self, state: train_state.TrainState, a: jnp.ndarray, b: jnp.ndarray + ) -> Tuple[jnp.ndarray, jnp.ndarray, train_state.TrainState]: + r"""Update the meta model with the dual objective. + + The goal is for the model to match the optimal duals, i.e., + :math:`\hat f_\theta \approx f^\star`. + This can be done by training the predictions of :math:`\hat f_\theta` + to optimize the dual objective, which :math:`f^\star` also optimizes for. + The overall learning setup can thus be written as: + + .. math:: + \min_\theta\; {\mathbb E}_{(\alpha,\beta)\sim{\mathcal{D}}}\; + J(\hat f_\theta(a, b); \alpha, \beta), + + where :math:`a,b` are the probabilities of the measures :math:`\alpha,\beta` + ,:math:`\mathcal{D}` is a meta distribution of optimal transport problems, + + .. math:: + -J(f; \alpha, \beta, c) := \langle f, a\rangle + \langle g, b \rangle - + \varepsilon\left\langle \exp\{f/\varepsilon\}, K\exp\{g/\varepsilon\} + \right\rangle + + is the entropic dual objective, + and :math:`K_{i,j} := -C_{i,j}/\varepsilon` is the *Gibbs kernel*. + + Args: + state: Optimizer state of the meta model. + a: Probabilities of the :math:`\alpha` measure's atoms. + b: Probabilities of the :math:`\beta` measure's atoms. + + Returns: + The training loss, :math:`f`, and updated state. + """ + return self.update_impl(state, a, b) + + def init_dual_a( # noqa: D102 + self, + ot_prob: "linear_problem.LinearProblem", + lse_mode: bool, + rng: Optional[jax.Array] = None, + ) -> jnp.ndarray: + del rng + # Detect if the problem is batched. 
+ assert ot_prob.a.ndim in (1, 2) + assert ot_prob.b.ndim in (1, 2) + vmap_a_val = 0 if ot_prob.a.ndim == 2 else None + vmap_b_val = 0 if ot_prob.b.ndim == 2 else None + + if vmap_a_val is not None or vmap_b_val is not None: + compute_f_maybe_batch = jax.vmap( + self._compute_f, in_axes=(vmap_a_val, vmap_b_val, None) + ) + else: + compute_f_maybe_batch = self._compute_f + + init_f = compute_f_maybe_batch(ot_prob.a, ot_prob.b, self.state.params) + return init_f if lse_mode else ot_prob.geom.scaling_from_potential(init_f) + + def _get_update_fn(self): + """Return the implementation (and jitted) update function.""" + from ott.problems.linear import linear_problem + from ott.solvers.linear import sinkhorn + + def dual_obj_loss_single(params, a, b): + f_pred = self._compute_f(a, b, params) + g_pred = self.geom.update_potential( + f_pred, jnp.zeros_like(b), jnp.log(b), 0, axis=0 + ) + g_pred = jnp.where(jnp.isfinite(g_pred), g_pred, 0.) + + ot_prob = linear_problem.LinearProblem(geom=self.geom, a=a, b=b) + dual_obj = sinkhorn.compute_kl_reg_cost( + f_pred, g_pred, ot_prob, lse_mode=True + ) + loss = -dual_obj + return loss, f_pred + + def loss_batch(params, a, b): + loss_fn = functools.partial(dual_obj_loss_single, params=params) + loss, f_pred = jax.vmap(loss_fn)(a=a, b=b) + return jnp.mean(loss), f_pred + + @jax.jit + def update(state, a, b): + a = jnp.atleast_2d(a) + b = jnp.atleast_2d(b) + grad_fn = jax.value_and_grad(loss_batch, has_aux=True) + (loss, init_f), grads = grad_fn(state.params, a, b) + return loss, init_f, state.apply_gradients(grads=grads) + + return update + + def _compute_f( + self, a: jnp.ndarray, b: jnp.ndarray, + params: frozen_dict.FrozenDict[str, jnp.ndarray] + ) -> jnp.ndarray: + r"""Predict the optimal :math:`f` potential. + + Args: + a: Probabilities of the :math:`\alpha` measure's atoms. + b: Probabilities of the :math:`\beta` measure's atoms. + params: The parameters of the Meta model. + + Returns: + The :math:`f` potential. 
+ """ + return self.meta_model.apply({"params": params}, a, b) + + def tree_flatten(self) -> Tuple[Sequence[Any], Dict[str, Any]]: # noqa: D102 + return [self.geom, self.meta_model, self.opt], { + "rng": self.rng, + "state": self.state + } diff --git a/src/ott/neural/solvers/neuraldual.py b/src/ott/neural/duality/neuraldual.py similarity index 99% rename from src/ott/neural/solvers/neuraldual.py rename to src/ott/neural/duality/neuraldual.py index ade11a085..1d1aaa85b 100644 --- a/src/ott/neural/solvers/neuraldual.py +++ b/src/ott/neural/duality/neuraldual.py @@ -36,8 +36,8 @@ from ott import utils from ott.geometry import costs +from ott.neural.duality import conjugate from ott.neural.models import models -from ott.neural.solvers import conjugate from ott.problems.linear import potentials __all__ = ["W2NeuralTrainState", "BaseW2NeuralDual", "W2NeuralDual"] diff --git a/src/ott/neural/solvers/__init__.py b/src/ott/neural/flows/__init__.py similarity index 91% rename from src/ott/neural/solvers/__init__.py rename to src/ott/neural/flows/__init__.py index b09d8c60b..695cbbe3c 100644 --- a/src/ott/neural/solvers/__init__.py +++ b/src/ott/neural/flows/__init__.py @@ -11,4 +11,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from . import conjugate, map_estimator, neuraldual +from . 
import flows, genot, models, otfm diff --git a/src/ott/neural/solvers/flows.py b/src/ott/neural/flows/flows.py similarity index 99% rename from src/ott/neural/solvers/flows.py rename to src/ott/neural/flows/flows.py index 0ff81a560..93f471b9d 100644 --- a/src/ott/neural/solvers/flows.py +++ b/src/ott/neural/flows/flows.py @@ -97,7 +97,7 @@ def compute_xt( class StraightFlow(BaseFlow, abc.ABC): """Base class for flows with straight paths.""" - def compute_mu_t( + def compute_mu_t( # noqa: D102 self, t: jnp.ndarray, x_0: jnp.ndarray, x_1: jnp.ndarray ) -> jnp.ndarray: return t * x_0 + (1 - t) * x_1 diff --git a/src/ott/neural/solvers/genot.py b/src/ott/neural/flows/genot.py similarity index 99% rename from src/ott/neural/solvers/genot.py rename to src/ott/neural/flows/genot.py index fb76ded77..fa5ada781 100644 --- a/src/ott/neural/solvers/genot.py +++ b/src/ott/neural/flows/genot.py @@ -26,17 +26,17 @@ from ott import utils from ott.geometry import costs -from ott.neural.solvers.base_solver import ( - BaseNeuralSolver, - ResampleMixin, - UnbalancednessMixin, -) -from ott.neural.solvers.flows import ( +from ott.neural.flows.flows import ( BaseFlow, BaseTimeSampler, ConstantNoiseFlow, UniformSampler, ) +from ott.neural.models.base_solver import ( + BaseNeuralSolver, + ResampleMixin, + UnbalancednessMixin, +) from ott.solvers import was_solver from ott.solvers.linear import sinkhorn from ott.solvers.quadratic import gromov_wasserstein diff --git a/src/ott/neural/flows/models.py b/src/ott/neural/flows/models.py new file mode 100644 index 000000000..4cf671a19 --- /dev/null +++ b/src/ott/neural/flows/models.py @@ -0,0 +1,188 @@ +# Copyright OTT-JAX +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import Callable, Optional + +import jax +import jax.numpy as jnp + +import flax.linen as nn +import optax +from flax.training import train_state + +from ott.neural.models import layers + +__all__ = ["VelocityField"] + + +class VelocityField(nn.Module): + """Parameterized neural vector field. + + Each of the input, condition, and time embeddings are passed through a block + consisting of ``num_layers_per_block`` layers of dimension + ``latent_embed_dim``, ``condition_embed_dim``, and ``time_embed_dim``, + respectively. + The output of each block is concatenated and passed through a final block of + dimension ``joint_hidden_dim``. + + Args: + output_dim: Dimensionality of the neural vector field. + condition_dim: Dimensionality of the conditioning vector. + latent_embed_dim: Dimensionality of the embedding of the data. + condition_embed_dim: Dimensionality of the embedding of the condition. + If ``None``, set to ``latent_embed_dim``. + t_embed_dim: Dimensionality of the time embedding. + If ``None``, set to ``latent_embed_dim``. + joint_hidden_dim: Dimensionality of the hidden layers of the joint network. + If ``None``, set to ``latent_embed_dim + condition_embed_dim + + t_embed_dim``. + num_layers_per_block: Number of layers per block. + act_fn: Activation function. + n_frequencies: Number of frequencies to use for the time embedding. 
+ + """ + output_dim: int + condition_dim: int + latent_embed_dim: int + condition_embed_dim: Optional[int] = None + t_embed_dim: Optional[int] = None + joint_hidden_dim: Optional[int] = None + num_layers_per_block: int = 3 + act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.silu + n_frequencies: int = 128 + + def time_encoder(self, t: jnp.ndarray) -> jnp.array: + """Encode the time. + + Args: + t: Time. + + Returns: + Encoded time. + """ + freq = 2 * jnp.arange(self.n_frequencies) * jnp.pi + t = freq * t + return jnp.concatenate((jnp.cos(t), jnp.sin(t)), axis=-1) + + def __post_init__(self): + + # set embedded dim from latent embedded dim + if self.condition_embed_dim is None: + self.condition_embed_dim = self.latent_embed_dim + if self.t_embed_dim is None: + self.t_embed_dim = self.latent_embed_dim + + # set joint hidden dim from all embedded dim + concat_embed_dim = ( + self.latent_embed_dim + self.condition_embed_dim + self.t_embed_dim + ) + if self.joint_hidden_dim is not None: + assert (self.joint_hidden_dim >= concat_embed_dim), ( + "joint_hidden_dim must be greater than or equal to the sum of " + "all embedded dimensions. " + ) + self.joint_hidden_dim = self.latent_embed_dim + else: + self.joint_hidden_dim = concat_embed_dim + super().__post_init__() + + @nn.compact + def __call__( + self, + t: jnp.ndarray, + x: jnp.ndarray, + condition: Optional[jnp.ndarray], + keys_model: Optional[jnp.ndarray] = None, + ) -> jnp.ndarray: + """Forward pass through the neural vector field. + + Args: + t: Time. + x: Data. + condition: Conditioning vector. + keys_model: Random number generator. + + Returns: + Output of the neural vector field. 
+ """ + t = self.time_encoder(t) + t = layers.MLPBlock( + dim=self.t_embed_dim, + out_dim=self.t_embed_dim, + num_layers=self.num_layers_per_block, + act_fn=self.act_fn, + )( + t + ) + + x = layers.MLPBlock( + dim=self.latent_embed_dim, + out_dim=self.latent_embed_dim, + num_layers=self.num_layers_per_block, + act_fn=self.act_fn + )( + x + ) + + if self.condition_dim > 0: + condition = layers.MLPBlock( + dim=self.condition_embed_dim, + out_dim=self.condition_embed_dim, + num_layers=self.num_layers_per_block, + act_fn=self.act_fn + )( + condition + ) + concatenated = jnp.concatenate((t, x, condition), axis=-1) + else: + concatenated = jnp.concatenate((t, x), axis=-1) + + out = layers.MLPBlock( + dim=self.joint_hidden_dim, + out_dim=self.joint_hidden_dim, + num_layers=self.num_layers_per_block, + act_fn=self.act_fn, + )( + concatenated + ) + + return nn.Dense( + self.output_dim, + use_bias=True, + )( + out + ) + + def create_train_state( + self, + rng: jax.Array, + optimizer: optax.OptState, + input_dim: int, + ) -> train_state.TrainState: + """Create the training state. + + Args: + rng: Random number generator. + optimizer: Optimizer. + input_dim: Dimensionality of the input. + + Returns: + Training state. 
+ """ + params = self.init( + rng, jnp.ones((1, 1)), jnp.ones((1, input_dim)), + jnp.ones((1, self.condition_dim)) + )["params"] + return train_state.TrainState.create( + apply_fn=self.apply, params=params, tx=optimizer + ) diff --git a/src/ott/neural/solvers/otfm.py b/src/ott/neural/flows/otfm.py similarity index 99% rename from src/ott/neural/solvers/otfm.py rename to src/ott/neural/flows/otfm.py index b7885c1d5..ed0114f6d 100644 --- a/src/ott/neural/solvers/otfm.py +++ b/src/ott/neural/flows/otfm.py @@ -36,12 +36,12 @@ from ott import utils from ott.geometry import costs -from ott.neural.solvers.base_solver import ( +from ott.neural.flows.flows import BaseFlow, BaseTimeSampler +from ott.neural.models.base_solver import ( BaseNeuralSolver, ResampleMixin, UnbalancednessMixin, ) -from ott.neural.solvers.flows import BaseFlow, BaseTimeSampler from ott.solvers import was_solver __all__ = ["OTFlowMatching"] diff --git a/src/ott/neural/gaps/__init__.py b/src/ott/neural/gaps/__init__.py new file mode 100644 index 000000000..0ba36da05 --- /dev/null +++ b/src/ott/neural/gaps/__init__.py @@ -0,0 +1,14 @@ +# Copyright OTT-JAX +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from . 
import map_estimator, monge_gap diff --git a/src/ott/neural/solvers/map_estimator.py b/src/ott/neural/gaps/map_estimator.py similarity index 99% rename from src/ott/neural/solvers/map_estimator.py rename to src/ott/neural/gaps/map_estimator.py index fb65917c7..cfcc8cb86 100644 --- a/src/ott/neural/solvers/map_estimator.py +++ b/src/ott/neural/gaps/map_estimator.py @@ -32,7 +32,7 @@ from flax.training import train_state from ott import utils -from ott.neural.solvers import neuraldual +from ott.neural.duality import neuraldual __all__ = ["MapEstimator"] diff --git a/src/ott/neural/models/losses.py b/src/ott/neural/gaps/monge_gap.py similarity index 100% rename from src/ott/neural/models/losses.py rename to src/ott/neural/gaps/monge_gap.py diff --git a/src/ott/neural/models/__init__.py b/src/ott/neural/models/__init__.py index 1e374d236..5c2ac3b2b 100644 --- a/src/ott/neural/models/__init__.py +++ b/src/ott/neural/models/__init__.py @@ -11,4 +11,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from . import base_models, layers, losses, models +from . import base_solver, layers, models diff --git a/src/ott/neural/solvers/base_solver.py b/src/ott/neural/models/base_solver.py similarity index 99% rename from src/ott/neural/solvers/base_solver.py rename to src/ott/neural/models/base_solver.py index 780bf61ad..e60d25766 100644 --- a/src/ott/neural/solvers/base_solver.py +++ b/src/ott/neural/models/base_solver.py @@ -27,6 +27,8 @@ from ott.problems.quadratic import quadratic_problem from ott.solvers.linear import sinkhorn +__all__ = ["BaseNeuralSolver", "ResampleMixin", "UnbalancednessMixin"] + class BaseNeuralSolver(ABC): """Base class for neural solvers. 
diff --git a/src/ott/neural/models/layers.py b/src/ott/neural/models/layers.py index 50c2c6301..db8b24ae9 100644 --- a/src/ott/neural/models/layers.py +++ b/src/ott/neural/models/layers.py @@ -11,14 +11,13 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, Callable, Optional, Tuple +from typing import Any, Tuple import jax -import jax.numpy as jnp import flax.linen as nn -__all__ = ["PositiveDense", "PosDefPotentials", "MLPBlock"] +__all__ = ["MLPBlock"] PRNGKey = jax.Array Shape = Tuple[int, ...] @@ -27,128 +26,23 @@ class MLPBlock(nn.Module): + """A simple MLP block.""" dim: int = 128 num_layers: int = 3 act_fn: Any = nn.silu - out_dim: int = 32 + out_dim: int = 128 @nn.compact def __call__(self, x): - for _ in range(self.num_layers): - x = nn.Dense(self.dim)(x) - x = self.act_fn(x) - return nn.Dense(self.out_dim)(x) - - -class PositiveDense(nn.Module): - """A linear transformation using a weight matrix with all entries positive. - - Args: - dim_hidden: the number of output dim_hidden. - rectifier_fn: choice of rectifier function (default: softplus function). - inv_rectifier_fn: choice of inverse rectifier function - (default: inverse softplus function). - dtype: the dtype of the computation (default: float32). - precision: numerical precision of computation see `jax.lax.Precision` - for details. - kernel_init: initializer function for the weight matrix. - bias_init: initializer function for the bias. 
- """ - dim_hidden: int - rectifier_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.softplus - inv_rectifier_fn: Callable[[jnp.ndarray], - jnp.ndarray] = lambda x: jnp.log(jnp.exp(x) - 1) - use_bias: bool = True - dtype: Any = jnp.float32 - precision: Any = None - kernel_init: Optional[Callable[[PRNGKey, Shape, Dtype], Array]] = None, - bias_init: Callable[[PRNGKey, Shape, Dtype], Array] = nn.initializers.zeros - - @nn.compact - def __call__(self, inputs: jnp.ndarray) -> jnp.ndarray: - """Applies a linear transformation to inputs along the last dimension. + """Apply the MLP block. Args: - inputs: Array to be transformed. + x: Input data of shape (batch_size, dim) Returns: - The transformed input. + Output data of shape (batch_size, out_dim). """ - kernel_init = nn.initializers.lecun_normal( - ) if self.kernel_init is None else self.kernel_init - - inputs = jnp.asarray(inputs, self.dtype) - kernel = self.param( - "kernel", kernel_init, (inputs.shape[-1], self.dim_hidden) - ) - kernel = self.rectifier_fn(kernel) - kernel = jnp.asarray(kernel, self.dtype) - y = jax.lax.dot_general( - inputs, - kernel, (((inputs.ndim - 1,), (0,)), ((), ())), - precision=self.precision - ) - if self.use_bias: - bias = self.param("bias", self.bias_init, (self.dim_hidden,)) - bias = jnp.asarray(bias, self.dtype) - return y + bias - return y - - -class PosDefPotentials(nn.Module): - r"""A layer to output :math:`\frac{1}{2} ||A_i^T (x - b_i)||^2_i` potentials. - - Args: - use_bias: whether to add a bias to the output. - dtype: the dtype of the computation. - precision: numerical precision of computation see `jax.lax.Precision` - for details. - kernel_init: initializer function for the weight matrix. - bias_init: initializer function for the bias. 
- """ - dim_data: int - num_potentials: int - use_bias: bool = True - dtype: Any = jnp.float32 - precision: Any = None - kernel_init: Optional[Callable[[PRNGKey, Shape, Dtype], Array]] = None - bias_init: Callable[[PRNGKey, Shape, Dtype], Array] = nn.initializers.zeros - - @nn.compact - def __call__(self, inputs: jnp.ndarray) -> jnp.ndarray: - """Apply a few quadratic forms. - - Args: - inputs: Array to be transformed (possibly batched). - - Returns: - The transformed input. - """ - kernel_init = nn.initializers.lecun_normal( - ) if self.kernel_init is None else self.kernel_init - inputs = jnp.asarray(inputs, self.dtype) - kernel = self.param( - "kernel", kernel_init, - (self.num_potentials, inputs.shape[-1], inputs.shape[-1]) - ) - - if self.use_bias: - bias = self.param( - "bias", self.bias_init, (self.num_potentials, self.dim_data) - ) - bias = jnp.asarray(bias, self.dtype) - - y = inputs.reshape((-1, inputs.shape[-1])) if inputs.ndim == 1 else inputs - y = y[..., None] - bias.T[None, ...] - y = jax.lax.dot_general( - y, kernel, (((1,), (1,)), ((2,), (0,))), precision=self.precision - ) - else: - y = jax.lax.dot_general( - inputs, - kernel, (((inputs.ndim - 1,), (0,)), ((), ())), - precision=self.precision - ) - - y = 0.5 * y * y - return jnp.sum(y.reshape((-1, self.num_potentials, self.dim_data)), axis=2) + for _ in range(self.num_layers): + x = nn.Dense(self.dim)(x) + x = self.act_fn(x) + return nn.Dense(self.out_dim)(x) diff --git a/src/ott/neural/models/models.py b/src/ott/neural/models/models.py index 0fc7d4f30..78fd3d173 100644 --- a/src/ott/neural/models/models.py +++ b/src/ott/neural/models/models.py @@ -11,171 +11,21 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-import functools -from typing import Any, Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Optional, Sequence import jax import jax.numpy as jnp -from jax.nn import initializers import flax.linen as nn import optax -from flax.core import frozen_dict from flax.training import train_state -from ott import utils -from ott.geometry import geometry -from ott.initializers.linear import initializers as lin_init -from ott.math import matrix_square_root from ott.neural.models import layers -from ott.neural.solvers import neuraldual -from ott.problems.linear import linear_problem -__all__ = ["ICNN", "MLP", "MetaInitializer", "VelocityField", "RescalingMLP"] +__all__ = ["MLP", "RescalingMLP"] -class ICNN(neuraldual.BaseW2NeuralDual): - """Input convex neural network (ICNN) architecture with initialization. - - Implementation of input convex neural networks as introduced in - :cite:`amos:17` with initialization schemes proposed by :cite:`bunne:22`. - - Args: - dim_data: data dimensionality. - dim_hidden: sequence specifying size of hidden dimensions. The - output dimension of the last layer is 1 by default. - init_std: value of standard deviation of weight initialization method. - init_fn: choice of initialization method for weight matrices (default: - :func:`jax.nn.initializers.normal`). - act_fn: choice of activation function used in network architecture - (needs to be convex, default: :obj:`jax.nn.relu`). - pos_weights: Enforce positive weights with a projection. - If ``False``, the positive weights should be enforced with clipping - or regularization in the loss. - gaussian_map_samples: Tuple of source and target points, used to initialize - the ICNN to mimic the linear Bures map that morphs the (Gaussian - approximation) of the input measure to that of the target measure. If - ``None``, the identity initialization is used, and ICNN mimics half the - squared Euclidean norm. 
- """ - dim_data: int - dim_hidden: Sequence[int] - init_std: float = 1e-2 - init_fn: Callable = jax.nn.initializers.normal - act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.relu - pos_weights: bool = True - gaussian_map_samples: Optional[Tuple[jnp.ndarray, jnp.ndarray]] = None - - @property - def is_potential(self) -> bool: # noqa: D102 - return True - - def setup(self): # noqa: D102 - self.num_hidden = len(self.dim_hidden) - - if self.pos_weights: - hid_dense = layers.PositiveDense - # this function needs to be the inverse map of function - # used in PositiveDense layers - rescale = hid_dense.inv_rectifier_fn - else: - hid_dense = nn.Dense - rescale = lambda x: x - self.use_init = False - # check if Gaussian map was provided - if self.gaussian_map_samples is not None: - factor, mean = self._compute_gaussian_map_params( - self.gaussian_map_samples - ) - else: - factor, mean = self._compute_identity_map_params(self.dim_data) - - w_zs = [] - # keep track of previous size to normalize accordingly - normalization = 1 - - for i in range(1, self.num_hidden): - w_zs.append( - hid_dense( - self.dim_hidden[i], - kernel_init=initializers.constant(rescale(1.0 / normalization)), - use_bias=False, - ) - ) - normalization = self.dim_hidden[i] - # final layer computes average, still with normalized rescaling - w_zs.append( - hid_dense( - 1, - kernel_init=initializers.constant(rescale(1.0 / normalization)), - use_bias=False, - ) - ) - self.w_zs = w_zs - - # positive definite potential (the identity mapping or linear OT) - self.pos_def_potential = layers.PosDefPotentials( - self.dim_data, - num_potentials=1, - kernel_init=lambda *_: factor, - bias_init=lambda *_: mean, - use_bias=True, - ) - - # subsequent layers re-injected into convex functions - w_xs = [] - for i in range(self.num_hidden): - w_xs.append( - nn.Dense( - self.dim_hidden[i], - kernel_init=self.init_fn(self.init_std), - bias_init=initializers.constant(0.), - use_bias=True, - ) - ) - # final layer, to output number 
- w_xs.append( - nn.Dense( - 1, - kernel_init=self.init_fn(self.init_std), - bias_init=initializers.constant(0.), - use_bias=True, - ) - ) - self.w_xs = w_xs - - @staticmethod - def _compute_gaussian_map_params( - samples: Tuple[jnp.ndarray, jnp.ndarray] - ) -> Tuple[jnp.ndarray, jnp.ndarray]: - from ott.tools.gaussian_mixture import gaussian - source, target = samples - g_s = gaussian.Gaussian.from_samples(source) - g_t = gaussian.Gaussian.from_samples(target) - lin_op = g_s.scale.gaussian_map(g_t.scale) - b = jnp.squeeze(g_t.loc) - jnp.linalg.solve(lin_op, jnp.squeeze(g_t.loc)) - lin_op = matrix_square_root.sqrtm_only(lin_op) - return jnp.expand_dims(lin_op, 0), jnp.expand_dims(b, 0) - - @staticmethod - def _compute_identity_map_params( - input_dim: int - ) -> Tuple[jnp.ndarray, jnp.ndarray]: - A = jnp.eye(input_dim).reshape((1, input_dim, input_dim)) - b = jnp.zeros((1, input_dim)) - return A, b - - @nn.compact - def __call__(self, x: jnp.ndarray) -> float: # noqa: D102 - z = self.act_fn(self.w_xs[0](x)) - for i in range(self.num_hidden): - z = jnp.add(self.w_zs[i](z), self.w_xs[i + 1](x)) - z = self.act_fn(z) - z += self.pos_def_potential(x) - return z.squeeze() - - -class MLP(neuraldual.BaseW2NeuralDual): +class MLP(nn.Module): """A generic, not-convex MLP. Args: @@ -217,356 +67,6 @@ def __call__(self, x: jnp.ndarray) -> jnp.ndarray: # noqa: D102 return z.squeeze(0) if squeeze else z -@jax.tree_util.register_pytree_node_class -class MetaInitializer(lin_init.DefaultInitializer): - """Meta OT Initializer with a fixed geometry :cite:`amos:22`. - - This initializer consists of a predictive model that outputs the - :math:`f` duals to solve the entropy-regularized OT problem given - input probability weights ``a`` and ``b``, and a given (assumed to be - fixed) geometry ``geom``. 
- - The model's parameters are learned using a training set of OT - instances (multiple pairs of probability weights), that assume the - **same** geometry ``geom`` is used throughout, both for training and - evaluation. - - Args: - geom: The fixed geometry of the problem instances. - meta_model: The model to predict the potential :math:`f` from the measures. - TODO(marcocuturi): add explanation here what arguments to expect. - opt: The optimizer to update the parameters. If ``None``, use - :func:`optax.adam` with :math:`0.001` learning rate. - rng: The PRNG key to use for initializing the model. - state: The training state of the model to start from. - - Examples: - The following code shows a simple - example of using ``update`` to train the model, where - ``a`` and ``b`` are the weights of the measures and - ``geom`` is the fixed geometry. - - .. code-block:: python - - meta_initializer = init_lib.MetaInitializer(geom) - while training(): - a, b = sample_batch() - loss, init_f, meta_initializer.state = meta_initializer.update( - meta_initializer.state, a=a, b=b - ) - """ - - def __init__( - self, - geom: geometry.Geometry, - meta_model: nn.Module, - opt: Optional[optax.GradientTransformation - ] = optax.adam(learning_rate=1e-3), # noqa: B008 - rng: Optional[jax.Array] = None, - state: Optional[train_state.TrainState] = None - ): - self.geom = geom - self.dtype = geom.x.dtype - self.opt = opt - self.rng = utils.default_prng_key(rng) - - na, nb = geom.shape - # TODO(michalk8): add again some default MLP - self.meta_model = meta_model - - if state is None: - # Initialize the model's training state. 
- a_placeholder = jnp.zeros(na, dtype=self.dtype) - b_placeholder = jnp.zeros(nb, dtype=self.dtype) - params = self.meta_model.init(self.rng, a_placeholder, - b_placeholder)["params"] - self.state = train_state.TrainState.create( - apply_fn=self.meta_model.apply, params=params, tx=opt - ) - else: - self.state = state - - self.update_impl = self._get_update_fn() - - def update( - self, state: train_state.TrainState, a: jnp.ndarray, b: jnp.ndarray - ) -> Tuple[jnp.ndarray, jnp.ndarray, train_state.TrainState]: - r"""Update the meta model with the dual objective. - - The goal is for the model to match the optimal duals, i.e., - :math:`\hat f_\theta \approx f^\star`. - This can be done by training the predictions of :math:`\hat f_\theta` - to optimize the dual objective, which :math:`f^\star` also optimizes for. - The overall learning setup can thus be written as: - - .. math:: - \min_\theta\; {\mathbb E}_{(\alpha,\beta)\sim{\mathcal{D}}}\; - J(\hat f_\theta(a, b); \alpha, \beta), - - where :math:`a,b` are the probabilities of the measures :math:`\alpha,\beta` - ,:math:`\mathcal{D}` is a meta distribution of optimal transport problems, - - .. math:: - -J(f; \alpha, \beta, c) := \langle f, a\rangle + \langle g, b \rangle - - \varepsilon\left\langle \exp\{f/\varepsilon\}, K\exp\{g/\varepsilon\} - \right\rangle - - is the entropic dual objective, - and :math:`K_{i,j} := -C_{i,j}/\varepsilon` is the *Gibbs kernel*. - - Args: - state: Optimizer state of the meta model. - a: Probabilities of the :math:`\alpha` measure's atoms. - b: Probabilities of the :math:`\beta` measure's atoms. - - Returns: - The training loss, :math:`f`, and updated state. - """ - return self.update_impl(state, a, b) - - def init_dual_a( # noqa: D102 - self, - ot_prob: "linear_problem.LinearProblem", - lse_mode: bool, - rng: Optional[jax.Array] = None, - ) -> jnp.ndarray: - del rng - # Detect if the problem is batched. 
- assert ot_prob.a.ndim in (1, 2) - assert ot_prob.b.ndim in (1, 2) - vmap_a_val = 0 if ot_prob.a.ndim == 2 else None - vmap_b_val = 0 if ot_prob.b.ndim == 2 else None - - if vmap_a_val is not None or vmap_b_val is not None: - compute_f_maybe_batch = jax.vmap( - self._compute_f, in_axes=(vmap_a_val, vmap_b_val, None) - ) - else: - compute_f_maybe_batch = self._compute_f - - init_f = compute_f_maybe_batch(ot_prob.a, ot_prob.b, self.state.params) - return init_f if lse_mode else ot_prob.geom.scaling_from_potential(init_f) - - def _get_update_fn(self): - """Return the implementation (and jitted) update function.""" - from ott.problems.linear import linear_problem - from ott.solvers.linear import sinkhorn - - def dual_obj_loss_single(params, a, b): - f_pred = self._compute_f(a, b, params) - g_pred = self.geom.update_potential( - f_pred, jnp.zeros_like(b), jnp.log(b), 0, axis=0 - ) - g_pred = jnp.where(jnp.isfinite(g_pred), g_pred, 0.) - - ot_prob = linear_problem.LinearProblem(geom=self.geom, a=a, b=b) - dual_obj = sinkhorn.compute_kl_reg_cost( - f_pred, g_pred, ot_prob, lse_mode=True - ) - loss = -dual_obj - return loss, f_pred - - def loss_batch(params, a, b): - loss_fn = functools.partial(dual_obj_loss_single, params=params) - loss, f_pred = jax.vmap(loss_fn)(a=a, b=b) - return jnp.mean(loss), f_pred - - @jax.jit - def update(state, a, b): - a = jnp.atleast_2d(a) - b = jnp.atleast_2d(b) - grad_fn = jax.value_and_grad(loss_batch, has_aux=True) - (loss, init_f), grads = grad_fn(state.params, a, b) - return loss, init_f, state.apply_gradients(grads=grads) - - return update - - def _compute_f( - self, a: jnp.ndarray, b: jnp.ndarray, - params: frozen_dict.FrozenDict[str, jnp.ndarray] - ) -> jnp.ndarray: - r"""Predict the optimal :math:`f` potential. - - Args: - a: Probabilities of the :math:`\alpha` measure's atoms. - b: Probabilities of the :math:`\beta` measure's atoms. - params: The parameters of the Meta model. - - Returns: - The :math:`f` potential. 
- """ - return self.meta_model.apply({"params": params}, a, b) - - def tree_flatten(self) -> Tuple[Sequence[Any], Dict[str, Any]]: # noqa: D102 - return [self.geom, self.meta_model, self.opt], { - "rng": self.rng, - "state": self.state - } - - -class VelocityField(nn.Module): - """Parameterized neural vector field. - - Each of the input, condition, and time embeddings are passed through a block - consisting of ``num_layers_per_block`` layers of dimension - ``latent_embed_dim``, ``condition_embed_dim``, and ``time_embed_dim``, - respectively. - The output of each block is concatenated and passed through a final block of - dimension ``joint_hidden_dim``. - - Args: - output_dim: Dimensionality of the neural vector field. - condition_dim: Dimensionality of the conditioning vector. - latent_embed_dim: Dimensionality of the embedding of the data. - condition_embed_dim: Dimensionality of the embedding of the condition. - If ``None``, set to ``latent_embed_dim``. - t_embed_dim: Dimensionality of the time embedding. - If ``None``, set to ``latent_embed_dim``. - joint_hidden_dim: Dimensionality of the hidden layers of the joint network. - If ``None``, set to ``latent_embed_dim + condition_embed_dim + - t_embed_dim``. - num_layers_per_block: Number of layers per block. - act_fn: Activation function. - n_frequencies: Number of frequencies to use for the time embedding. - - """ - output_dim: int - condition_dim: int - latent_embed_dim: int - condition_embed_dim: Optional[int] = None - t_embed_dim: Optional[int] = None - joint_hidden_dim: Optional[int] = None - num_layers_per_block: int = 3 - act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.silu - n_frequencies: int = 128 - - def time_encoder(self, t: jnp.ndarray) -> jnp.array: - """Encode the time. - - Args: - t: Time. - - Returns: - Encoded time. 
- """ - freq = 2 * jnp.arange(self.n_frequencies) * jnp.pi - t = freq * t - return jnp.concatenate((jnp.cos(t), jnp.sin(t)), axis=-1) - - def __post_init__(self): - - # set embedded dim from latent embedded dim - if self.condition_embed_dim is None: - self.condition_embed_dim = self.latent_embed_dim - if self.t_embed_dim is None: - self.t_embed_dim = self.latent_embed_dim - - # set joint hidden dim from all embedded dim - concat_embed_dim = ( - self.latent_embed_dim + self.condition_embed_dim + self.t_embed_dim - ) - if self.joint_hidden_dim is not None: - assert (self.joint_hidden_dim >= concat_embed_dim), ( - "joint_hidden_dim must be greater than or equal to the sum of " - "all embedded dimensions. " - ) - self.joint_hidden_dim = self.latent_embed_dim - else: - self.joint_hidden_dim = concat_embed_dim - super().__post_init__() - - @nn.compact - def __call__( - self, - t: jnp.ndarray, - x: jnp.ndarray, - condition: Optional[jnp.ndarray], - keys_model: Optional[jnp.ndarray] = None, - ) -> jnp.ndarray: - """Forward pass through the neural vector field. - - Args: - t: Time. - x: Data. - condition: Conditioning vector. - keys_model: Random number generator. - - Returns: - Output of the neural vector field. 
- """ - t = self.time_encoder(t) - t = layers.MLPBlock( - dim=self.t_embed_dim, - out_dim=self.t_embed_dim, - num_layers=self.num_layers_per_block, - act_fn=self.act_fn, - )( - t - ) - - x = layers.MLPBlock( - dim=self.latent_embed_dim, - out_dim=self.latent_embed_dim, - num_layers=self.num_layers_per_block, - act_fn=self.act_fn - )( - x - ) - - if self.condition_dim > 0: - condition = layers.MLPBlock( - dim=self.condition_embed_dim, - out_dim=self.condition_embed_dim, - num_layers=self.num_layers_per_block, - act_fn=self.act_fn - )( - condition - ) - concatenated = jnp.concatenate((t, x, condition), axis=-1) - else: - concatenated = jnp.concatenate((t, x), axis=-1) - - out = layers.MLPBlock( - dim=self.joint_hidden_dim, - out_dim=self.joint_hidden_dim, - num_layers=self.num_layers_per_block, - act_fn=self.act_fn, - )( - concatenated - ) - - return nn.Dense( - self.output_dim, - use_bias=True, - )( - out - ) - - def create_train_state( - self, - rng: jax.Array, - optimizer: optax.OptState, - input_dim: int, - ) -> train_state.TrainState: - """Create the training state. - - Args: - rng: Random number generator. - optimizer: Optimizer. - input_dim: Dimensionality of the input. - - Returns: - Training state. - """ - params = self.init( - rng, jnp.ones((1, 1)), jnp.ones((1, input_dim)), - jnp.ones((1, self.condition_dim)) - )["params"] - return train_state.TrainState.create( - apply_fn=self.apply, params=params, tx=optimizer - ) - - class RescalingMLP(nn.Module): """Network to learn distributional rescaling factors based on a MLP. 
diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 92a929154..a962afca3 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -20,9 +20,10 @@ import optax from ott.geometry import costs -from ott.neural.models.models import RescalingMLP, VelocityField -from ott.neural.solvers.flows import OffsetUniformSampler, UniformSampler -from ott.neural.solvers.genot import GENOT +from ott.neural.flows.flows import OffsetUniformSampler, UniformSampler +from ott.neural.flows.genot import GENOT +from ott.neural.flows.models import VelocityField +from ott.neural.models.models import RescalingMLP from ott.solvers.linear import sinkhorn from ott.solvers.quadratic import gromov_wasserstein diff --git a/tests/neural/losses_test.py b/tests/neural/losses_test.py index f18681c7a..d6c9334cd 100644 --- a/tests/neural/losses_test.py +++ b/tests/neural/losses_test.py @@ -18,7 +18,8 @@ import numpy as np from ott.geometry import costs -from ott.neural.models import losses, models +from ott.neural import models +from ott.neural.gaps import monge_gap @pytest.mark.fast() @@ -39,13 +40,13 @@ def test_monge_gap_non_negativity( target = model.apply(params, reference_points) # compute the Monge gap based on samples - monge_gap_from_samples_value = losses.monge_gap_from_samples( + monge_gap_from_samples_value = monge_gap.monge_gap_from_samples( source=reference_points, target=target ) np.testing.assert_array_equal(monge_gap_from_samples_value >= 0, True) # Compute the Monge gap using model directly - monge_gap_value = losses.monge_gap( + monge_gap_value = monge_gap.monge_gap( map_fn=lambda x: model.apply(params, x), reference_points=reference_points ) @@ -60,10 +61,10 @@ def test_monge_gap_jit(self, rng: jax.Array): source = jax.random.normal(rng1, (n_samples, n_features)) target = jax.random.normal(rng2, (n_samples, n_features)) # define jitted monge gap - jit_monge_gap = jax.jit(losses.monge_gap_from_samples) + jit_monge_gap = 
jax.jit(monge_gap.monge_gap_from_samples) # compute the Monge gaps for different costs - monge_gap_value = losses.monge_gap_from_samples( + monge_gap_value = monge_gap.monge_gap_from_samples( source=source, target=target ) jit_monge_gap_value = jit_monge_gap(source, target) @@ -101,10 +102,10 @@ def test_monge_gap_from_samples_different_cost( target = jax.random.normal(rng2, (n_samples, n_features)) * .1 + 3. # compute the Monge gaps for the euclidean cost - monge_gap_from_samples_value_eucl = losses.monge_gap_from_samples( + monge_gap_from_samples_value_eucl = monge_gap.monge_gap_from_samples( source=source, target=target, cost_fn=costs.Euclidean() ) - monge_gap_from_samples_value_cost_fn = losses.monge_gap_from_samples( + monge_gap_from_samples_value_cost_fn = monge_gap.monge_gap_from_samples( source=source, target=target, cost_fn=cost_fn ) diff --git a/tests/neural/map_estimator_test.py b/tests/neural/map_estimator_test.py index b5df51170..f19c63b32 100644 --- a/tests/neural/map_estimator_test.py +++ b/tests/neural/map_estimator_test.py @@ -19,8 +19,7 @@ from ott import datasets from ott.geometry import pointcloud -from ott.neural.models import losses, models -from ott.neural.solvers import map_estimator +from ott.neural.gaps import map_estimator, monge_gap from ott.tools import sinkhorn_divergence @@ -47,11 +46,11 @@ def fitting_loss( return (div, None) def regularizer(x, y): - gap, out = losses.monge_gap_from_samples(x, y, return_output=True) + gap, out = monge_gap.monge_gap_from_samples(x, y, return_output=True) return gap, out.n_iters # define the model - model = models.MLP(dim_hidden=[16, 8], is_potential=False) + model = monge_gap.MLP(dim_hidden=[16, 8], is_potential=False) # generate data train_dataset, valid_dataset, dim_data = ( diff --git a/tests/neural/neuraldual_test.py b/tests/neural/neuraldual_test.py index 8a362affa..fc107ec75 100644 --- a/tests/neural/neuraldual_test.py +++ b/tests/neural/neuraldual_test.py @@ -19,8 +19,8 @@ import numpy as np 
from ott import datasets +from ott.neural.duality import conjugate, neuraldual from ott.neural.models import models -from ott.neural.solvers import conjugate, neuraldual ModelPair_t = Tuple[neuraldual.BaseW2NeuralDual, neuraldual.BaseW2NeuralDual] DatasetPair_t = Tuple[datasets.Dataset, datasets.Dataset] diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index b38fceb74..0af948705 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -19,15 +19,16 @@ import optax -from ott.neural.models.models import RescalingMLP, VelocityField -from ott.neural.solvers.flows import ( +from ott.neural.flows.flows import ( BaseFlow, BrownianNoiseFlow, ConstantNoiseFlow, OffsetUniformSampler, UniformSampler, ) -from ott.neural.solvers.otfm import OTFlowMatching +from ott.neural.flows.models import VelocityField +from ott.neural.flows.otfm import OTFlowMatching +from ott.neural.models.models import RescalingMLP from ott.solvers.linear import sinkhorn From 8f404f8284a40323a169ceee96248325e8db7361 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Mon, 4 Dec 2023 11:31:38 +0100 Subject: [PATCH 054/186] fix import errors --- src/ott/neural/duality/models.py | 47 +++++++++++++++++++++++++-- src/ott/neural/duality/neuraldual.py | 5 ++- src/ott/neural/models/models.py | 21 ++++++++++++ tests/neural/icnn_test.py | 2 +- tests/neural/losses_test.py | 2 +- tests/neural/map_estimator_test.py | 3 +- tests/neural/meta_initializer_test.py | 2 +- tests/neural/neuraldual_test.py | 10 +++--- 8 files changed, 77 insertions(+), 15 deletions(-) diff --git a/src/ott/neural/duality/models.py b/src/ott/neural/duality/models.py index 2b51c60cf..d10e09f55 100644 --- a/src/ott/neural/duality/models.py +++ b/src/ott/neural/duality/models.py @@ -27,11 +27,10 @@ from ott.geometry import geometry from ott.initializers.linear import initializers as lin_init from ott.math import matrix_square_root -from ott.neural.duality import neuraldual -from ott.neural.models import layers 
+from ott.neural.duality import layers, neuraldual from ott.problems.linear import linear_problem -__all__ = ["ICNN", "MetaInitializer"] +__all__ = ["ICNN", "PotentialMLP", "MetaInitializer"] class ICNN(neuraldual.BaseW2NeuralDual): @@ -175,6 +174,48 @@ def __call__(self, x: jnp.ndarray) -> float: # noqa: D102 return z.squeeze() +class PotentialMLP(neuraldual.BaseW2NeuralDual): + """A generic, not-convex MLP. + + Args: + dim_hidden: sequence specifying size of hidden dimensions. The output + dimension of the last layer is automatically set to 1 if + :attr:`is_potential` is ``True``, or the dimension of the input otherwise + is_potential: Model the potential if ``True``, otherwise + model the gradient of the potential + act_fn: Activation function + """ + + dim_hidden: Sequence[int] + is_potential: bool = True + act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.leaky_relu + + @nn.compact + def __call__(self, x: jnp.ndarray) -> jnp.ndarray: # noqa: D102 + squeeze = x.ndim == 1 + if squeeze: + x = jnp.expand_dims(x, 0) + assert x.ndim == 2, x.ndim + n_input = x.shape[-1] + + z = x + for n_hidden in self.dim_hidden: + Wx = nn.Dense(n_hidden, use_bias=True) + z = self.act_fn(Wx(z)) + + if self.is_potential: + Wx = nn.Dense(1, use_bias=True) + z = Wx(z).squeeze(-1) + + quad_term = 0.5 * jax.vmap(jnp.dot)(x, x) + z += quad_term + else: + Wx = nn.Dense(n_input, use_bias=True) + z = x + Wx(z) + + return z.squeeze(0) if squeeze else z + + @jax.tree_util.register_pytree_node_class class MetaInitializer(lin_init.DefaultInitializer): """Meta OT Initializer with a fixed geometry :cite:`amos:22`. 
diff --git a/src/ott/neural/duality/neuraldual.py b/src/ott/neural/duality/neuraldual.py index 1d1aaa85b..a8f5fd273 100644 --- a/src/ott/neural/duality/neuraldual.py +++ b/src/ott/neural/duality/neuraldual.py @@ -36,8 +36,7 @@ from ott import utils from ott.geometry import costs -from ott.neural.duality import conjugate -from ott.neural.models import models +from ott.neural.duality import conjugate, models from ott.problems.linear import potentials __all__ = ["W2NeuralTrainState", "BaseW2NeuralDual", "W2NeuralDual"] @@ -326,7 +325,7 @@ def setup( # default to using back_and_forth with the non-convex models if self.back_and_forth is None: - self.back_and_forth = isinstance(neural_f, models.MLP) + self.back_and_forth = isinstance(neural_f, models.PotentialMLP) if self.num_inner_iters == 1 and self.parallel_updates: self.train_step_parallel = self.get_step_fn( diff --git a/src/ott/neural/models/models.py b/src/ott/neural/models/models.py index 78fd3d173..df4a0e14e 100644 --- a/src/ott/neural/models/models.py +++ b/src/ott/neural/models/models.py @@ -66,6 +66,27 @@ def __call__(self, x: jnp.ndarray) -> jnp.ndarray: # noqa: D102 return z.squeeze(0) if squeeze else z + def create_train_state( + self, + rng: jax.Array, + optimizer: optax.OptState, + input_dim: int, + ) -> train_state.TrainState: + """Create the training state. + + Args: + rng: Random number generator. + optimizer: Optimizer. + input_dim: Dimensionality of the input. + + Returns: + Training state. + """ + params = self.init(rng, jnp.ones(input_dim))["params"] + return train_state.TrainState.create( + apply_fn=self.apply, params=params, tx=optimizer + ) + class RescalingMLP(nn.Module): """Network to learn distributional rescaling factors based on a MLP. 
diff --git a/tests/neural/icnn_test.py b/tests/neural/icnn_test.py index dba2f7b7c..541ecff38 100644 --- a/tests/neural/icnn_test.py +++ b/tests/neural/icnn_test.py @@ -17,7 +17,7 @@ import jax.numpy as jnp import numpy as np -from ott.neural.models import models +from ott.neural.duality import models @pytest.mark.fast() diff --git a/tests/neural/losses_test.py b/tests/neural/losses_test.py index d6c9334cd..6379b9dfa 100644 --- a/tests/neural/losses_test.py +++ b/tests/neural/losses_test.py @@ -18,8 +18,8 @@ import numpy as np from ott.geometry import costs -from ott.neural import models from ott.neural.gaps import monge_gap +from ott.neural.models import models @pytest.mark.fast() diff --git a/tests/neural/map_estimator_test.py b/tests/neural/map_estimator_test.py index f19c63b32..508143465 100644 --- a/tests/neural/map_estimator_test.py +++ b/tests/neural/map_estimator_test.py @@ -20,6 +20,7 @@ from ott import datasets from ott.geometry import pointcloud from ott.neural.gaps import map_estimator, monge_gap +from ott.neural.models import models from ott.tools import sinkhorn_divergence @@ -50,7 +51,7 @@ def regularizer(x, y): return gap, out.n_iters # define the model - model = monge_gap.MLP(dim_hidden=[16, 8], is_potential=False) + model = models.MLP(dim_hidden=[16, 8], is_potential=False) # generate data train_dataset, valid_dataset, dim_data = ( diff --git a/tests/neural/meta_initializer_test.py b/tests/neural/meta_initializer_test.py index e84554940..a083d6560 100644 --- a/tests/neural/meta_initializer_test.py +++ b/tests/neural/meta_initializer_test.py @@ -22,7 +22,7 @@ from ott.geometry import pointcloud from ott.initializers.linear import initializers as linear_init -from ott.neural.models import models as nn_init +from ott.neural.duality import models as nn_init from ott.problems.linear import linear_problem from ott.solvers.linear import sinkhorn diff --git a/tests/neural/neuraldual_test.py b/tests/neural/neuraldual_test.py index fc107ec75..5aef77aba 
100644 --- a/tests/neural/neuraldual_test.py +++ b/tests/neural/neuraldual_test.py @@ -19,8 +19,7 @@ import numpy as np from ott import datasets -from ott.neural.duality import conjugate, neuraldual -from ott.neural.models import models +from ott.neural.duality import conjugate, models, neuraldual ModelPair_t = Tuple[neuraldual.BaseW2NeuralDual, neuraldual.BaseW2NeuralDual] DatasetPair_t = Tuple[datasets.Dataset, datasets.Dataset] @@ -42,11 +41,12 @@ def neural_models(request: str) -> ModelPair_t: dim_hidden=[32]), models.ICNN(dim_data=2, dim_hidden=[32]) ) if request.param == "mlps": - return models.MLP(dim_hidden=[32]), models.MLP(dim_hidden=[32]), + return models.PotentialMLP(dim_hidden=[32] + ), models.PotentialMLP(dim_hidden=[32]), if request.param == "mlps-grad": return ( - models.MLP(dim_hidden=[32]), - models.MLP(is_potential=False, dim_hidden=[128]) + models.PotentialMLP(dim_hidden=[32]), + models.PotentialMLP(is_potential=False, dim_hidden=[128]) ) raise ValueError(f"Invalid request: {request.param}") From 0b81135c0f9d25b2d432b4d11ebd10dc523d4429 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 5 Dec 2023 14:03:54 +0100 Subject: [PATCH 055/186] incorporate feedback partially --- src/ott/neural/data/dataloaders.py | 1 - src/ott/neural/models/base_solver.py | 1 + 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ott/neural/data/dataloaders.py b/src/ott/neural/data/dataloaders.py index 9c09ce08c..68da7de6e 100644 --- a/src/ott/neural/data/dataloaders.py +++ b/src/ott/neural/data/dataloaders.py @@ -11,7 +11,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- from typing import Dict, Iterator, Mapping, Optional import numpy as np diff --git a/src/ott/neural/models/base_solver.py b/src/ott/neural/models/base_solver.py index e60d25766..8ee71a9c6 100644 --- a/src/ott/neural/models/base_solver.py +++ b/src/ott/neural/models/base_solver.py @@ -153,6 +153,7 @@ def _get_sinkhorn_match_fn( filter_input: bool = False, ) -> Callable: + @jax.jit def match_pairs( x: jnp.ndarray, y: jnp.ndarray ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray, jnp.ndarray]: From fccdeef659660081811973f48c3b9f4f56f03eaa Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 5 Dec 2023 14:21:19 +0100 Subject: [PATCH 056/186] make time encoder a layer --- src/ott/neural/flows/__init__.py | 2 +- src/ott/neural/flows/layers.py | 47 ++++++++++++++++++++++++++++++++ src/ott/neural/flows/models.py | 16 ++--------- src/ott/neural/models/layers.py | 5 ++-- 4 files changed, 53 insertions(+), 17 deletions(-) create mode 100644 src/ott/neural/flows/layers.py diff --git a/src/ott/neural/flows/__init__.py b/src/ott/neural/flows/__init__.py index 695cbbe3c..af3ceb125 100644 --- a/src/ott/neural/flows/__init__.py +++ b/src/ott/neural/flows/__init__.py @@ -11,4 +11,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from . import flows, genot, models, otfm +from . import flows, genot, layers, models, otfm diff --git a/src/ott/neural/flows/layers.py b/src/ott/neural/flows/layers.py new file mode 100644 index 000000000..84a526b1f --- /dev/null +++ b/src/ott/neural/flows/layers.py @@ -0,0 +1,47 @@ +# Copyright OTT-JAX +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import abc + +import jax.numpy as jnp + +import flax.linen as nn + +__all__ = ["TimeEncoder", "CyclicalTimeEncoder"] + + +class TimeEncoder(nn.Module, abc.ABC): + """A time encoder.""" + + @abc.abstractmethod + def __call__(self, t: jnp.ndarray) -> jnp.ndarray: + """Encode the time. + + Args: + t: Input time of shape (batch_size, 1). + + Returns: + The encoded time. + """ + pass + + +class CyclicalTimeEncoder(nn.Module): + """A cyclical time encoder.""" + n_frequencies: int = 128 + + @nn.compact + def __call__(self, t: jnp.ndarray) -> jnp.ndarray: # noqa: D102 + freq = 2 * jnp.arange(self.n_frequencies) * jnp.pi + t = freq * t + return jnp.concatenate((jnp.cos(t), jnp.sin(t)), axis=-1) diff --git a/src/ott/neural/flows/models.py b/src/ott/neural/flows/models.py index 4cf671a19..be73ac09d 100644 --- a/src/ott/neural/flows/models.py +++ b/src/ott/neural/flows/models.py @@ -20,6 +20,7 @@ import optax from flax.training import train_state +import ott.neural.flows.layers as flow_layers from ott.neural.models import layers __all__ = ["VelocityField"] @@ -61,19 +62,6 @@ class VelocityField(nn.Module): act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.silu n_frequencies: int = 128 - def time_encoder(self, t: jnp.ndarray) -> jnp.array: - """Encode the time. - - Args: - t: Time. - - Returns: - Encoded time. 
- """ - freq = 2 * jnp.arange(self.n_frequencies) * jnp.pi - t = freq * t - return jnp.concatenate((jnp.cos(t), jnp.sin(t)), axis=-1) - def __post_init__(self): # set embedded dim from latent embedded dim @@ -115,7 +103,7 @@ def __call__( Returns: Output of the neural vector field. """ - t = self.time_encoder(t) + t = flow_layers.CyclicalTimeEncoder(n_frequencies=self.n_frequencies)(t) t = layers.MLPBlock( dim=self.t_embed_dim, out_dim=self.t_embed_dim, diff --git a/src/ott/neural/models/layers.py b/src/ott/neural/models/layers.py index db8b24ae9..952cc9d24 100644 --- a/src/ott/neural/models/layers.py +++ b/src/ott/neural/models/layers.py @@ -14,6 +14,7 @@ from typing import Any, Tuple import jax +import jax.numpy as jnp import flax.linen as nn @@ -26,14 +27,14 @@ class MLPBlock(nn.Module): - """A simple MLP block.""" + """An MLP block.""" dim: int = 128 num_layers: int = 3 act_fn: Any = nn.silu out_dim: int = 128 @nn.compact - def __call__(self, x): + def __call__(self, x: jnp.ndarray) -> jnp.ndarray: """Apply the MLP block. 
Args: From 2a279c1d76594ee3937a70fae932e4096e738287 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 5 Dec 2023 14:50:30 +0100 Subject: [PATCH 057/186] make conditions Optional and minor feedback --- src/ott/neural/duality/models.py | 4 ++-- src/ott/neural/flows/flows.py | 36 ++++++++++++++-------------- src/ott/neural/flows/genot.py | 10 ++++---- src/ott/neural/flows/models.py | 32 ++++++++++++------------- src/ott/neural/flows/otfm.py | 9 +++---- src/ott/neural/models/base_solver.py | 8 +++++-- src/ott/neural/models/models.py | 11 +++++---- tests/neural/conftest.py | 13 ++++++++++ 8 files changed, 71 insertions(+), 52 deletions(-) diff --git a/src/ott/neural/duality/models.py b/src/ott/neural/duality/models.py index d10e09f55..baa0386c8 100644 --- a/src/ott/neural/duality/models.py +++ b/src/ott/neural/duality/models.py @@ -54,7 +54,7 @@ class ICNN(neuraldual.BaseW2NeuralDual): gaussian_map_samples: Tuple of source and target points, used to initialize the ICNN to mimic the linear Bures map that morphs the (Gaussian approximation) of the input measure to that of the target measure. If - ``None``, the identity initialization is used, and ICNN mimics half the + :obj:`None`, the identity initialization is used, and ICNN mimics half the squared Euclidean norm. """ dim_data: int @@ -234,7 +234,7 @@ class MetaInitializer(lin_init.DefaultInitializer): geom: The fixed geometry of the problem instances. meta_model: The model to predict the potential :math:`f` from the measures. TODO(marcocuturi): add explanation here what arguments to expect. - opt: The optimizer to update the parameters. If ``None``, use + opt: The optimizer to update the parameters. If :obj:`None`, use :func:`optax.adam` with :math:`0.001` learning rate. rng: The PRNG key to use for initializing the model. state: The training state of the model to start from. 
diff --git a/src/ott/neural/flows/flows.py b/src/ott/neural/flows/flows.py index 93f471b9d..83b23eb42 100644 --- a/src/ott/neural/flows/flows.py +++ b/src/ott/neural/flows/flows.py @@ -34,7 +34,7 @@ def __init__(self, sigma: float): @abc.abstractmethod def compute_mu_t( - self, t: jnp.ndarray, x_0: jnp.ndarray, x_1: jnp.ndarray + self, t: jnp.ndarray, src: jnp.ndarray, tgt: jnp.ndarray ) -> jnp.ndarray: """Compute the mean of the probablitiy path. @@ -43,12 +43,12 @@ def compute_mu_t( Args: t: Time :math:`t`. - x_0: Sample from the source distribution. - x_1: Sample from the target distribution. + src: Sample from the source distribution. + tgt: Sample from the target distribution. """ @abc.abstractmethod - def compute_sigma_t(self, t: jnp.ndarray): + def compute_sigma_t(self, t: jnp.ndarray) -> jnp.ndarray: """Compute the standard deviation of the probablity path at time :math:`t`. Args: @@ -57,7 +57,7 @@ def compute_sigma_t(self, t: jnp.ndarray): @abc.abstractmethod def compute_ut( - self, t: jnp.ndarray, x_0: jnp.ndarray, x_1: jnp.ndarray + self, t: jnp.ndarray, src: jnp.ndarray, tgt: jnp.ndarray ) -> jnp.ndarray: """Evaluate the conditional vector field. @@ -66,13 +66,13 @@ def compute_ut( Args: t: Time :math:`t`. - x_0: Sample from the source distribution. - x_1: Sample from the target distribution. + src: Sample from the source distribution. + tgt: Sample from the target distribution. """ def compute_xt( - self, noise: jnp.ndarray, t: jnp.ndarray, x_0: jnp.ndarray, - x_1: jnp.ndarray + self, noise: jnp.ndarray, t: jnp.ndarray, src: jnp.ndarray, + tgt: jnp.ndarray ) -> jnp.ndarray: """Sample from the probability path. @@ -82,14 +82,14 @@ def compute_xt( Args: noise: Noise sampled from a standard normal distribution. t: Time :math:`t`. - x_0: Sample from the source distribution. - x_1: Sample from the target distribution. + src: Sample from the source distribution. + tgt: Sample from the target distribution. 
Returns: Samples from the probability path between :math:`x_0` and :math:`x_1` at time :math:`t`. """ - mu_t = self.compute_mu_t(t, x_0, x_1) + mu_t = self.compute_mu_t(t, src, tgt) sigma_t = self.compute_sigma_t(t) return mu_t + sigma_t * noise @@ -103,7 +103,7 @@ def compute_mu_t( # noqa: D102 return t * x_0 + (1 - t) * x_1 def compute_ut( - self, t: jnp.ndarray, x_0: jnp.ndarray, x_1: jnp.ndarray + self, t: jnp.ndarray, src: jnp.ndarray, tgt: jnp.ndarray ) -> jnp.ndarray: """Evaluate the conditional vector field. @@ -112,19 +112,19 @@ def compute_ut( Args: t: Time :math:`t`. - x_0: Sample from the source distribution. - x_1: Sample from the target distribution. + src: Sample from the source distribution. + tgt: Sample from the target distribution. Returns: Conditional vector field evaluated at time :math:`t`. """ - return x_1 - x_0 + return tgt - src class ConstantNoiseFlow(StraightFlow): r"""Flow with straight paths and constant flow noise :math:`\sigma`.""" - def compute_sigma_t(self, t: jnp.ndarray): + def compute_sigma_t(self, t: jnp.ndarray) -> jnp.ndarray: r"""Compute noise of the flow at time :math:`t`. Args: @@ -144,7 +144,7 @@ class BrownianNoiseFlow(StraightFlow): :math:`\sigma_t = \sigma * \sqrt(t * (1-t))`. """ - def compute_sigma_t(self, t: jnp.ndarray): + def compute_sigma_t(self, t: jnp.ndarray) -> jnp.ndarray: """Compute the standard deviation of the probablity path at time :math:`t`. Args: diff --git a/src/ott/neural/flows/genot.py b/src/ott/neural/flows/genot.py index fa5ada781..4384f8e7f 100644 --- a/src/ott/neural/flows/genot.py +++ b/src/ott/neural/flows/genot.py @@ -88,10 +88,10 @@ class GENOT(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): on the first marginal. tau_b: If :math:`< 1`, defines how much unbalanced the problem is on the second marginal. - mlp_eta: Neural network to learn the left rescaling function. If `None`, - the left rescaling factor is not learnt. - mlp_xi: Neural network to learn the right rescaling function. 
If `None`, - the right rescaling factor is not learnt. + mlp_eta: Neural network to learn the left rescaling function. If + :obj:`None`, the left rescaling factor is not learnt. + mlp_xi: Neural network to learn the right rescaling function. If + :obj:`None`, the right rescaling factor is not learnt. unbalanced_kwargs: Keyword arguments for the unbalancedness solver. callback_fn: Callback function. rng: Random number generator. @@ -379,7 +379,7 @@ def loss_fn( def transport( self, source: jnp.ndarray, - condition: Optional[jnp.ndarray], + condition: Optional[jnp.ndarray] = None, rng: Optional[jax.Array] = None, forward: bool = True, diffeqsolve_kwargs: Dict[str, Any] = types.MappingProxyType({}), diff --git a/src/ott/neural/flows/models.py b/src/ott/neural/flows/models.py index be73ac09d..9a5ce13af 100644 --- a/src/ott/neural/flows/models.py +++ b/src/ott/neural/flows/models.py @@ -38,14 +38,14 @@ class VelocityField(nn.Module): Args: output_dim: Dimensionality of the neural vector field. - condition_dim: Dimensionality of the conditioning vector. latent_embed_dim: Dimensionality of the embedding of the data. + condition_dim: Dimensionality of the conditioning vector. condition_embed_dim: Dimensionality of the embedding of the condition. - If ``None``, set to ``latent_embed_dim``. + If :obj:`None`, set to ``latent_embed_dim``. t_embed_dim: Dimensionality of the time embedding. - If ``None``, set to ``latent_embed_dim``. + If :obj:`None`, set to ``latent_embed_dim``. joint_hidden_dim: Dimensionality of the hidden layers of the joint network. - If ``None``, set to ``latent_embed_dim + condition_embed_dim + + If :obj:`None`, set to ``latent_embed_dim + condition_embed_dim + t_embed_dim``. num_layers_per_block: Number of layers per block. act_fn: Activation function. 
@@ -53,8 +53,8 @@ class VelocityField(nn.Module): """ output_dim: int - condition_dim: int latent_embed_dim: int + condition_dim: Optional[int] = None condition_embed_dim: Optional[int] = None t_embed_dim: Optional[int] = None joint_hidden_dim: Optional[int] = None @@ -89,26 +89,29 @@ def __call__( self, t: jnp.ndarray, x: jnp.ndarray, - condition: Optional[jnp.ndarray], + condition: Optional[jnp.ndarray] = None, keys_model: Optional[jnp.ndarray] = None, ) -> jnp.ndarray: """Forward pass through the neural vector field. Args: - t: Time. - x: Data. + t: Time of shape (batch_size, 1). + x: Data of shape (batch_size, output_dim). condition: Conditioning vector. keys_model: Random number generator. Returns: Output of the neural vector field. """ + if self.condition_dim is None: + assert condition is None + t = flow_layers.CyclicalTimeEncoder(n_frequencies=self.n_frequencies)(t) t = layers.MLPBlock( dim=self.t_embed_dim, out_dim=self.t_embed_dim, num_layers=self.num_layers_per_block, - act_fn=self.act_fn, + act_fn=self.act_fn )( t ) @@ -122,7 +125,7 @@ def __call__( x ) - if self.condition_dim > 0: + if self.condition_dim is not None: condition = layers.MLPBlock( dim=self.condition_embed_dim, out_dim=self.condition_embed_dim, @@ -139,17 +142,12 @@ def __call__( dim=self.joint_hidden_dim, out_dim=self.joint_hidden_dim, num_layers=self.num_layers_per_block, - act_fn=self.act_fn, + act_fn=self.act_fn )( concatenated ) - return nn.Dense( - self.output_dim, - use_bias=True, - )( - out - ) + return nn.Dense(self.output_dim, use_bias=True)(out) def create_train_state( self, diff --git a/src/ott/neural/flows/otfm.py b/src/ott/neural/flows/otfm.py index ed0114f6d..657c5fe82 100644 --- a/src/ott/neural/flows/otfm.py +++ b/src/ott/neural/flows/otfm.py @@ -61,7 +61,8 @@ class OTFlowMatching(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): valid_freq: Frequency of validation. 
ot_solver: OT solver to match samples from the source and the target distribution as proposed in :cite:`tong:23`, :cite:`pooladian:23`. - If `None`, no matching will be performed as proposed in :cite:`lipman:22`. + If :obj:`None`, no matching will be performed as proposed in + :cite:`lipman:22`. flow: Flow between source and target distribution. time_sampler: Sampler for the time. optimizer: Optimizer for `neural_vector_field`. @@ -76,9 +77,9 @@ class OTFlowMatching(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): tau_b: If :math:`< 1`, defines how much unbalanced the problem is on the second marginal. mlp_eta: Neural network to learn the left rescaling function as suggested - in :cite:`TODO`. If `None`, the left rescaling factor is not learnt. + in :cite:`TODO`. If :obj:`None`, the left rescaling factor is not learnt. mlp_xi: Neural network to learn the right rescaling function as suggested - in :cite:`TODO`. If `None`, the right rescaling factor is not learnt. + in :cite:`TODO`. If :obj:`None`, the right rescaling factor is not learnt. unbalanced_kwargs: Keyword arguments for the unbalancedness solver. callback_fn: Callback function. num_eval_samples: Number of samples to evaluate on during evaluation. @@ -279,7 +280,7 @@ def __call__(self, train_loader, valid_loader): def transport( self, data: jnp.array, - condition: Optional[jnp.ndarray], + condition: Optional[jnp.ndarray] = None, forward: bool = True, diffeqsolve_kwargs: Dict[str, Any] = types.MappingProxyType({}) ) -> diffrax.Solution: diff --git a/src/ott/neural/models/base_solver.py b/src/ott/neural/models/base_solver.py index 8ee71a9c6..e2236a294 100644 --- a/src/ott/neural/models/base_solver.py +++ b/src/ott/neural/models/base_solver.py @@ -441,7 +441,9 @@ def step_fn( return step_fn def evaluate_eta( - self, source: jnp.ndarray, condition: Optional[jnp.ndarray] + self, + source: jnp.ndarray, + condition: Optional[jnp.ndarray] = None ) -> jnp.ndarray: """Evaluate the left learnt rescaling factor. 
@@ -460,7 +462,9 @@ def evaluate_eta( condition=condition) def evaluate_xi( - self, target: jnp.ndarray, condition: Optional[jnp.ndarray] + self, + target: jnp.ndarray, + condition: Optional[jnp.ndarray] = None ) -> jnp.ndarray: """Evaluate the right learnt rescaling factor. diff --git a/src/ott/neural/models/models.py b/src/ott/neural/models/models.py index df4a0e14e..4250ff9f8 100644 --- a/src/ott/neural/models/models.py +++ b/src/ott/neural/models/models.py @@ -110,13 +110,15 @@ class RescalingMLP(nn.Module): Rescaling factors. """ hidden_dim: int - condition_dim: int + condition_dim: Optional[int] = None num_layers_per_block: int = 3 act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.selu @nn.compact def __call__( - self, x: jnp.ndarray, condition: Optional[jnp.ndarray] + self, + x: jnp.ndarray, + condition: Optional[jnp.ndarray] = None ) -> jnp.ndarray: # noqa: D102 """Forward pass through the rescaling network. @@ -127,6 +129,8 @@ def __call__( Returns: Estimated rescaling factors. """ + if self.condition_dim is None: + assert condition is None x = layers.MLPBlock( dim=self.hidden_dim, out_dim=self.hidden_dim, @@ -135,8 +139,7 @@ def __call__( )( x ) - if self.condition_dim > 0: - condition = jnp.atleast_1d(condition) + if self.condition_dim is not None: condition = layers.MLPBlock( dim=self.hidden_dim, out_dim=self.hidden_dim, diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index 74d66dea3..723d25393 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -1,3 +1,16 @@ +# Copyright OTT-JAX +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. import pytest import numpy as np From e6f0049bd26d59f393b8ad97cfc61d4cd5570771 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 5 Dec 2023 15:44:14 +0100 Subject: [PATCH 058/186] revert faulty jax.array / jnp.ndarray conversions --- src/ott/neural/flows/flows.py | 4 +- src/ott/neural/flows/otfm.py | 7 - src/ott/neural/models/base_solver.py | 18 +-- src/ott/neural/models/layers.py | 2 +- .../solvers/quadratic/gromov_wasserstein.py | 2 +- src/ott/tools/soft_sort.py | 2 +- src/ott/utils.py | 3 +- tests/geometry/scaling_cost_test.py | 4 +- tests/neural/genot_test.py | 1 - tests/solvers/quadratic/lower_bound_test.py | 137 ------------------ tests/tools/soft_sort_test.py | 2 +- 11 files changed, 18 insertions(+), 164 deletions(-) delete mode 100644 tests/solvers/quadratic/lower_bound_test.py diff --git a/src/ott/neural/flows/flows.py b/src/ott/neural/flows/flows.py index 83b23eb42..eb1723883 100644 --- a/src/ott/neural/flows/flows.py +++ b/src/ott/neural/flows/flows.py @@ -98,9 +98,9 @@ class StraightFlow(BaseFlow, abc.ABC): """Base class for flows with straight paths.""" def compute_mu_t( # noqa: D102 - self, t: jnp.ndarray, x_0: jnp.ndarray, x_1: jnp.ndarray + self, t: jnp.ndarray, src: jnp.ndarray, tgt: jnp.ndarray ) -> jnp.ndarray: - return t * x_0 + (1 - t) * x_1 + return t * src + (1 - t) * tgt def compute_ut( self, t: jnp.ndarray, src: jnp.ndarray, tgt: jnp.ndarray diff --git a/src/ott/neural/flows/otfm.py b/src/ott/neural/flows/otfm.py index 657c5fe82..ec5eb9821 100644 --- a/src/ott/neural/flows/otfm.py +++ b/src/ott/neural/flows/otfm.py @@ -84,10 
+84,6 @@ class OTFlowMatching(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): callback_fn: Callback function. num_eval_samples: Number of samples to evaluate on during evaluation. rng: Random number generator. - - Returns: - None - """ def __init__( @@ -226,9 +222,6 @@ def __call__(self, train_loader, valid_loader): Args; train_loader: Dataloader for the training data. valid_loader: Dataloader for the validation data. - - Returns: - None """ batch: Mapping[str, jnp.ndarray] = {} curr_loss = 0.0 diff --git a/src/ott/neural/models/base_solver.py b/src/ott/neural/models/base_solver.py index e2236a294..3f807c4cc 100644 --- a/src/ott/neural/models/base_solver.py +++ b/src/ott/neural/models/base_solver.py @@ -11,7 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from abc import ABC, abstractmethod +import abc from pathlib import Path from types import MappingProxyType from typing import Any, Callable, Dict, Literal, Mapping, Optional, Tuple, Union @@ -30,7 +30,7 @@ __all__ = ["BaseNeuralSolver", "ResampleMixin", "UnbalancednessMixin"] -class BaseNeuralSolver(ABC): +class BaseNeuralSolver(abc.ABC): """Base class for neural solvers. 
Args: @@ -42,28 +42,28 @@ def __init__(self, iterations: int, valid_freq: int, **_: Any): self.iterations = iterations self.valid_freq = valid_freq - @abstractmethod + @abc.abstractmethod def setup(self, *args: Any, **kwargs: Any): """Setup the model.""" - @abstractmethod + @abc.abstractmethod def __call__(self, *args: Any, **kwargs: Any): """Train the model.""" - @abstractmethod + @abc.abstractmethod def transport(self, *args: Any, forward: bool, **kwargs: Any) -> Any: """Transport.""" - @abstractmethod + @abc.abstractmethod def save(self, path: Path): """Save the model.""" - @abstractmethod + @abc.abstractmethod def load(self, path: Path): """Load the model.""" @property - @abstractmethod + @abc.abstractmethod def training_logs(self) -> Dict[str, Any]: """Return the training logs.""" @@ -327,7 +327,7 @@ def _resample_unbalanced( batch: Tuple[jnp.ndarray, ...], marginals: jnp.ndarray, ) -> Tuple[jnp.ndarray, ...]: - """Resample a batch based upon marginals.""" + """Resample a batch based on marginals.""" indices = jax.random.choice( key, a=len(marginals), p=jnp.squeeze(marginals), shape=[len(marginals)] ) diff --git a/src/ott/neural/models/layers.py b/src/ott/neural/models/layers.py index 952cc9d24..46313b0e2 100644 --- a/src/ott/neural/models/layers.py +++ b/src/ott/neural/models/layers.py @@ -38,7 +38,7 @@ def __call__(self, x: jnp.ndarray) -> jnp.ndarray: """Apply the MLP block. Args: - x: Input data of shape (batch_size, dim) + x: Input data of shape (batch_size, dim). Returns: Output data of shape (batch_size, out_dim). 
diff --git a/src/ott/solvers/quadratic/gromov_wasserstein.py b/src/ott/solvers/quadratic/gromov_wasserstein.py index 5e23d88e6..a7890e1c9 100644 --- a/src/ott/solvers/quadratic/gromov_wasserstein.py +++ b/src/ott/solvers/quadratic/gromov_wasserstein.py @@ -129,7 +129,7 @@ class GWState(NamedTuple): linear_state: LinearOutput linear_pb: linear_problem.LinearProblem old_transport_mass: float - rngs: Optional[jnp.ndarray] = None + rngs: Optional[jax.Array] = None errors: Optional[jnp.ndarray] = None def set(self, **kwargs: Any) -> "GWState": diff --git a/src/ott/tools/soft_sort.py b/src/ott/tools/soft_sort.py index ccde3bd2c..1a30359ee 100644 --- a/src/ott/tools/soft_sort.py +++ b/src/ott/tools/soft_sort.py @@ -458,7 +458,7 @@ def _quantile( def multivariate_cdf_quantile_maps( inputs: jnp.ndarray, target_sampler: Optional[Callable[[jnp.ndarray, Tuple[int, int]], - jnp.ndarray]] = None, + jax.Array]] = None, rng: Optional[jax.Array] = None, num_target_samples: Optional[int] = None, cost_fn: Optional[costs.CostFn] = None, diff --git a/src/ott/utils.py b/src/ott/utils.py index 63a36f2b4..558f4ba1c 100644 --- a/src/ott/utils.py +++ b/src/ott/utils.py @@ -18,7 +18,6 @@ from typing import Any, Callable, NamedTuple, Optional, Tuple import jax -import jax.numpy as jnp import numpy as np try: @@ -69,7 +68,7 @@ def wrapper(*args: Any, **kwargs: Any) -> Any: return functools.wraps(func)(wrapper) -def default_prng_key(rng: Optional[jax.Array] = None) -> jnp.ndarray: +def default_prng_key(rng: Optional[jax.Array] = None) -> jax.Array: """Get the default PRNG key. 
Args: diff --git a/tests/geometry/scaling_cost_test.py b/tests/geometry/scaling_cost_test.py index 6cd5dcaa9..3dbe4bf31 100644 --- a/tests/geometry/scaling_cost_test.py +++ b/tests/geometry/scaling_cost_test.py @@ -189,7 +189,7 @@ def apply_sinkhorn(cost1, cost2, scale_cost): np.testing.assert_allclose(1.0, geom.cost_matrix.max(), rtol=1e-4) @pytest.mark.parametrize("batch_size", [5, 12]) - def test_mascale_cost_xx_low_rank_with_batch(self, batch_size: int): + def test_max_scale_cost_low_rank_with_batch(self, batch_size: int): """Test max_cost options for low rank with batch_size fixed.""" geom0 = low_rank.LRCGeometry( @@ -200,7 +200,7 @@ def test_mascale_cost_xx_low_rank_with_batch(self, batch_size: int): geom0.inv_scale_cost, 1.0 / jnp.max(self.cost_lr), rtol=1e-4 ) - def test_mascale_cost_xx_low_rank_large_array(self): + def test_max_scale_cost_low_rank_large_array(self): """Test max_cost options for large matrices.""" _, *rngs = jax.random.split(self.rng, 3) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index a962afca3..44ad21428 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -29,7 +29,6 @@ class TestGENOT: - #TODO: add tests for unbalancedness @pytest.mark.parametrize("k_samples_per_x", [1, 2]) @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) diff --git a/tests/solvers/quadratic/lower_bound_test.py b/tests/solvers/quadratic/lower_bound_test.py deleted file mode 100644 index 37bf2a8b3..000000000 --- a/tests/solvers/quadratic/lower_bound_test.py +++ /dev/null @@ -1,137 +0,0 @@ -# Copyright OTT-JAX -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import functools -from typing import Callable - -import pytest - -import jax -import jax.numpy as jnp -import numpy as np - -from ott.geometry import costs, distrib_costs, pointcloud -from ott.initializers.linear import initializers -from ott.problems.quadratic import quadratic_problem -from ott.solvers.linear import implicit_differentiation as implicit_lib -from ott.solvers.quadratic import lower_bound -from ott.tools import soft_sort - - -class TestLowerBoundSolver: - - @pytest.fixture(autouse=True) - def initialize(self, rng: jax.Array): - d_x = 2 - d_y = 3 - self.n, self.m = 13, 15 - rngs = jax.random.split(rng, 4) - self.x = jax.random.uniform(rngs[0], (self.n, d_x)) - self.y = jax.random.uniform(rngs[1], (self.m, d_y)) - # Currently the Lower Bound only supports uniform distributions: - a = jnp.ones(self.n) - b = jnp.ones(self.m) - self.a = a / jnp.sum(a) - self.b = b / jnp.sum(b) - self.cx = jax.random.uniform(rngs[2], (self.n, self.n)) - self.cy = jax.random.uniform(rngs[3], (self.m, self.m)) - - @pytest.mark.fast.with_args( - "ground_cost", - [costs.SqEuclidean(), costs.PNormP(1.5)], - only_fast=0, - ) - def test_lb_pointcloud(self, ground_cost: costs.TICost): - x, y = self.x, self.y - - geom_x = pointcloud.PointCloud(x) - geom_y = pointcloud.PointCloud(y) - prob = quadratic_problem.QuadraticProblem( - geom_x, geom_y, a=self.a, b=self.b - ) - distrib_cost = distrib_costs.UnivariateWasserstein(ground_cost=ground_cost) - solver = lower_bound.LowerBoundSolver( - epsilon=1e-1, distrib_cost=distrib_cost - ) - - out = jax.jit(solver)(prob) - - assert 
not jnp.isnan(out.reg_ot_cost) - - @pytest.mark.parametrize("method", ["subsample", "quantile", "equal"]) - @pytest.mark.parametrize( - "sort_fn", - [ - None, - functools.partial( - soft_sort.sort, - epsilon=1e-3, - implicit_diff=False, - # soft sort uses `sorting` initializer, which uses while loop - # which is not reverse-mode diff. - initializer=initializers.DefaultInitializer(), - min_iterations=10, - max_iterations=10, - ), - functools.partial( - soft_sort.sort, - epsilon=1e-1, - implicit_diff=implicit_lib.ImplicitDiff(), - initializer=initializers.DefaultInitializer(), - min_iterations=0, - max_iterations=100, - ) - ] - ) - def test_lb_grad( - self, rng: jax.Array, sort_fn: Callable[[jnp.ndarray], jnp.ndarray], - method: str - ): - - def fn(x: jnp.ndarray, y: jnp.ndarray) -> float: - geom_x = pointcloud.PointCloud(x) - geom_y = pointcloud.PointCloud(y) - prob = quadratic_problem.QuadraticProblem(geom_x, geom_y) - - solver = lower_bound.LowerBoundSolver( - epsilon=5e-2, - sort_fn=sort_fn, - cost_fn=costs.SqEuclidean(), - method=method, - n_subsamples=n_sub, - ) - return solver(prob).reg_ot_cost - - rng1, rng2 = jax.random.split(rng) - eps, tol = 1e-4, 1e-3 - - n_sub = min(self.x.shape[0], self.y.shape[0]) - if method == "equal": - x, y = self.x[:n_sub], self.y[:n_sub] - else: - x, y = self.x, self.y - - grad_x, grad_y = jax.jit(jax.grad(fn, (0, 1)))(x, y) - - v_x = jax.random.normal(rng1, shape=x.shape) - v_x = (v_x / jnp.linalg.norm(v_x, axis=-1, keepdims=True)) * eps - expected = fn(x + v_x, y) - fn(x - v_x, y) - actual = 2.0 * jnp.vdot(v_x, grad_x) - np.testing.assert_allclose(actual, expected, rtol=tol, atol=tol) - - v_y = jax.random.normal(rng2, shape=y.shape) - v_y = (v_y / jnp.linalg.norm(v_y, axis=-1, keepdims=True)) * eps - expected = (fn(x, y + v_y) - fn(x, y - v_y)) - actual = 2.0 * jnp.vdot(v_y, grad_y) - np.testing.assert_allclose(actual, expected, rtol=tol, atol=tol) diff --git a/tests/tools/soft_sort_test.py b/tests/tools/soft_sort_test.py index 
b4fa68ddf..3d66d43c7 100644 --- a/tests/tools/soft_sort_test.py +++ b/tests/tools/soft_sort_test.py @@ -109,7 +109,7 @@ def test_multivariate_cdf_quantiles(self, rng: jax.Array): # Check passing custom sampler, must be still symmetric / centered on {.5}^d # Check passing custom epsilon also works. - def ball_sampler(k: jnp.ndarray, s: Tuple[int, int]) -> jnp.ndarray: + def ball_sampler(k: jax.Array, s: Tuple[int, int]) -> jnp.ndarray: return 0.5 * (jax.random.ball(k, d=s[1], p=4, shape=(s[0],)) + 1.) num_target_samples = 473 From f23497f6ef0e402cbac5817b72d07f56eec30c55 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 5 Dec 2023 16:04:04 +0100 Subject: [PATCH 059/186] make formatting in neural nets nicer --- src/ott/neural/flows/models.py | 21 ++++++++------------- src/ott/neural/models/models.py | 19 +++++++++---------- 2 files changed, 17 insertions(+), 23 deletions(-) diff --git a/src/ott/neural/flows/models.py b/src/ott/neural/flows/models.py index 9a5ce13af..c9c2df44a 100644 --- a/src/ott/neural/flows/models.py +++ b/src/ott/neural/flows/models.py @@ -107,46 +107,41 @@ def __call__( assert condition is None t = flow_layers.CyclicalTimeEncoder(n_frequencies=self.n_frequencies)(t) - t = layers.MLPBlock( + t_layer = layers.MLPBlock( dim=self.t_embed_dim, out_dim=self.t_embed_dim, num_layers=self.num_layers_per_block, act_fn=self.act_fn - )( - t ) + t = t_layer(t) - x = layers.MLPBlock( + x_layer = layers.MLPBlock( dim=self.latent_embed_dim, out_dim=self.latent_embed_dim, num_layers=self.num_layers_per_block, act_fn=self.act_fn - )( - x ) + x = x_layer(x) if self.condition_dim is not None: - condition = layers.MLPBlock( + condition_layer = layers.MLPBlock( dim=self.condition_embed_dim, out_dim=self.condition_embed_dim, num_layers=self.num_layers_per_block, act_fn=self.act_fn - )( - condition ) + condition = condition_layer(condition) concatenated = jnp.concatenate((t, x, condition), axis=-1) else: concatenated = jnp.concatenate((t, x), axis=-1) - out = 
layers.MLPBlock( + out_layer = layers.MLPBlock( dim=self.joint_hidden_dim, out_dim=self.joint_hidden_dim, num_layers=self.num_layers_per_block, act_fn=self.act_fn - )( - concatenated ) - + out = out_layer(concatenated) return nn.Dense(self.output_dim, use_bias=True)(out) def create_train_state( diff --git a/src/ott/neural/models/models.py b/src/ott/neural/models/models.py index 4250ff9f8..5afc809d7 100644 --- a/src/ott/neural/models/models.py +++ b/src/ott/neural/models/models.py @@ -131,36 +131,35 @@ def __call__( """ if self.condition_dim is None: assert condition is None - x = layers.MLPBlock( + x_layer = layers.MLPBlock( dim=self.hidden_dim, out_dim=self.hidden_dim, num_layers=self.num_layers_per_block, act_fn=self.act_fn - )( - x ) + x = x_layer(x) + if self.condition_dim is not None: - condition = layers.MLPBlock( + condition_layer = layers.MLPBlock( dim=self.hidden_dim, out_dim=self.hidden_dim, num_layers=self.num_layers_per_block, act_fn=self.act_fn - )( - condition ) + + condition = condition_layer(condition) concatenated = jnp.concatenate((x, condition), axis=-1) else: concatenated = x - out = layers.MLPBlock( + out_layer = layers.MLPBlock( dim=self.hidden_dim, out_dim=self.hidden_dim, num_layers=self.num_layers_per_block, - act_fn=self.act_fn, - )( - concatenated + act_fn=self.act_fn ) + out = out_layer(concatenated) return jnp.exp(out) def create_train_state( From 9f96583ddd967fc061ac1bae3b32ab47562f1f6f Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 5 Dec 2023 16:20:42 +0100 Subject: [PATCH 060/186] add description to Velocity Field --- src/ott/neural/flows/models.py | 10 +++++++++- src/ott/neural/models/models.py | 2 +- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/src/ott/neural/flows/models.py b/src/ott/neural/flows/models.py index c9c2df44a..0177383c9 100644 --- a/src/ott/neural/flows/models.py +++ b/src/ott/neural/flows/models.py @@ -27,7 +27,15 @@ class VelocityField(nn.Module): - """Parameterized neural vector field. 
+ r"""Parameterized neural vector field. + + The `VelocityField` learns a map + :math:`v: \\mathbb{R}\times \\mathbb{R}^d\rightarrow \\mathbb{R}^d` solving + the ODE :math:`\frac{dx}{dt} = v(t, x)`. Given a source distribution at time + :math:`t=0`, the `VelocityField` can be used to transport the source + distribution given at :math:`t_0` to a target distribution given at + :math:`t_1` by integrating :math:`v(t, x)` from :math:`t=t_0` to + :math:`t=t_1`. Each of the input, condition, and time embeddings are passed through a block consisting of ``num_layers_per_block`` layers of dimension diff --git a/src/ott/neural/models/models.py b/src/ott/neural/models/models.py index 5afc809d7..e84e3560a 100644 --- a/src/ott/neural/models/models.py +++ b/src/ott/neural/models/models.py @@ -107,7 +107,7 @@ class RescalingMLP(nn.Module): act_fn: Activation function. Returns: - Rescaling factors. + Non-negative rescaling factors. """ hidden_dim: int condition_dim: Optional[int] = None From 86fe8864098340931f5898086b58e5f9d13b01c9 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 5 Dec 2023 16:38:03 +0100 Subject: [PATCH 061/186] replace time sampler class by function --- src/ott/neural/flows/__init__.py | 2 +- src/ott/neural/flows/flows.py | 87 ++------------------------------ src/ott/neural/flows/genot.py | 10 ++-- src/ott/neural/flows/otfm.py | 4 +- src/ott/neural/flows/samplers.py | 50 ++++++++++++++++++ 5 files changed, 60 insertions(+), 93 deletions(-) create mode 100644 src/ott/neural/flows/samplers.py diff --git a/src/ott/neural/flows/__init__.py b/src/ott/neural/flows/__init__.py index af3ceb125..cc2c4bfdb 100644 --- a/src/ott/neural/flows/__init__.py +++ b/src/ott/neural/flows/__init__.py @@ -11,4 +11,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from . import flows, genot, layers, models, otfm +from . 
import flows, genot, layers, models, otfm, samplers diff --git a/src/ott/neural/flows/flows.py b/src/ott/neural/flows/flows.py index eb1723883..0dce912aa 100644 --- a/src/ott/neural/flows/flows.py +++ b/src/ott/neural/flows/flows.py @@ -13,12 +13,13 @@ # limitations under the License. import abc -import jax import jax.numpy as jnp __all__ = [ - "BaseFlow", "StraightFlow", "ConstantNoiseFlow", "BrownianNoiseFlow", - "BaseTimeSampler", "UniformSampler", "OffsetUniformSampler" + "BaseFlow", + "StraightFlow", + "ConstantNoiseFlow", + "BrownianNoiseFlow", ] @@ -154,83 +155,3 @@ def compute_sigma_t(self, t: jnp.ndarray) -> jnp.ndarray: Standard deviation of the probablity path at time :math:`t`. """ return jnp.sqrt(self.sigma * t * (1 - t)) - - -class BaseTimeSampler(abc.ABC): - """Base class for time samplers. - - Args: - low: Lower bound of the distribution to sample from. - high: Upper bound of the distribution to sample from . - """ - - def __init__(self, low: float, high: float): - self.low = low - self.high = high - - @abc.abstractmethod - def __call__(self, rng: jax.Array, num_samples: int) -> jnp.ndarray: - """Generate `num_samples` samples of the time `math`:t:. - - Args: - rng: Random number generator. - num_samples: Number of samples to generate. - """ - - -class UniformSampler(BaseTimeSampler): - """Sample :math:`t` from a uniform distribution :math:`[low, high]`. - - Args: - low: Lower bound of the uniform distribution. - high: Upper bound of the uniform distribution. - """ - - def __init__(self, low: float = 0.0, high: float = 1.0): - super().__init__(low=low, high=high) - - def __call__(self, rng: jax.Array, num_samples: int) -> jnp.ndarray: - """Generate `num_samples` samples of the time `math`:t:. - - Args: - rng: Random number generator. - num_samples: Number of samples to generate. - - Returns: - `num_samples` samples of the time :math:`t``. 
- """ - return jax.random.uniform( - rng, (num_samples, 1), minval=self.low, maxval=self.high - ) - - -class OffsetUniformSampler(BaseTimeSampler): - """Sample the time :math:`t`. - - Sample :math:`t` from a uniform distribution :math:`[low, high]` with - offset `offset`. - - Args: - offset: Offset of the uniform distribution. - low: Lower bound of the uniform distribution. - high: Upper bound of the uniform distribution. - """ - - def __init__(self, offset: float, low: float = 0.0, high: float = 1.0): - super().__init__(low=low, high=high) - self.offset = offset - - def __call__(self, rng: jax.Array, num_samples: int) -> jnp.ndarray: - """Generate `num_samples` samples of the time `math`:t:. - - Args: - rng: Random number generator. - num_samples: Number of samples to generate. - - Returns: - An array with `num_samples` samples of the time `math`:t:. - """ - return ( - jax.random.uniform(rng, (1, 1), minval=self.low, maxval=self.high) + - jnp.arange(num_samples)[:, None] / num_samples - ) % ((self.high - self.low) - self.offset) diff --git a/src/ott/neural/flows/genot.py b/src/ott/neural/flows/genot.py index 4384f8e7f..813be6649 100644 --- a/src/ott/neural/flows/genot.py +++ b/src/ott/neural/flows/genot.py @@ -26,12 +26,8 @@ from ott import utils from ott.geometry import costs -from ott.neural.flows.flows import ( - BaseFlow, - BaseTimeSampler, - ConstantNoiseFlow, - UniformSampler, -) +from ott.neural.flows.flows import BaseFlow, ConstantNoiseFlow +from ott.neural.flows.samplers import sample_uniformly from ott.neural.models.base_solver import ( BaseNeuralSolver, ResampleMixin, @@ -118,7 +114,7 @@ def __init__( "max_cost", "median"]]]], optimizer: Type[optax.GradientTransformation], flow: Type[BaseFlow] = ConstantNoiseFlow(0.0), - time_sampler: Type[BaseTimeSampler] = UniformSampler(), + time_sampler: Callable[[jax.Array, int], jnp.ndarray] = sample_uniformly, checkpoint_manager: Type[checkpoint.CheckpointManager] = None, k_samples_per_x: int = 1, 
solver_latent_to_data: Optional[Type[was_solver.WassersteinSolver] diff --git a/src/ott/neural/flows/otfm.py b/src/ott/neural/flows/otfm.py index ec5eb9821..f7a973eb4 100644 --- a/src/ott/neural/flows/otfm.py +++ b/src/ott/neural/flows/otfm.py @@ -36,7 +36,7 @@ from ott import utils from ott.geometry import costs -from ott.neural.flows.flows import BaseFlow, BaseTimeSampler +from ott.neural.flows.flows import BaseFlow from ott.neural.models.base_solver import ( BaseNeuralSolver, ResampleMixin, @@ -96,7 +96,7 @@ def __init__( iterations: int, ot_solver: Optional[Type[was_solver.WassersteinSolver]], flow: Type[BaseFlow], - time_sampler: Type[BaseTimeSampler], + time_sampler: Callable[[jax.Array, int], jnp.ndarray], optimizer: Type[optax.GradientTransformation], checkpoint_manager: Type[checkpoint.CheckpointManager] = None, epsilon: float = 1e-2, diff --git a/src/ott/neural/flows/samplers.py b/src/ott/neural/flows/samplers.py new file mode 100644 index 000000000..f5d0e0d17 --- /dev/null +++ b/src/ott/neural/flows/samplers.py @@ -0,0 +1,50 @@ +# Copyright OTT-JAX +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import Optional + +import jax +import jax.numpy as jnp + +__all__ = ["sample_uniformly"] + + +def sample_uniformly( + rng: jax.Array, + num_samples: int, + low: float = 0.0, + high: float = 1.0, + offset: Optional[float] = None +): + """Sample from a uniform distribution. 
+ + Sample :math:`t` from a uniform distribution :math:`[low, high]` with + offset `offset`. + + Args: + rng: Random number generator. + num_samples: Number of samples to generate. + low: Lower bound of the uniform distribution. + high: Upper bound of the uniform distribution. + offset: Offset of the uniform distribution. If :obj:`None`, no offset is + used. + + Returns: + An array with `num_samples` samples of the time `math`:t:. + """ + if offset is None: + return jax.random.uniform(rng, (num_samples, 1), minval=low, maxval=high) + return ( + jax.random.uniform(rng, (1, 1), minval=low, maxval=high) + + jnp.arange(num_samples)[:, None] / num_samples + ) % ((high - low) - offset) From 58e3d29c12429d7bbabc979545de05b10b6acb99 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 5 Dec 2023 16:52:02 +0100 Subject: [PATCH 062/186] add citations --- docs/references.bib | 20 ++++++++++++++++++++ src/ott/neural/flows/genot.py | 14 +++++++------- src/ott/neural/flows/otfm.py | 24 +++++++++++++----------- src/ott/neural/models/base_solver.py | 20 ++++++++++---------- tests/neural/genot_test.py | 8 ++++---- tests/neural/otfm_test.py | 8 ++++---- 6 files changed, 58 insertions(+), 36 deletions(-) diff --git a/docs/references.bib b/docs/references.bib index c5d4c4678..e0c83a6ee 100644 --- a/docs/references.bib +++ b/docs/references.bib @@ -814,3 +814,23 @@ @misc{huguet:2023 title = {Geodesic Sinkhorn for Fast and Accurate Optimal Transport on Manifolds}, year = {2023}, } + +@misc{eyring:23, + author={Eyring, Luca and Klein, Dominik and Uscidda, Th{\'e}o and Palla, Giovanni and Kilbertus, Niki and Akata, Zeynep and Theis, Fabian}, + doi = {10.48550/arXiv.2311.15100}, + eprint = {2311.15100}, + eprintclass = {stat.ML}, + eprinttype = {arXiv}, + title={Unbalancedness in Neural Monge Maps Improves Unpaired Domain Translation}, + year={2023} +} + +@misc{klein_uscidda:23, + author={Dominik Klein and Théo Uscidda and Fabian Theis and Marco Cuturi}, + doi = 
{10.48550/arXiv.2310.09254}, + eprint={2310.09254}, + eprintclass = {stat.ML}, + eprinttype = {arXiv}, + title={Generative Entropic Neural Optimal Transport To Map Within and Across Spaces}, + year={2023}, +} diff --git a/src/ott/neural/flows/genot.py b/src/ott/neural/flows/genot.py index 813be6649..dedbf20ec 100644 --- a/src/ott/neural/flows/genot.py +++ b/src/ott/neural/flows/genot.py @@ -84,9 +84,9 @@ class GENOT(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): on the first marginal. tau_b: If :math:`< 1`, defines how much unbalanced the problem is on the second marginal. - mlp_eta: Neural network to learn the left rescaling function. If + rescaling_a: Neural network to learn the left rescaling function. If :obj:`None`, the left rescaling factor is not learnt. - mlp_xi: Neural network to learn the right rescaling function. If + rescaling_b: Neural network to learn the right rescaling function. If :obj:`None`, the right rescaling factor is not learnt. unbalanced_kwargs: Keyword arguments for the unbalancedness solver. callback_fn: Callback function. 
@@ -123,8 +123,8 @@ def __init__( fused_penalty: float = 0.0, tau_a: float = 1.0, tau_b: float = 1.0, - mlp_eta: Callable[[jnp.ndarray], float] = None, - mlp_xi: Callable[[jnp.ndarray], float] = None, + rescaling_a: Callable[[jnp.ndarray], float] = None, + rescaling_b: Callable[[jnp.ndarray], float] = None, unbalanced_kwargs: Dict[str, Any] = types.MappingProxyType({}), callback_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray, jnp.ndarray], Any]] = None, @@ -144,8 +144,8 @@ def __init__( cond_dim=cond_dim, tau_a=tau_a, tau_b=tau_b, - mlp_eta=mlp_eta, - mlp_xi=mlp_xi, + rescaling_a=rescaling_a, + rescaling_b=rescaling_b, unbalanced_kwargs=unbalanced_kwargs, ) if isinstance( @@ -443,7 +443,7 @@ def _valid_step(self, valid_loader, iter): @property def learn_rescaling(self) -> bool: """Whether to learn at least one rescaling factor.""" - return self.mlp_eta is not None or self.mlp_xi is not None + return self.rescaling_a is not None or self.rescaling_b is not None def save(self, path: str): """Save the model. diff --git a/src/ott/neural/flows/otfm.py b/src/ott/neural/flows/otfm.py index f7a973eb4..d08032bad 100644 --- a/src/ott/neural/flows/otfm.py +++ b/src/ott/neural/flows/otfm.py @@ -73,13 +73,15 @@ class OTFlowMatching(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): scale_cost: How to scale the cost matrix for the OT problem solved by the `ot_solver`. tau_a: If :math:`<1`, defines how much unbalanced the problem is - on the first marginal. + on the first marginal. tau_b: If :math:`< 1`, defines how much unbalanced the problem is - on the second marginal. - mlp_eta: Neural network to learn the left rescaling function as suggested - in :cite:`TODO`. If :obj:`None`, the left rescaling factor is not learnt. - mlp_xi: Neural network to learn the right rescaling function as suggested - in :cite:`TODO`. If :obj:`None`, the right rescaling factor is not learnt. + on the second marginal. 
+ rescaling_a: Neural network to learn the left rescaling function as + suggested in :cite:`eyring:23`. If :obj:`None`, the left rescaling factor + is not learnt. + rescaling_b: Neural network to learn the right rescaling function as + suggested in :cite:`eyring:23`. If :obj:`None`, the right rescaling factor + is not learnt. unbalanced_kwargs: Keyword arguments for the unbalancedness solver. callback_fn: Callback function. num_eval_samples: Number of samples to evaluate on during evaluation. @@ -106,8 +108,8 @@ def __init__( "median"]] = "mean", tau_a: float = 1.0, tau_b: float = 1.0, - mlp_eta: Callable[[jnp.ndarray], float] = None, - mlp_xi: Callable[[jnp.ndarray], float] = None, + rescaling_a: Callable[[jnp.ndarray], float] = None, + rescaling_b: Callable[[jnp.ndarray], float] = None, unbalanced_kwargs: Dict[str, Any] = types.MappingProxyType({}), callback_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray, jnp.ndarray], Any]] = None, @@ -130,8 +132,8 @@ def __init__( cond_dim=cond_dim, tau_a=tau_a, tau_b=tau_b, - mlp_eta=mlp_eta, - mlp_xi=mlp_xi, + rescaling_a=rescaling_a, + rescaling_b=rescaling_b, unbalanced_kwargs=unbalanced_kwargs, ) @@ -332,7 +334,7 @@ def _valid_step(self, valid_loader, iter): @property def learn_rescaling(self) -> bool: """Whether to learn at least one rescaling factor.""" - return self.mlp_eta is not None or self.mlp_xi is not None + return self.rescaling_a is not None or self.rescaling_b is not None def save(self, path: str): """Save the model. 
diff --git a/src/ott/neural/models/base_solver.py b/src/ott/neural/models/base_solver.py index 3f807c4cc..2a82ab610 100644 --- a/src/ott/neural/models/base_solver.py +++ b/src/ott/neural/models/base_solver.py @@ -256,10 +256,10 @@ def __init__( cond_dim: Optional[int], tau_a: float = 1.0, tau_b: float = 1.0, - mlp_eta: Optional[Callable[[jnp.ndarray, Optional[jnp.ndarray]], - jnp.ndarray]] = None, - mlp_xi: Optional[Callable[[jnp.ndarray, Optional[jnp.ndarray]], - jnp.ndarray]] = None, + rescaling_a: Optional[Callable[[jnp.ndarray, Optional[jnp.ndarray]], + jnp.ndarray]] = None, + rescaling_b: Optional[Callable[[jnp.ndarray, Optional[jnp.ndarray]], + jnp.ndarray]] = None, seed: Optional[int] = None, opt_eta: Optional[optax.GradientTransformation] = None, opt_xi: Optional[optax.GradientTransformation] = None, @@ -275,8 +275,8 @@ def __init__( self.cond_dim = cond_dim self.tau_a = tau_a self.tau_b = tau_b - self.mlp_eta = mlp_eta - self.mlp_xi = mlp_xi + self.rescaling_a = rescaling_a + self.rescaling_b = rescaling_b self.seed = seed self.opt_eta = opt_eta self.opt_xi = opt_xi @@ -338,20 +338,20 @@ def _setup(self, source_dim: int, target_dim: int, cond_dim: int): self.rng_unbalanced, 3 ) self.unbalancedness_step_fn = self._get_rescaling_step_fn() - if self.mlp_eta is not None: + if self.rescaling_a is not None: self.opt_eta = ( self.opt_eta if self.opt_eta is not None else optax.adamw(learning_rate=1e-4, weight_decay=1e-10) ) - self.state_eta = self.mlp_eta.create_train_state( + self.state_eta = self.rescaling_a.create_train_state( rng_eta, self.opt_eta, source_dim ) - if self.mlp_xi is not None: + if self.rescaling_b is not None: self.opt_xi = ( self.opt_xi if self.opt_xi is not None else optax.adamw(learning_rate=1e-4, weight_decay=1e-10) ) - self.state_xi = self.mlp_xi.create_train_state( + self.state_xi = self.rescaling_b.create_train_state( rng_xi, self.opt_xi, target_dim ) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 
44ad21428..a4c221d40 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -350,8 +350,8 @@ def test_genot_linear_learn_rescaling( optimizer = optax.adam(learning_rate=1e-3) tau_a = 0.9 tau_b = 0.2 - mlp_eta = RescalingMLP(hidden_dim=4, condition_dim=condition_dim) - mlp_xi = RescalingMLP(hidden_dim=4, condition_dim=condition_dim) + rescaling_a = RescalingMLP(hidden_dim=4, condition_dim=condition_dim) + rescaling_b = RescalingMLP(hidden_dim=4, condition_dim=condition_dim) genot = GENOT( neural_vf, input_dim=source_dim, @@ -367,8 +367,8 @@ def test_genot_linear_learn_rescaling( time_sampler=time_sampler, tau_a=tau_a, tau_b=tau_b, - mlp_eta=mlp_eta, - mlp_xi=mlp_xi, + rescaling_a=rescaling_a, + rescaling_b=rescaling_b, ) genot(data_loader, data_loader) diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index 0af948705..74f483654 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -194,8 +194,8 @@ def test_flow_matching_learn_rescaling( tau_a = 0.9 tau_b = 0.2 - mlp_eta = RescalingMLP(hidden_dim=4, condition_dim=condition_dim) - mlp_xi = RescalingMLP(hidden_dim=4, condition_dim=condition_dim) + rescaling_a = RescalingMLP(hidden_dim=4, condition_dim=condition_dim) + rescaling_b = RescalingMLP(hidden_dim=4, condition_dim=condition_dim) fm = OTFlowMatching( neural_vf, input_dim=source_dim, @@ -208,8 +208,8 @@ def test_flow_matching_learn_rescaling( optimizer=optimizer, tau_a=tau_a, tau_b=tau_b, - mlp_eta=mlp_eta, - mlp_xi=mlp_xi, + rescaling_a=rescaling_a, + rescaling_b=rescaling_b, ) fm(data_loader, data_loader) From 2f5fa52517a8a97f0cff6735cffbf0c44c5812a9 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 5 Dec 2023 17:11:15 +0100 Subject: [PATCH 063/186] add more references --- docs/references.bib | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/docs/references.bib b/docs/references.bib index e0c83a6ee..f161c1570 100644 --- a/docs/references.bib +++ b/docs/references.bib 
@@ -834,3 +834,33 @@ @misc{klein_uscidda:23 title={Generative Entropic Neural Optimal Transport To Map Within and Across Spaces}, year={2023}, } + +@misc{lipman:22, + author={Lipman, Yaron and Chen, Ricky TQ and Ben-Hamu, Heli and Nickel, Maximilian and Le, Matt}, + doi = {10.48550/arXiv.2210.02747}, + eprint={2210.02747}, + eprintclass = {stat.ML}, + eprinttype = {arXiv}, + title={Flow matching for generative modeling}, + year={2022}, +} + +@misc{tong:23, + author={Tong, Alexander and Malkin, Nikolay and Huguet, Guillaume and Zhang, Yanlei and {Rector-Brooks}, Jarrid and Fatras, Kilian and Wolf, Guy and Bengio, Yoshua}, + doi={10.48550/arXiv.2302.00482}, + eprint={2302.00482}, + eprintclass = {stat.ML}, + eprinttype = {arXiv}, + title={Improving and Generalizing Flow-Based Generative Models with Minibatch Optimal Transport}, + year={2023}, +} + +@misc{pooladian:23, + author={Pooladian, Aram-Alexandre and Ben-Hamu, Heli and Domingo-Enrich, Carles and Amos, Brandon and Lipman, Yaron and Chen, Ricky}, + doi={10.48550/arXiv.2304.14772}, + eprint={2304.14772}, + eprintclass = {stat.ML}, + eprinttype = {arXiv}, + title={Multisample flow matching: Straightening flows with minibatch couplings}, + year={2023} +} From 9ad992431c6ec560471e098426791f594b54f72a Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 5 Dec 2023 17:22:23 +0100 Subject: [PATCH 064/186] rename keys_model to rng --- src/ott/neural/flows/genot.py | 7 +++---- src/ott/neural/flows/models.py | 4 ++-- src/ott/neural/flows/otfm.py | 7 ++----- src/ott/neural/models/base_solver.py | 12 ++++++------ 4 files changed, 13 insertions(+), 17 deletions(-) diff --git a/src/ott/neural/flows/genot.py b/src/ott/neural/flows/genot.py index dedbf20ec..e23d9ba06 100644 --- a/src/ott/neural/flows/genot.py +++ b/src/ott/neural/flows/genot.py @@ -340,7 +340,7 @@ def step_fn( def loss_fn( params: jnp.ndarray, batch: Dict[str, jnp.array], - keys_model: jax.random.PRNGKeyArray + rng: jax.random.PRNGKeyArray ): x_t = 
self.flow.compute_xt( batch["noise"], batch["time"], batch["latent"], batch["target"] @@ -355,9 +355,8 @@ def loss_fn( if batch[el] is not None ], axis=1) - v_t = jax.vmap(apply_fn)( - t=batch["time"], x=x_t, condition=cond_input, keys_model=keys_model - ) + v_t = jax.vmap(apply_fn + )(t=batch["time"], x=x_t, condition=cond_input, rng=rng) u_t = self.flow.compute_ut( batch["time"], batch["latent"], batch["target"] ) diff --git a/src/ott/neural/flows/models.py b/src/ott/neural/flows/models.py index 0177383c9..badc91232 100644 --- a/src/ott/neural/flows/models.py +++ b/src/ott/neural/flows/models.py @@ -98,7 +98,7 @@ def __call__( t: jnp.ndarray, x: jnp.ndarray, condition: Optional[jnp.ndarray] = None, - keys_model: Optional[jnp.ndarray] = None, + rng: Optional[jnp.ndarray] = None, ) -> jnp.ndarray: """Forward pass through the neural vector field. @@ -106,7 +106,7 @@ def __call__( t: Time of shape (batch_size, 1). x: Data of shape (batch_size, output_dim). condition: Conditioning vector. - keys_model: Random number generator. + rng: Random number generator. Returns: Output of the neural vector field. 
diff --git a/src/ott/neural/flows/otfm.py b/src/ott/neural/flows/otfm.py index d08032bad..e27ff2582 100644 --- a/src/ott/neural/flows/otfm.py +++ b/src/ott/neural/flows/otfm.py @@ -187,7 +187,7 @@ def step_fn( def loss_fn( params: jnp.ndarray, t: jnp.ndarray, noise: jnp.ndarray, - batch: Dict[str, jnp.ndarray], keys_model: jax.random.PRNGKeyArray + batch: Dict[str, jnp.ndarray], rng: jax.random.PRNGKeyArray ) -> jnp.ndarray: x_t = self.flow.compute_xt( @@ -197,10 +197,7 @@ def loss_fn( state_neural_vector_field.apply_fn, {"params": params} ) v_t = jax.vmap(apply_fn)( - t=t, - x=x_t, - condition=batch["source_conditions"], - keys_model=keys_model + t=t, x=x_t, condition=batch["source_conditions"], rng=rng ) u_t = self.flow.compute_ut(t, batch["source_lin"], batch["target_lin"]) return jnp.mean((v_t - u_t) ** 2) diff --git a/src/ott/neural/models/base_solver.py b/src/ott/neural/models/base_solver.py index 2a82ab610..a5521e1e6 100644 --- a/src/ott/neural/models/base_solver.py +++ b/src/ott/neural/models/base_solver.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import abc -from pathlib import Path -from types import MappingProxyType +import pathlib +import types from typing import Any, Callable, Dict, Literal, Mapping, Optional, Tuple, Union import jax @@ -55,11 +55,11 @@ def transport(self, *args: Any, forward: bool, **kwargs: Any) -> Any: """Transport.""" @abc.abstractmethod - def save(self, path: Path): + def save(self, path: pathlib.Path): """Save the model.""" @abc.abstractmethod - def load(self, path: Path): + def load(self, path: pathlib.Path): """Load the model.""" @property @@ -266,7 +266,7 @@ def __init__( resample_epsilon: float = 1e-2, scale_cost: Union[bool, int, float, Literal["mean", "max_cost", "median"]] = "mean", - sinkhorn_kwargs: Mapping[str, Any] = MappingProxyType({}), + sinkhorn_kwargs: Mapping[str, Any] = types.MappingProxyType({}), **_: Any, ): self.rng_unbalanced = rng @@ -299,7 +299,7 @@ def _get_compute_unbalanced_marginals( resample_epsilon: float, scale_cost: Union[bool, int, float, Literal["mean", "max_cost", "median"]] = "mean", - sinkhorn_kwargs: Dict[str, Any] = MappingProxyType({}), + sinkhorn_kwargs: Dict[str, Any] = types.MappingProxyType({}), ) -> Tuple[jnp.ndarray, jnp.ndarray]: """Compute the unbalanced source and target marginals for a batch.""" From 0addc7a6044af468f2e015c8346215a90d84650d Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 5 Dec 2023 17:27:42 +0100 Subject: [PATCH 065/186] fix tests regarding time sampling --- tests/neural/genot_test.py | 16 ++++++++-------- tests/neural/otfm_test.py | 12 ++++++------ 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index a4c221d40..148cc935d 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import functools from typing import Iterator, Optional import pytest @@ -20,9 +21,9 @@ import optax from ott.geometry import costs -from ott.neural.flows.flows import OffsetUniformSampler, UniformSampler from ott.neural.flows.genot import GENOT from ott.neural.flows.models import VelocityField +from ott.neural.flows.samplers import sample_uniformly from ott.neural.models.models import RescalingMLP from ott.solvers.linear import sinkhorn from ott.solvers.quadratic import gromov_wasserstein @@ -53,7 +54,7 @@ def test_genot_linear_unconditional( latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() - time_sampler = UniformSampler() + time_sampler = sample_uniformly optimizer = optax.adam(learning_rate=1e-3) genot = GENOT( neural_vf, @@ -103,7 +104,7 @@ def test_genot_quad_unconditional( latent_embed_dim=5, ) ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) - time_sampler = OffsetUniformSampler(1e-3) + time_sampler = functools.patial(sample_uniformly, offset=1e-2) optimizer = optax.adam(learning_rate=1e-3) genot = GENOT( neural_vf, @@ -149,7 +150,6 @@ def test_genot_fused_unconditional( latent_embed_dim=5, ) ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) - UniformSampler() optimizer = optax.adam(learning_rate=1e-3) genot = GENOT( neural_vf, @@ -196,7 +196,7 @@ def test_genot_linear_conditional( latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() - time_sampler = UniformSampler() + time_sampler = sample_uniformly optimizer = optax.adam(learning_rate=1e-3) genot = GENOT( neural_vf, @@ -243,7 +243,7 @@ def test_genot_quad_conditional( latent_embed_dim=5, ) ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) - time_sampler = UniformSampler() + time_sampler = sample_uniformly optimizer = optax.adam(learning_rate=1e-3) genot = GENOT( neural_vf, @@ -290,7 +290,7 @@ def test_genot_fused_conditional( latent_embed_dim=5, ) ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) - time_sampler = UniformSampler() + time_sampler = 
sample_uniformly optimizer = optax.adam(learning_rate=1e-3) genot = GENOT( neural_vf, @@ -346,7 +346,7 @@ def test_genot_linear_learn_rescaling( latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() - time_sampler = UniformSampler() + time_sampler = sample_uniformly optimizer = optax.adam(learning_rate=1e-3) tau_a = 0.9 tau_b = 0.2 diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index 74f483654..1230a638b 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -11,6 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import functools from typing import Iterator, Type import pytest @@ -23,11 +24,10 @@ BaseFlow, BrownianNoiseFlow, ConstantNoiseFlow, - OffsetUniformSampler, - UniformSampler, ) from ott.neural.flows.models import VelocityField from ott.neural.flows.otfm import OTFlowMatching +from ott.neural.flows.samplers import sample_uniformly from ott.neural.models.models import RescalingMLP from ott.solvers.linear import sinkhorn @@ -47,7 +47,7 @@ def test_flow_matching(self, data_loader_gaussian, flow: Type[BaseFlow]): latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() - time_sampler = UniformSampler() + time_sampler = sample_uniformly optimizer = optax.adam(learning_rate=1e-3) fm = OTFlowMatching( neural_vf, @@ -92,7 +92,7 @@ def test_flow_matching_with_conditions( latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() - time_sampler = OffsetUniformSampler(1e-6) + time_sampler = functools.partial(sample_uniformly, offset=1e-5) optimizer = optax.adam(learning_rate=1e-3) fm = OTFlowMatching( neural_vf, @@ -140,7 +140,7 @@ def test_flow_matching_conditional( latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() - time_sampler = UniformSampler() + time_sampler = sample_uniformly optimizer = optax.adam(learning_rate=1e-3) fm = OTFlowMatching( neural_vf, @@ -188,7 +188,7 @@ def 
test_flow_matching_learn_rescaling( latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() - time_sampler = UniformSampler() + time_sampler = sample_uniformly flow = ConstantNoiseFlow(1.0) optimizer = optax.adam(learning_rate=1e-3) From be68393644d35d7918dbea111fbb8417fd0e2608 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 5 Dec 2023 17:31:34 +0100 Subject: [PATCH 066/186] fix typo in tests --- tests/neural/genot_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 148cc935d..7098e7419 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -104,7 +104,7 @@ def test_genot_quad_unconditional( latent_embed_dim=5, ) ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) - time_sampler = functools.patial(sample_uniformly, offset=1e-2) + time_sampler = functools.partial(sample_uniformly, offset=1e-2) optimizer = optax.adam(learning_rate=1e-3) genot = GENOT( neural_vf, From b5bdc4a3d91530d2e6430034625476f32a3a35a2 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 5 Dec 2023 17:43:35 +0100 Subject: [PATCH 067/186] rename neural_vector_field to velocity_field everywhere --- src/ott/neural/flows/genot.py | 38 +++++++++++++--------------- src/ott/neural/flows/otfm.py | 34 ++++++++++++------------- src/ott/neural/models/base_solver.py | 2 ++ 3 files changed, 35 insertions(+), 39 deletions(-) diff --git a/src/ott/neural/flows/genot.py b/src/ott/neural/flows/genot.py index e23d9ba06..622456a26 100644 --- a/src/ott/neural/flows/genot.py +++ b/src/ott/neural/flows/genot.py @@ -44,7 +44,7 @@ class GENOT(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): """The GENOT training class as introduced in :cite:`klein_uscidda:23`. Args: - neural_vector_field: Neural vector field parameterized by a neural network. + velocity_field: Neural vector field parameterized by a neural network. input_dim: Dimension of the data in the source distribution. 
output_dim: Dimension of the data in the target distribution. cond_dim: Dimension of the conditioning variable. @@ -68,7 +68,7 @@ class GENOT(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): terms, i.e. both quadratic terms and, if applicable, the linear temr. If of type :class:`dict`, the keys are expected to be `scale_cost_xx`, `scale_cost_yy`, and if applicable, `scale_cost_xy`. - optimizer: Optimizer for `neural_vector_field`. + optimizer: Optimizer for `velocity_field`. flow: Flow between latent distribution and target distribution. time_sampler: Sampler for the time. checkpoint_manager: Checkpoint manager. @@ -95,7 +95,7 @@ class GENOT(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): def __init__( self, - neural_vector_field: Callable[[ + velocity_field: Callable[[ jnp.ndarray, jnp.ndarray, Optional[jnp.ndarray], Optional[jnp.ndarray] ], jnp.ndarray], input_dim: int, @@ -158,8 +158,8 @@ def __init__( ) self.rng = utils.default_prng_key(rng) - self.neural_vector_field = neural_vector_field - self.state_neural_vector_field: Optional[TrainState] = None + self.velocity_field = velocity_field + self.state_velocity_field: Optional[TrainState] = None self.flow = flow self.time_sampler = time_sampler self.optimizer = optimizer @@ -197,8 +197,8 @@ def setup(self): kwargs Keyword arguments for the setup function """ - self.state_neural_vector_field = ( - self.neural_vector_field.create_train_state( + self.state_velocity_field = ( + self.velocity_field.create_train_state( self.rng, self.optimizer, self.output_dim ) ) @@ -301,8 +301,8 @@ def __call__(self, train_loader, valid_loader): for key, arr in batch.items() } - self.state_neural_vector_field, loss = self.step_fn( - rng_step_fn, self.state_neural_vector_field, batch + self.state_velocity_field, loss = self.step_fn( + rng_step_fn, self.state_velocity_field, batch ) if self.learn_rescaling: ( @@ -320,9 +320,7 @@ def __call__(self, train_loader, valid_loader): if iteration % self.valid_freq == 0: 
self._valid_step(valid_loader, iteration) if self.checkpoint_manager is not None: - states_to_save = { - "state_neural_vector_field": self.state_neural_vector_field - } + states_to_save = {"state_velocity_field": self.state_velocity_field} if self.state_eta is not None: states_to_save["state_eta"] = self.state_eta if self.state_xi is not None: @@ -334,7 +332,7 @@ def _get_step_fn(self) -> Callable: @jax.jit def step_fn( key: jax.random.PRNGKeyArray, - state_neural_vector_field: train_state.TrainState, + state_velocity_field: train_state.TrainState, batch: Dict[str, jnp.array], ): @@ -346,7 +344,7 @@ def loss_fn( batch["noise"], batch["time"], batch["latent"], batch["target"] ) apply_fn = functools.partial( - state_neural_vector_field.apply_fn, {"params": params} + state_velocity_field.apply_fn, {"params": params} ) cond_input = jnp.concatenate([ @@ -365,9 +363,9 @@ def loss_fn( keys_model = jax.random.split(key, len(batch["noise"])) grad_fn = jax.value_and_grad(loss_fn, has_aux=False) - loss, grads = grad_fn(state_neural_vector_field.params, batch, keys_model) + loss, grads = grad_fn(state_velocity_field.params, batch, keys_model) - return state_neural_vector_field.apply_gradients(grads=grads), loss + return state_velocity_field.apply_gradients(grads=grads), loss return step_fn @@ -383,9 +381,7 @@ def transport( This method pushes-forward the `source` to its conditional distribution by solving the neural ODE parameterized by the - :attr:`~ott.neural.solvers.GENOTg.neural_vector_field` from - :attr:`~ott.neural.flows.BaseTimeSampler.low` to - :attr:`~ott.neural.flows.BaseTimeSampler.high`. + :attr:`~ott.neural.flows.genot.velocity_field` Args: source: Data to transport. @@ -415,8 +411,8 @@ def transport( def solve_ode(input: jnp.ndarray, cond: jnp.ndarray): return diffrax.diffeqsolve( diffrax.ODETerm( - lambda t, x, args: self.state_neural_vector_field. - apply_fn({"params": self.state_neural_vector_field.params}, + lambda t, x, args: self.state_velocity_field. 
+ apply_fn({"params": self.state_velocity_field.params}, t=t, x=x, condition=cond) diff --git a/src/ott/neural/flows/otfm.py b/src/ott/neural/flows/otfm.py index e27ff2582..ad43736c6 100644 --- a/src/ott/neural/flows/otfm.py +++ b/src/ott/neural/flows/otfm.py @@ -54,7 +54,7 @@ class OTFlowMatching(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): (:cite`tong:23`, :cite:`pooladian:23`). Args: - neural_vector_field: Neural vector field parameterized by a neural network. + velocity_field: Neural vector field parameterized by a neural network. input_dim: Dimension of the input data. cond_dim: Dimension of the conditioning variable. iterations: Number of iterations. @@ -65,7 +65,7 @@ class OTFlowMatching(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): :cite:`lipman:22`. flow: Flow between source and target distribution. time_sampler: Sampler for the time. - optimizer: Optimizer for `neural_vector_field`. + optimizer: Optimizer for `velocity_field`. checkpoint_manager: Checkpoint manager. epsilon: Entropy regularization term of the OT OT problem solved by the `ot_solver`. 
@@ -90,7 +90,7 @@ class OTFlowMatching(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): def __init__( self, - neural_vector_field: Callable[[ + velocity_field: Callable[[ jnp.ndarray, jnp.ndarray, Optional[jnp.ndarray], Optional[jnp.ndarray] ], jnp.ndarray], input_dim: int, @@ -137,7 +137,7 @@ def __init__( unbalanced_kwargs=unbalanced_kwargs, ) - self.neural_vector_field = neural_vector_field + self.velocity_field = velocity_field self.input_dim = input_dim self.ot_solver = ot_solver self.flow = flow @@ -157,8 +157,8 @@ def __init__( def setup(self): """Setup :class:`OTFlowMatching`.""" - self.state_neural_vector_field = ( - self.neural_vector_field.create_train_state( + self.state_velocity_field = ( + self.velocity_field.create_train_state( self.rng, self.optimizer, self.input_dim ) ) @@ -181,7 +181,7 @@ def _get_step_fn(self) -> Callable: @jax.jit def step_fn( key: jax.random.PRNGKeyArray, - state_neural_vector_field: train_state.TrainState, + state_velocity_field: train_state.TrainState, batch: Dict[str, jnp.ndarray], ) -> Tuple[Any, Any]: @@ -194,7 +194,7 @@ def loss_fn( noise, t, batch["source_lin"], batch["target_lin"] ) apply_fn = functools.partial( - state_neural_vector_field.apply_fn, {"params": params} + state_velocity_field.apply_fn, {"params": params} ) v_t = jax.vmap(apply_fn)( t=t, x=x_t, condition=batch["source_conditions"], rng=rng @@ -209,9 +209,9 @@ def loss_fn( noise = self.sample_noise(key_noise, batch_size) grad_fn = jax.value_and_grad(loss_fn) loss, grads = grad_fn( - state_neural_vector_field.params, t, noise, batch, keys_model + state_velocity_field.params, t, noise, batch, keys_model ) - return state_neural_vector_field.apply_gradients(grads=grads), loss + return state_velocity_field.apply_gradients(grads=grads), loss return step_fn @@ -237,8 +237,8 @@ def __call__(self, train_loader, valid_loader): (batch["source_lin"], batch["source_conditions"]), (batch["target_lin"], batch["target_conditions"]) ) - self.state_neural_vector_field, 
loss = self.step_fn( - rng_step_fn, self.state_neural_vector_field, batch + self.state_velocity_field, loss = self.step_fn( + rng_step_fn, self.state_velocity_field, batch ) curr_loss += loss if iter % self.logging_freq == 0: @@ -260,9 +260,7 @@ def __call__(self, train_loader, valid_loader): if iter % self.valid_freq == 0: self._valid_step(valid_loader, iter) if self.checkpoint_manager is not None: - states_to_save = { - "state_neural_vector_field": self.state_neural_vector_field - } + states_to_save = {"state_velocity_field": self.state_velocity_field} if self.state_eta is not None: states_to_save["state_eta"] = self.state_eta if self.state_xi is not None: @@ -279,7 +277,7 @@ def transport( """Transport data with the learnt map. This method solves the neural ODE parameterized by the - :attr:`~ott.neural.solvers.OTFlowMatching.neural_vector_field` from + :attr:`~ott.neural.solvers.OTFlowMatching.velocity_field` from :attr:`~ott.neural.flows.BaseTimeSampler.low` to :attr:`~ott.neural.flows.BaseTimeSampler.high` if `forward` is `True`, else the other way round. @@ -304,8 +302,8 @@ def transport( def solve_ode(input: jnp.ndarray, cond: jnp.ndarray): return diffrax.diffeqsolve( diffrax.ODETerm( - lambda t, x, args: self.state_neural_vector_field. - apply_fn({"params": self.state_neural_vector_field.params}, + lambda t, x, args: self.state_velocity_field. 
+ apply_fn({"params": self.state_velocity_field.params}, t=t, x=x, condition=cond) diff --git a/src/ott/neural/models/base_solver.py b/src/ott/neural/models/base_solver.py index a5521e1e6..98272c028 100644 --- a/src/ott/neural/models/base_solver.py +++ b/src/ott/neural/models/base_solver.py @@ -164,6 +164,7 @@ def match_pairs( linear_problem.LinearProblem(geom, tau_a=tau_a, tau_b=tau_b) ).matrix + @jax.jit def match_pairs_filtered( x_lin: jnp.ndarray, x_quad: jnp.ndarray, y_lin: jnp.ndarray, y_quad: jnp.ndarray @@ -213,6 +214,7 @@ def _get_gromov_match_fn( else: scale_cost_xx = scale_cost_yy = scale_cost_xy = scale_cost + @jax.jit def match_pairs( x_lin: Optional[jnp.ndarray], x_quad: Tuple[jnp.ndarray, jnp.ndarray], From bebbbd0db409ce2001160da8e9ad6b2afbe931f6 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 5 Dec 2023 18:02:09 +0100 Subject: [PATCH 068/186] fix OTFlowMatching.transport --- src/ott/neural/flows/models.py | 7 ++----- src/ott/neural/flows/otfm.py | 15 ++++++++------- src/ott/neural/models/models.py | 4 +--- 3 files changed, 11 insertions(+), 15 deletions(-) diff --git a/src/ott/neural/flows/models.py b/src/ott/neural/flows/models.py index badc91232..bdc1ab4aa 100644 --- a/src/ott/neural/flows/models.py +++ b/src/ott/neural/flows/models.py @@ -62,7 +62,7 @@ class VelocityField(nn.Module): """ output_dim: int latent_embed_dim: int - condition_dim: Optional[int] = None + condition_dim: int = 0 condition_embed_dim: Optional[int] = None t_embed_dim: Optional[int] = None joint_hidden_dim: Optional[int] = None @@ -111,9 +111,6 @@ def __call__( Returns: Output of the neural vector field. 
""" - if self.condition_dim is None: - assert condition is None - t = flow_layers.CyclicalTimeEncoder(n_frequencies=self.n_frequencies)(t) t_layer = layers.MLPBlock( dim=self.t_embed_dim, @@ -131,7 +128,7 @@ def __call__( ) x = x_layer(x) - if self.condition_dim is not None: + if self.condition_dim > 0: condition_layer = layers.MLPBlock( dim=self.condition_embed_dim, out_dim=self.condition_embed_dim, diff --git a/src/ott/neural/flows/otfm.py b/src/ott/neural/flows/otfm.py index ad43736c6..1be3bdc16 100644 --- a/src/ott/neural/flows/otfm.py +++ b/src/ott/neural/flows/otfm.py @@ -272,20 +272,22 @@ def transport( data: jnp.array, condition: Optional[jnp.ndarray] = None, forward: bool = True, + t_0: float = 0.0, + t_1: float = 1.0, diffeqsolve_kwargs: Dict[str, Any] = types.MappingProxyType({}) ) -> diffrax.Solution: """Transport data with the learnt map. - This method solves the neural ODE parameterized by the - :attr:`~ott.neural.solvers.OTFlowMatching.velocity_field` from - :attr:`~ott.neural.flows.BaseTimeSampler.low` to - :attr:`~ott.neural.flows.BaseTimeSampler.high` if `forward` is `True`, - else the other way round. + This method pushes-forward the `source` by + solving the neural ODE parameterized by the + :attr:`~ott.neural.flows.OTFlowMatching.velocity_field`. Args: data: Initial condition of the ODE. condition: Condition of the input data. forward: If `True` integrates forward, otherwise backwards. + t_0: Starting point of integration. + t_1: End point of integration. diffeqsolve_kwargs: Keyword arguments for the ODE solver. 
Returns: @@ -295,8 +297,7 @@ def transport( """ diffeqsolve_kwargs = dict(diffeqsolve_kwargs) - t0, t1 = (self.time_sampler.low, self.time_sampler.high - ) if forward else (self.time_sampler.high, self.time_sampler.low) + t0, t1 = (t_0, t_1) if forward else (t_1, t_0) @jax.jit def solve_ode(input: jnp.ndarray, cond: jnp.ndarray): diff --git a/src/ott/neural/models/models.py b/src/ott/neural/models/models.py index e84e3560a..56982270f 100644 --- a/src/ott/neural/models/models.py +++ b/src/ott/neural/models/models.py @@ -107,7 +107,7 @@ class RescalingMLP(nn.Module): act_fn: Activation function. Returns: - Non-negative rescaling factors. + Non-negative escaling factors. """ hidden_dim: int condition_dim: Optional[int] = None @@ -129,8 +129,6 @@ def __call__( Returns: Estimated rescaling factors. """ - if self.condition_dim is None: - assert condition is None x_layer = layers.MLPBlock( dim=self.hidden_dim, out_dim=self.hidden_dim, From f4c05c488978d842de988f967ab297e925b0d1ac Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 5 Dec 2023 18:07:26 +0100 Subject: [PATCH 069/186] fix rescaling networks --- src/ott/neural/models/models.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/ott/neural/models/models.py b/src/ott/neural/models/models.py index 56982270f..93af5b58d 100644 --- a/src/ott/neural/models/models.py +++ b/src/ott/neural/models/models.py @@ -110,7 +110,7 @@ class RescalingMLP(nn.Module): Non-negative escaling factors. 
""" hidden_dim: int - condition_dim: Optional[int] = None + condition_dim: int = 0 num_layers_per_block: int = 3 act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.selu @@ -137,7 +137,7 @@ def __call__( ) x = x_layer(x) - if self.condition_dim is not None: + if self.condition_dim > 0: condition_layer = layers.MLPBlock( dim=self.hidden_dim, out_dim=self.hidden_dim, From 4d9992e61cdadb9d5d2633fd31456e918b0ab29b Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Fri, 5 Jan 2024 15:12:05 +0100 Subject: [PATCH 070/186] Update src/ott/neural/flows/flows.py Co-authored-by: nvesseron <96598529+nvesseron@users.noreply.github.com> --- src/ott/neural/flows/flows.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ott/neural/flows/flows.py b/src/ott/neural/flows/flows.py index 0dce912aa..5ddaa56b8 100644 --- a/src/ott/neural/flows/flows.py +++ b/src/ott/neural/flows/flows.py @@ -101,7 +101,7 @@ class StraightFlow(BaseFlow, abc.ABC): def compute_mu_t( # noqa: D102 self, t: jnp.ndarray, src: jnp.ndarray, tgt: jnp.ndarray ) -> jnp.ndarray: - return t * src + (1 - t) * tgt + return (1 - t) * src + t * tgt def compute_ut( self, t: jnp.ndarray, src: jnp.ndarray, tgt: jnp.ndarray From 51221ddaa53061c1f8cc08f100e9616a26ba06e3 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Fri, 5 Jan 2024 15:12:24 +0100 Subject: [PATCH 071/186] Update src/ott/neural/flows/flows.py Co-authored-by: nvesseron <96598529+nvesseron@users.noreply.github.com> --- src/ott/neural/flows/flows.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ott/neural/flows/flows.py b/src/ott/neural/flows/flows.py index 5ddaa56b8..572abba91 100644 --- a/src/ott/neural/flows/flows.py +++ b/src/ott/neural/flows/flows.py @@ -154,4 +154,4 @@ def compute_sigma_t(self, t: jnp.ndarray) -> jnp.ndarray: Returns: Standard deviation of the probablity path at time :math:`t`. 
""" - return jnp.sqrt(self.sigma * t * (1 - t)) + return self.sigma * jnp.sqrt(t * (1 - t)) From 6c56dfe4c9809caf3f2f87f8d70254e3b14cd8f7 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Mon, 8 Jan 2024 19:13:36 +0100 Subject: [PATCH 072/186] test for scale_cost --- tests/neural/genot_test.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 7098e7419..e0a27ea72 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. import functools -from typing import Iterator, Optional +from typing import Iterator, Optional, Union, Literal import pytest @@ -31,10 +31,11 @@ class TestGENOT: + @pytest.mark.parameterize("scale_cost", ["mean", 2.0]) @pytest.mark.parametrize("k_samples_per_x", [1, 2]) @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) def test_genot_linear_unconditional( - self, genot_data_loader_linear: Iterator, k_samples_per_x: int, + self, genot_data_loader_linear: Iterator, scale_cost: Union[float, Literal["mean"]], k_samples_per_x: int, solver_latent_to_data: Optional[str] ): solver_latent_to_data = ( From cc045fa1ab741d8ce7bcfb3ab59d445a1b97b350 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 9 Jan 2024 11:20:13 +0100 Subject: [PATCH 073/186] update test for scale_cost --- tests/neural/genot_test.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index e0a27ea72..0ba932587 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -31,7 +31,7 @@ class TestGENOT: - @pytest.mark.parameterize("scale_cost", ["mean", 2.0]) + @pytest.mark.parametrize("scale_cost", ["mean", 2.0]) @pytest.mark.parametrize("k_samples_per_x", [1, 2]) @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) def 
test_genot_linear_unconditional( @@ -67,7 +67,7 @@ def test_genot_linear_unconditional( ot_solver=ot_solver, epsilon=0.1, cost_fn=costs.SqEuclidean(), - scale_cost=1.0, + scale_cost=scale_cost, optimizer=optimizer, time_sampler=time_sampler, k_samples_per_x=k_samples_per_x, From f4de3394c7be6f67e91df605aaa156184385edf9 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 9 Jan 2024 15:01:24 +0100 Subject: [PATCH 074/186] fix bug for scale_cost --- src/ott/neural/flows/genot.py | 27 ++++++++++++++++----------- tests/neural/genot_test.py | 5 +++-- 2 files changed, 19 insertions(+), 13 deletions(-) diff --git a/src/ott/neural/flows/genot.py b/src/ott/neural/flows/genot.py index 622456a26..94f609c99 100644 --- a/src/ott/neural/flows/genot.py +++ b/src/ott/neural/flows/genot.py @@ -195,7 +195,7 @@ def setup(self): Parameters ---------- kwargs - Keyword arguments for the setup function + Keyword arguments for the setup function. """ self.state_velocity_field = ( self.velocity_field.create_train_state( @@ -205,7 +205,7 @@ def setup(self): self.step_fn = self._get_step_fn() if self.solver_latent_to_data is not None: self.match_latent_to_data_fn = self._get_sinkhorn_match_fn( - self.solver_latent_to_data, **self.kwargs_solver_latent_to_data + ot_solver=self.solver_latent_to_data, **self.kwargs_solver_latent_to_data ) else: self.match_latent_to_data_fn = lambda key, x, y, **_: (x, y) @@ -213,22 +213,27 @@ def setup(self): # TODO: add graph construction function if isinstance(self.ot_solver, sinkhorn.Sinkhorn): self.match_fn = self._get_sinkhorn_match_fn( - self.ot_solver, - self.epsilon, - self.cost_fn, - self.tau_a, - self.tau_b, - self.scale_cost, + ot_solver=self.ot_solver, + epsilon=self.epsilon, + cost_fn=self.cost_fn, + scale_cost=self.scale_cost, + tau_a=self.tau_a, + tau_b=self.tau_b, filter_input=True ) else: self.match_fn = self._get_gromov_match_fn( - self.ot_solver, self.cost_fn, self.tau_a, self.tau_b, self.scale_cost, - self.fused_penalty + 
ot_solver=self.ot_solver, cost_fn=self.cost_fn, scale_cost=self.scale_cost, tau_a=self.tau_a, tau_b=self.tau_b, + fused_penalty=self.fused_penalty ) def __call__(self, train_loader, valid_loader): - """Train GENOT.""" + """Train GENOT. + + Args: + train_loader: Data loader for the training data. + valid_loader: Data loader for the validation data. + """ batch: Dict[str, jnp.array] = {} for iteration in range(self.iterations): batch = next(train_loader) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 0ba932587..b3de698ad 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. import functools -from typing import Iterator, Optional, Union, Literal +from typing import Iterator, Literal, Optional, Union import pytest @@ -35,7 +35,8 @@ class TestGENOT: @pytest.mark.parametrize("k_samples_per_x", [1, 2]) @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) def test_genot_linear_unconditional( - self, genot_data_loader_linear: Iterator, scale_cost: Union[float, Literal["mean"]], k_samples_per_x: int, + self, genot_data_loader_linear: Iterator, + scale_cost: Union[float, Literal["mean"]], k_samples_per_x: int, solver_latent_to_data: Optional[str] ): solver_latent_to_data = ( From 5db4c730a385d9900c99518ef59b193b68f606d1 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 9 Jan 2024 15:01:59 +0100 Subject: [PATCH 075/186] fix bug for scale_cost --- src/ott/neural/flows/genot.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/src/ott/neural/flows/genot.py b/src/ott/neural/flows/genot.py index 94f609c99..4efc4a9ad 100644 --- a/src/ott/neural/flows/genot.py +++ b/src/ott/neural/flows/genot.py @@ -205,7 +205,8 @@ def setup(self): self.step_fn = self._get_step_fn() if self.solver_latent_to_data is not None: self.match_latent_to_data_fn = self._get_sinkhorn_match_fn( 
- ot_solver=self.solver_latent_to_data, **self.kwargs_solver_latent_to_data + ot_solver=self.solver_latent_to_data, + **self.kwargs_solver_latent_to_data ) else: self.match_latent_to_data_fn = lambda key, x, y, **_: (x, y) @@ -223,13 +224,17 @@ def setup(self): ) else: self.match_fn = self._get_gromov_match_fn( - ot_solver=self.ot_solver, cost_fn=self.cost_fn, scale_cost=self.scale_cost, tau_a=self.tau_a, tau_b=self.tau_b, + ot_solver=self.ot_solver, + cost_fn=self.cost_fn, + scale_cost=self.scale_cost, + tau_a=self.tau_a, + tau_b=self.tau_b, fused_penalty=self.fused_penalty ) def __call__(self, train_loader, valid_loader): """Train GENOT. - + Args: train_loader: Data loader for the training data. valid_loader: Data loader for the validation data. From 72885ac75491d98e55b4f039c583134d3c2ba7b7 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Wed, 10 Jan 2024 17:52:27 +0100 Subject: [PATCH 076/186] jit solve_ode in genot --- src/ott/neural/flows/genot.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/ott/neural/flows/genot.py b/src/ott/neural/flows/genot.py index 4efc4a9ad..c12026251 100644 --- a/src/ott/neural/flows/genot.py +++ b/src/ott/neural/flows/genot.py @@ -418,6 +418,7 @@ def transport( axis=-1) t0, t1 = (0.0, 1.0) + @jax.jit def solve_ode(input: jnp.ndarray, cond: jnp.ndarray): return diffrax.diffeqsolve( diffrax.ODETerm( From 937fffcce22a3b200df826bf266879b0a3afb53a Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Wed, 7 Feb 2024 15:10:51 +0100 Subject: [PATCH 077/186] incorporate changes partially --- docs/references.bib | 52 ++++++------- pyproject.toml | 4 +- src/ott/__init__.py | 11 +-- src/ott/datasets.py | 2 +- src/ott/neural/duality/models.py | 2 +- src/ott/neural/duality/neuraldual.py | 16 +--- src/ott/neural/flows/flows.py | 18 +++-- src/ott/neural/flows/genot.py | 54 ++++++------- src/ott/neural/flows/models.py | 5 +- src/ott/neural/flows/otfm.py | 47 +++++------- src/ott/neural/flows/samplers.py | 6 +- 
src/ott/neural/gaps/map_estimator.py | 11 +-- src/ott/neural/models/base_solver.py | 51 ++++++------- src/ott/neural/models/layers.py | 17 +++-- src/ott/neural/models/models.py | 73 ++---------------- src/ott/solvers/linear/sinkhorn_lr.py | 11 +-- src/ott/solvers/quadratic/__init__.py | 7 +- .../quadratic/gromov_wasserstein_lr.py | 11 +-- .../tools/gaussian_mixture/fit_gmm_pair.py | 6 +- .../gaussian_mixture/gaussian_mixture.py | 7 +- tests/neural/conftest.py | 27 ++++--- tests/neural/genot_test.py | 14 ++-- tests/neural/otfm_test.py | 75 +++++++++---------- 23 files changed, 191 insertions(+), 336 deletions(-) diff --git a/docs/references.bib b/docs/references.bib index f161c1570..799e827d1 100644 --- a/docs/references.bib +++ b/docs/references.bib @@ -816,51 +816,51 @@ @misc{huguet:2023 } @misc{eyring:23, - author={Eyring, Luca and Klein, Dominik and Uscidda, Th{\'e}o and Palla, Giovanni and Kilbertus, Niki and Akata, Zeynep and Theis, Fabian}, - doi = {10.48550/arXiv.2311.15100}, + author = {Eyring, Luca and Klein, Dominik and Uscidda, Théo and Palla, Giovanni and Kilbertus, Niki and Akata, Zeynep and Theis, Fabian}, + doi = {10.48550/arXiv.2311.15100}, eprint = {2311.15100}, eprintclass = {stat.ML}, eprinttype = {arXiv}, - title={Unbalancedness in Neural Monge Maps Improves Unpaired Domain Translation}, - year={2023} + title = {Unbalancedness in Neural Monge Maps Improves Unpaired Domain Translation}, + year = {2023}, } @misc{klein_uscidda:23, - author={Dominik Klein and Théo Uscidda and Fabian Theis and Marco Cuturi}, - doi = {10.48550/arXiv.2310.09254}, - eprint={2310.09254}, - eprintclass = {stat.ML}, - eprinttype = {arXiv}, - title={Generative Entropic Neural Optimal Transport To Map Within and Across Spaces}, - year={2023}, + author = {Klein, Dominik and Uscidda, Théo and Theis, Fabian and Cuturi, Marco}, + doi = {10.48550/arXiv.2310.09254}, + eprint = {2310.09254}, + eprintclass = {stat.ML}, + eprinttype = {arXiv}, + title = {Generative Entropic Neural 
Optimal Transport To Map Within and Across Spaces}, + year = {2023}, } @misc{lipman:22, - author={Lipman, Yaron and Chen, Ricky TQ and Ben-Hamu, Heli and Nickel, Maximilian and Le, Matt}, - doi = {10.48550/arXiv.2210.02747}, - eprint={2210.02747}, + author = {Lipman, Yaron and Chen, Ricky TQ and Ben-Hamu, Heli and Nickel, Maximilian and Le, Matt}, + doi = {10.48550/arXiv.2210.02747}, + eprint = {2210.02747}, eprintclass = {stat.ML}, eprinttype = {arXiv}, - title={Flow matching for generative modeling}, - year={2022}, + title = {Flow matching for generative modeling}, + year = {2022}, } @misc{tong:23, - author={Tong, Alexander and Malkin, Nikolay and Huguet, Guillaume and Zhang, Yanlei and {Rector-Brooks}, Jarrid and Fatras, Kilian and Wolf, Guy and Bengio, Yoshua}, - doi={10.48550/arXiv.2302.00482}, - eprint={2302.00482}, + author = {Tong, Alexander and Malkin, Nikolay and Huguet, Guillaume and Zhang, Yanlei and {Rector-Brooks}, Jarrid and Fatras, Kilian and Wolf, Guy and Bengio, Yoshua}, + doi = {10.48550/arXiv.2302.00482}, + eprint = {2302.00482}, eprintclass = {stat.ML}, eprinttype = {arXiv}, - title={Improving and Generalizing Flow-Based Generative Models with Minibatch Optimal Transport}, - year={2023}, + title = {Improving and Generalizing Flow-Based Generative Models with Minibatch Optimal Transport}, + year = {2023}, } @misc{pooladian:23, - author={Pooladian, Aram-Alexandre and Ben-Hamu, Heli and Domingo-Enrich, Carles and Amos, Brandon and Lipman, Yaron and Chen, Ricky}, - doi={10.48550/arXiv.2304.14772}, - eprint={2304.14772}, + author = {Pooladian, Aram-Alexandre and Ben-Hamu, Heli and Domingo-Enrich, Carles and Amos, Brandon and Lipman, Yaron and Chen, Ricky}, + doi = {10.48550/arXiv.2304.14772}, + eprint = {2304.14772}, eprintclass = {stat.ML}, eprinttype = {arXiv}, - title={Multisample flow matching: Straightening flows with minibatch couplings}, - year={2023} + title = {Multisample flow matching: Straightening flows with minibatch couplings}, + year 
= {2023}, } diff --git a/pyproject.toml b/pyproject.toml index 1961a5971..5c128e7c9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -103,7 +103,6 @@ include = '\.ipynb$' [tool.isort] profile = "black" -line_length = 80 include_trailing_comma = true multi_line_output = 3 sections = ["FUTURE", "STDLIB", "THIRDPARTY", "TEST", "NUMERIC", "NEURAL", "PLOTTING", "FIRSTPARTY", "LOCALFOLDER"] @@ -289,6 +288,7 @@ ignore = [ line-length = 80 select = [ "D", # flake8-docstrings + "I", # isort "E", # pycodestyle "F", # pyflakes "W", # pycodestyle @@ -304,7 +304,7 @@ select = [ "T20", # flake8-print "RET", # flake8-raise ] -unfixable = ["I", "B", "UP", "C4", "BLE", "T20", "RET"] +unfixable = ["B", "UP", "C4", "BLE", "T20", "RET"] target-version = "py38" [tool.ruff.per-file-ignores] # TODO(michalk8): PO004 - remove `self.initialize` diff --git a/src/ott/__init__.py b/src/ott/__init__.py index dac0eb854..8d2f007c5 100644 --- a/src/ott/__init__.py +++ b/src/ott/__init__.py @@ -13,16 +13,7 @@ # limitations under the License. import contextlib -from . import ( - datasets, - geometry, - initializers, - math, - problems, - solvers, - tools, - utils, -) +from . 
import datasets, geometry, initializers, math, problems, solvers, tools, utils with contextlib.suppress(ImportError): # TODO(michalk8): add warning that neural module is not imported diff --git a/src/ott/datasets.py b/src/ott/datasets.py index e5077c87c..36ac6b561 100644 --- a/src/ott/datasets.py +++ b/src/ott/datasets.py @@ -61,7 +61,7 @@ class GaussianMixture: scale: float = 5.0 std: float = 0.5 - def __post_init__(self): + def __post_init__(self) -> None: gaussian_centers = { "simple": np.array([[0, 0]]), diff --git a/src/ott/neural/duality/models.py b/src/ott/neural/duality/models.py index baa0386c8..5c18f5eb0 100644 --- a/src/ott/neural/duality/models.py +++ b/src/ott/neural/duality/models.py @@ -69,7 +69,7 @@ class ICNN(neuraldual.BaseW2NeuralDual): def is_potential(self) -> bool: # noqa: D102 return True - def setup(self): # noqa: D102 + def setup(self) -> None: # noqa: D102 self.num_hidden = len(self.dim_hidden) if self.pos_weights: diff --git a/src/ott/neural/duality/neuraldual.py b/src/ott/neural/duality/neuraldual.py index a8f5fd273..573e7b2bb 100644 --- a/src/ott/neural/duality/neuraldual.py +++ b/src/ott/neural/duality/neuraldual.py @@ -13,17 +13,7 @@ # limitations under the License. 
import abc import warnings -from typing import ( - Any, - Callable, - Dict, - Iterator, - List, - Literal, - Optional, - Tuple, - Union, -) +from typing import Any, Callable, Dict, Iterator, List, Literal, Optional, Tuple, Union import jax import jax.numpy as jnp @@ -290,7 +280,7 @@ def setup( dim_data: int, optimizer_f: optax.OptState, optimizer_g: optax.OptState, - ): + ) -> None: """Setup all components required to train the network.""" # split random number generator rng, rng_f, rng_g = jax.random.split(rng, 3) @@ -695,7 +685,7 @@ def _update_logs( loss_f: jnp.ndarray, loss_g: jnp.ndarray, w_dist: jnp.ndarray, - ): + ) -> None: logs["loss_f"].append(float(loss_f)) logs["loss_g"].append(float(loss_g)) logs["w_dist"].append(float(w_dist)) diff --git a/src/ott/neural/flows/flows.py b/src/ott/neural/flows/flows.py index 572abba91..c379dcbc3 100644 --- a/src/ott/neural/flows/flows.py +++ b/src/ott/neural/flows/flows.py @@ -39,7 +39,7 @@ def compute_mu_t( ) -> jnp.ndarray: """Compute the mean of the probablitiy path. - Compute the mean of the probablitiy path between :math:`x` and :math:`y` + Compute the mean of the probablitiy path between :math:`x_0` and :math:`x_1` at time :math:`t`. Args: @@ -69,6 +69,9 @@ def compute_ut( t: Time :math:`t`. src: Sample from the source distribution. tgt: Sample from the target distribution. + + Returns: + Conditional vector field evaluated at time :math:`t`. """ def compute_xt( @@ -101,7 +104,7 @@ class StraightFlow(BaseFlow, abc.ABC): def compute_mu_t( # noqa: D102 self, t: jnp.ndarray, src: jnp.ndarray, tgt: jnp.ndarray ) -> jnp.ndarray: - return (1 - t) * src + t * tgt + return (1.0 - t) * src + t * tgt def compute_ut( self, t: jnp.ndarray, src: jnp.ndarray, tgt: jnp.ndarray @@ -119,6 +122,7 @@ def compute_ut( Returns: Conditional vector field evaluated at time :math:`t`. 
""" + del t return tgt - src @@ -134,15 +138,19 @@ def compute_sigma_t(self, t: jnp.ndarray) -> jnp.ndarray: Returns: Constant, time-independent standard deviation :math:`\sigma`. """ - return self.sigma + return jnp.full_like(t, fill_value=self.sigma) class BrownianNoiseFlow(StraightFlow): r"""Brownian Bridge Flow. Sampler for sampling noise implicitly defined by a Schroedinger Bridge - problem with parameter `\sigma` such that + problem with parameter :math:`\sigma` such that :math:`\sigma_t = \sigma * \sqrt(t * (1-t))`. + + Returns: + Samples from the probability path between :math:`x_0` and :math:`x_1` + at time :math:`t`. """ def compute_sigma_t(self, t: jnp.ndarray) -> jnp.ndarray: @@ -154,4 +162,4 @@ def compute_sigma_t(self, t: jnp.ndarray) -> jnp.ndarray: Returns: Standard deviation of the probablity path at time :math:`t`. """ - return self.sigma * jnp.sqrt(t * (1 - t)) + return self.sigma * jnp.sqrt(t * (1.0 - t)) diff --git a/src/ott/neural/flows/genot.py b/src/ott/neural/flows/genot.py index c12026251..b11b77b20 100644 --- a/src/ott/neural/flows/genot.py +++ b/src/ott/neural/flows/genot.py @@ -21,18 +21,12 @@ import diffrax import optax from flax.training import train_state -from flax.training.train_state import TrainState from orbax import checkpoint from ott import utils from ott.geometry import costs -from ott.neural.flows.flows import BaseFlow, ConstantNoiseFlow -from ott.neural.flows.samplers import sample_uniformly -from ott.neural.models.base_solver import ( - BaseNeuralSolver, - ResampleMixin, - UnbalancednessMixin, -) +from ott.neural.flows import flows, samplers +from ott.neural.models import base_solver from ott.solvers import was_solver from ott.solvers.linear import sinkhorn from ott.solvers.quadratic import gromov_wasserstein @@ -40,7 +34,10 @@ __all__ = ["GENOT"] -class GENOT(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): +class GENOT( + base_solver.UnbalancednessMixin, base_solver.ResampleMixin, + base_solver.BaseNeuralSolver 
+): """The GENOT training class as introduced in :cite:`klein_uscidda:23`. Args: @@ -81,15 +78,15 @@ class GENOT(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): fused_penalty: Fused penalty of the linear/fused term in the Fused Gromov-Wasserstein problem. tau_a: If :math:`<1`, defines how much unbalanced the problem is - on the first marginal. + on the first marginal. tau_b: If :math:`< 1`, defines how much unbalanced the problem is - on the second marginal. + on the second marginal. rescaling_a: Neural network to learn the left rescaling function. If :obj:`None`, the left rescaling factor is not learnt. rescaling_b: Neural network to learn the right rescaling function. If :obj:`None`, the right rescaling factor is not learnt. unbalanced_kwargs: Keyword arguments for the unbalancedness solver. - callback_fn: Callback function. + callback_fn: Callback function. rng: Random number generator. """ @@ -103,7 +100,7 @@ def __init__( cond_dim: int, iterations: int, valid_freq: int, - ot_solver: Type[was_solver.WassersteinSolver], + ot_solver: was_solver.WassersteinSolver, epsilon: float, cost_fn: Union[costs.CostFn, Dict[str, costs.CostFn]], scale_cost: Union[Union[bool, int, float, @@ -112,9 +109,10 @@ def __init__( Dict[str, Union[bool, int, float, Literal["mean", "max_norm", "max_bound", "max_cost", "median"]]]], - optimizer: Type[optax.GradientTransformation], - flow: Type[BaseFlow] = ConstantNoiseFlow(0.0), - time_sampler: Callable[[jax.Array, int], jnp.ndarray] = sample_uniformly, + optimizer: optax.GradientTransformation, + flow: Type[flows.BaseFlow] = flows.ConstantNoiseFlow(0.0), # noqa: B008 + time_sampler: Callable[[jax.Array, int], + jnp.ndarray] = samplers.uniform_sampler, checkpoint_manager: Type[checkpoint.CheckpointManager] = None, k_samples_per_x: int = 1, solver_latent_to_data: Optional[Type[was_solver.WassersteinSolver] @@ -132,11 +130,11 @@ def __init__( ): rng = utils.default_prng_key(rng) rng, rng_unbalanced = jax.random.split(rng) - 
BaseNeuralSolver.__init__( + base_solver.BaseNeuralSolver.__init__( self, iterations=iterations, valid_freq=valid_freq ) - ResampleMixin.__init__(self) - UnbalancednessMixin.__init__( + base_solver.ResampleMixin.__init__(self) + base_solver.UnbalancednessMixin.__init__( self, rng=rng_unbalanced, source_dim=input_dim, @@ -159,7 +157,7 @@ def __init__( self.rng = utils.default_prng_key(rng) self.velocity_field = velocity_field - self.state_velocity_field: Optional[TrainState] = None + self.state_velocity_field: Optional[train_state.TrainState] = None self.flow = flow self.time_sampler = time_sampler self.optimizer = optimizer @@ -189,14 +187,8 @@ def __init__( self.callback_fn = callback_fn self.setup() - def setup(self): - """Set up the model. - - Parameters - ---------- - kwargs - Keyword arguments for the setup function. - """ + def setup(self) -> None: + """Set up the model.""" self.state_velocity_field = ( self.velocity_field.create_train_state( self.rng, self.optimizer, self.output_dim @@ -341,7 +333,7 @@ def _get_step_fn(self) -> Callable: @jax.jit def step_fn( - key: jax.random.PRNGKeyArray, + rng: jax.Array, state_velocity_field: train_state.TrainState, batch: Dict[str, jnp.array], ): @@ -370,7 +362,7 @@ def loss_fn( ) return jnp.mean((v_t - u_t) ** 2) - keys_model = jax.random.split(key, len(batch["noise"])) + keys_model = jax.random.split(rng, len(batch["noise"])) grad_fn = jax.value_and_grad(loss_fn, has_aux=False) loss, grads = grad_fn(state_velocity_field.params, batch, keys_model) @@ -419,7 +411,7 @@ def transport( t0, t1 = (0.0, 1.0) @jax.jit - def solve_ode(input: jnp.ndarray, cond: jnp.ndarray): + def solve_ode(input: jnp.ndarray, cond: jnp.ndarray) -> jnp.ndarray: return diffrax.diffeqsolve( diffrax.ODETerm( lambda t, x, args: self.state_velocity_field. 
diff --git a/src/ott/neural/flows/models.py b/src/ott/neural/flows/models.py index bdc1ab4aa..6970e8368 100644 --- a/src/ott/neural/flows/models.py +++ b/src/ott/neural/flows/models.py @@ -70,15 +70,12 @@ class VelocityField(nn.Module): act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.silu n_frequencies: int = 128 - def __post_init__(self): - - # set embedded dim from latent embedded dim + def __post_init__(self) -> None: if self.condition_embed_dim is None: self.condition_embed_dim = self.latent_embed_dim if self.t_embed_dim is None: self.t_embed_dim = self.latent_embed_dim - # set joint hidden dim from all embedded dim concat_embed_dim = ( self.latent_embed_dim + self.condition_embed_dim + self.t_embed_dim ) diff --git a/src/ott/neural/flows/otfm.py b/src/ott/neural/flows/otfm.py index 1be3bdc16..2ec4707c6 100644 --- a/src/ott/neural/flows/otfm.py +++ b/src/ott/neural/flows/otfm.py @@ -11,20 +11,10 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import collections import functools import types -from collections import defaultdict -from typing import ( - Any, - Callable, - Dict, - Literal, - Mapping, - Optional, - Tuple, - Type, - Union, -) +from typing import Any, Callable, Dict, Literal, Mapping, Optional, Tuple, Type, Union import jax import jax.numpy as jnp @@ -36,18 +26,17 @@ from ott import utils from ott.geometry import costs -from ott.neural.flows.flows import BaseFlow -from ott.neural.models.base_solver import ( - BaseNeuralSolver, - ResampleMixin, - UnbalancednessMixin, -) +from ott.neural.flows import flows +from ott.neural.models import base_solver from ott.solvers import was_solver __all__ = ["OTFlowMatching"] -class OTFlowMatching(UnbalancednessMixin, ResampleMixin, BaseNeuralSolver): +class OTFlowMatching( + base_solver.UnbalancednessMixin, base_solver.ResampleMixin, + base_solver.BaseNeuralSolver +): """(Optimal transport) flow matching class. Flow matching as introduced in :cite:`lipman:22`, with extension to OT-FM @@ -97,9 +86,9 @@ def __init__( cond_dim: int, iterations: int, ot_solver: Optional[Type[was_solver.WassersteinSolver]], - flow: Type[BaseFlow], + flow: Type[flows.BaseFlow], time_sampler: Callable[[jax.Array, int], jnp.ndarray], - optimizer: Type[optax.GradientTransformation], + optimizer: optax.GradientTransformation, checkpoint_manager: Type[checkpoint.CheckpointManager] = None, epsilon: float = 1e-2, cost_fn: Optional[Type[costs.CostFn]] = None, @@ -120,11 +109,11 @@ def __init__( ): rng = utils.default_prng_key(rng) rng, rng_unbalanced = jax.random.split(rng) - BaseNeuralSolver.__init__( + base_solver.BaseNeuralSolver.__init__( self, iterations=iterations, valid_freq=valid_freq ) - ResampleMixin.__init__(self) - UnbalancednessMixin.__init__( + base_solver.ResampleMixin.__init__(self) + base_solver.UnbalancednessMixin.__init__( self, rng=rng_unbalanced, source_dim=input_dim, @@ -151,11 +140,11 @@ def __init__( self.rng = rng self.logging_freq = logging_freq 
self.num_eval_samples = num_eval_samples - self._training_logs: Mapping[str, Any] = defaultdict(list) + self._training_logs: Mapping[str, Any] = collections.defaultdict(list) self.setup() - def setup(self): + def setup(self) -> None: """Setup :class:`OTFlowMatching`.""" self.state_velocity_field = ( self.velocity_field.create_train_state( @@ -180,7 +169,7 @@ def _get_step_fn(self) -> Callable: @jax.jit def step_fn( - key: jax.random.PRNGKeyArray, + rng: jax.Array, state_velocity_field: train_state.TrainState, batch: Dict[str, jnp.ndarray], ) -> Tuple[Any, Any]: @@ -203,7 +192,7 @@ def loss_fn( return jnp.mean((v_t - u_t) ** 2) batch_size = len(batch["source_lin"]) - key_noise, key_t, key_model = jax.random.split(key, 3) + key_noise, key_t, key_model = jax.random.split(rng, 3) keys_model = jax.random.split(key_model, batch_size) t = self.time_sampler(key_t, batch_size) noise = self.sample_noise(key_noise, batch_size) @@ -300,7 +289,7 @@ def transport( t0, t1 = (t_0, t_1) if forward else (t_1, t_0) @jax.jit - def solve_ode(input: jnp.ndarray, cond: jnp.ndarray): + def solve_ode(input: jnp.ndarray, cond: jnp.ndarray) -> jnp.ndarray: return diffrax.diffeqsolve( diffrax.ODETerm( lambda t, x, args: self.state_velocity_field. diff --git a/src/ott/neural/flows/samplers.py b/src/ott/neural/flows/samplers.py index f5d0e0d17..1bfee16b4 100644 --- a/src/ott/neural/flows/samplers.py +++ b/src/ott/neural/flows/samplers.py @@ -16,16 +16,16 @@ import jax import jax.numpy as jnp -__all__ = ["sample_uniformly"] +__all__ = ["uniform_sampler"] -def sample_uniformly( +def uniform_sampler( rng: jax.Array, num_samples: int, low: float = 0.0, high: float = 1.0, offset: Optional[float] = None -): +) -> jnp.ndarray: """Sample from a uniform distribution. 
Sample :math:`t` from a uniform distribution :math:`[low, high]` with diff --git a/src/ott/neural/gaps/map_estimator.py b/src/ott/neural/gaps/map_estimator.py index cfcc8cb86..13dbc4ef4 100644 --- a/src/ott/neural/gaps/map_estimator.py +++ b/src/ott/neural/gaps/map_estimator.py @@ -13,16 +13,7 @@ # limitations under the License. import collections import functools -from typing import ( - Any, - Callable, - Dict, - Iterator, - Optional, - Sequence, - Tuple, - Union, -) +from typing import Any, Callable, Dict, Iterator, Optional, Sequence, Tuple, Union import jax import jax.numpy as jnp diff --git a/src/ott/neural/models/base_solver.py b/src/ott/neural/models/base_solver.py index 98272c028..071dc6e07 100644 --- a/src/ott/neural/models/base_solver.py +++ b/src/ott/neural/models/base_solver.py @@ -13,7 +13,6 @@ # limitations under the License. import abc import pathlib -import types from typing import Any, Callable, Dict, Literal, Mapping, Optional, Tuple, Union import jax @@ -43,7 +42,7 @@ def __init__(self, iterations: int, valid_freq: int, **_: Any): self.valid_freq = valid_freq @abc.abstractmethod - def setup(self, *args: Any, **kwargs: Any): + def setup(self, *args: Any, **kwargs: Any) -> None: """Setup the model.""" @abc.abstractmethod @@ -73,14 +72,14 @@ class ResampleMixin: def _resample_data( self, - key: jax.random.KeyArray, + rng: jax.Array, tmat: jnp.ndarray, source_arrays: Tuple[jnp.ndarray, ...], target_arrays: Tuple[jnp.ndarray, ...], ) -> Tuple[jnp.ndarray, ...]: """Resample a batch according to coupling `tmat`.""" tmat_flattened = tmat.flatten() - indices = jax.random.choice(key, len(tmat_flattened), shape=[tmat.shape[0]]) + indices = jax.random.choice(rng, len(tmat_flattened), shape=[tmat.shape[0]]) indices_source = indices // tmat.shape[1] indices_target = indices % tmat.shape[1] return tuple( @@ -91,7 +90,7 @@ def _resample_data( def _sample_conditional_indices_from_tmap( self, - key: jax.random.PRNGKeyArray, + rng: jax.Array, tmat: jnp.ndarray, 
k_samples_per_x: Union[int, jnp.ndarray], source_arrays: Tuple[jnp.ndarray, ...], @@ -102,22 +101,19 @@ def _sample_conditional_indices_from_tmap( batch_size = tmat.shape[0] left_marginals = tmat.sum(axis=1) if not source_is_balanced: - key, key2 = jax.random.split(key, 2) + rng, key2 = jax.random.split(rng, 2) indices = jax.random.choice( key=key2, a=jnp.arange(len(left_marginals)), p=left_marginals, shape=(len(left_marginals),) ) + tmat_adapted = tmat[indices] else: - indices = jnp.arange(batch_size) - tmat_adapted = tmat[indices] + tmat_adapted = tmat indices_per_row = jax.vmap( - lambda tmat_adapted: jax.random.choice( - key=key, - a=jnp.arange(batch_size), - p=tmat_adapted, - shape=(k_samples_per_x,) + lambda row: jax.random.choice( + key=rng, a=jnp.arange(batch_size), p=row, shape=(k_samples_per_x,) ), in_axes=0, out_axes=0, @@ -134,8 +130,8 @@ def _sample_conditional_indices_from_tmap( -1)) if b is not None else None for b in source_arrays ), tuple( - jnp.reshape(b[indices_target, :], (k_samples_per_x, batch_size, - -1)) if b is not None else None + jnp.reshape(b[indices_target], (k_samples_per_x, batch_size, + -1)) if b is not None else None for b in target_arrays ) @@ -154,9 +150,7 @@ def _get_sinkhorn_match_fn( ) -> Callable: @jax.jit - def match_pairs( - x: jnp.ndarray, y: jnp.ndarray - ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray, jnp.ndarray]: + def match_pairs(x: jnp.ndarray, y: jnp.ndarray) -> jnp.ndarray: geom = pointcloud.PointCloud( x, y, epsilon=epsilon, scale_cost=scale_cost, cost_fn=cost_fn ) @@ -168,7 +162,7 @@ def match_pairs( def match_pairs_filtered( x_lin: jnp.ndarray, x_quad: jnp.ndarray, y_lin: jnp.ndarray, y_quad: jnp.ndarray - ) -> Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray, jnp.ndarray]: + ) -> jnp.ndarray: geom = pointcloud.PointCloud( x_lin, y_lin, epsilon=epsilon, scale_cost=scale_cost, cost_fn=cost_fn ) @@ -220,7 +214,7 @@ def match_pairs( x_quad: Tuple[jnp.ndarray, jnp.ndarray], y_lin: Optional[jnp.ndarray], y_quad: 
Tuple[jnp.ndarray, jnp.ndarray], - ) -> Tuple[jnp.array, jnp.array]: + ) -> jnp.ndarray: geom_xx = pointcloud.PointCloud( x=x_quad, y=x_quad, cost_fn=cost_fn_xx, scale_cost=scale_cost_xx ) @@ -262,14 +256,12 @@ def __init__( jnp.ndarray]] = None, rescaling_b: Optional[Callable[[jnp.ndarray, Optional[jnp.ndarray]], jnp.ndarray]] = None, - seed: Optional[int] = None, opt_eta: Optional[optax.GradientTransformation] = None, opt_xi: Optional[optax.GradientTransformation] = None, resample_epsilon: float = 1e-2, scale_cost: Union[bool, int, float, Literal["mean", "max_cost", "median"]] = "mean", - sinkhorn_kwargs: Mapping[str, Any] = types.MappingProxyType({}), - **_: Any, + **kwargs: Mapping[str, Any], ): self.rng_unbalanced = rng self.source_dim = source_dim @@ -279,7 +271,6 @@ def __init__( self.tau_b = tau_b self.rescaling_a = rescaling_a self.rescaling_b = rescaling_b - self.seed = seed self.opt_eta = opt_eta self.opt_xi = opt_xi self.resample_epsilon = resample_epsilon @@ -290,7 +281,7 @@ def __init__( tau_b=tau_b, resample_epsilon=resample_epsilon, scale_cost=scale_cost, - sinkhorn_kwargs=sinkhorn_kwargs + sinkorn_kwargs=kwargs ) self._setup(source_dim=source_dim, target_dim=target_dim, cond_dim=cond_dim) @@ -301,7 +292,7 @@ def _get_compute_unbalanced_marginals( resample_epsilon: float, scale_cost: Union[bool, int, float, Literal["mean", "max_cost", "median"]] = "mean", - sinkhorn_kwargs: Dict[str, Any] = types.MappingProxyType({}), + **kwargs: Dict[str, Any], ) -> Tuple[jnp.ndarray, jnp.ndarray]: """Compute the unbalanced source and target marginals for a batch.""" @@ -315,23 +306,23 @@ def compute_unbalanced_marginals( epsilon=resample_epsilon, scale_cost=scale_cost ) - out = sinkhorn.Sinkhorn(**sinkhorn_kwargs)( + out = sinkhorn.Sinkhorn(**kwargs)( linear_problem.LinearProblem(geom, tau_a=tau_a, tau_b=tau_b) ) - return out.matrix.sum(axis=1), out.matrix.sum(axis=0) + return out.marginal(axis=1), out.marginal(axis=0) return compute_unbalanced_marginals @jax.jit 
def _resample_unbalanced( self, - key: jax.random.KeyArray, + rng: jax.Array, batch: Tuple[jnp.ndarray, ...], marginals: jnp.ndarray, ) -> Tuple[jnp.ndarray, ...]: """Resample a batch based on marginals.""" indices = jax.random.choice( - key, a=len(marginals), p=jnp.squeeze(marginals), shape=[len(marginals)] + rng, a=len(marginals), p=jnp.squeeze(marginals), shape=[len(marginals)] ) return tuple(b[indices] if b is not None else None for b in batch) diff --git a/src/ott/neural/models/layers.py b/src/ott/neural/models/layers.py index 46313b0e2..d0352ff05 100644 --- a/src/ott/neural/models/layers.py +++ b/src/ott/neural/models/layers.py @@ -11,23 +11,24 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, Tuple +from typing import Any -import jax import jax.numpy as jnp import flax.linen as nn __all__ = ["MLPBlock"] -PRNGKey = jax.Array -Shape = Tuple[int, ...] -Dtype = Any -Array = Any - class MLPBlock(nn.Module): - """An MLP block.""" + """An MLP block. + + Args: + dim: Dimensionality of the input data. + num_layers: Number of layers in the MLP block. + act_fn: Activation function. + out_dim: Dimensionality of the output data. + """ dim: int = 128 num_layers: int = 3 act_fn: Any = nn.silu diff --git a/src/ott/neural/models/models.py b/src/ott/neural/models/models.py index 93af5b58d..0acb7daae 100644 --- a/src/ott/neural/models/models.py +++ b/src/ott/neural/models/models.py @@ -11,7 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Callable, Optional, Sequence +from typing import Callable, Optional import jax import jax.numpy as jnp @@ -22,70 +22,7 @@ from ott.neural.models import layers -__all__ = ["MLP", "RescalingMLP"] - - -class MLP(nn.Module): - """A generic, not-convex MLP. - - Args: - dim_hidden: sequence specifying size of hidden dimensions. The output - dimension of the last layer is automatically set to 1 if - :attr:`is_potential` is ``True``, or the dimension of the input otherwise - is_potential: Model the potential if ``True``, otherwise - model the gradient of the potential - act_fn: Activation function - """ - - dim_hidden: Sequence[int] - is_potential: bool = True - act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.leaky_relu - - @nn.compact - def __call__(self, x: jnp.ndarray) -> jnp.ndarray: # noqa: D102 - squeeze = x.ndim == 1 - if squeeze: - x = jnp.expand_dims(x, 0) - assert x.ndim == 2, x.ndim - n_input = x.shape[-1] - - z = x - for n_hidden in self.dim_hidden: - Wx = nn.Dense(n_hidden, use_bias=True) - z = self.act_fn(Wx(z)) - - if self.is_potential: - Wx = nn.Dense(1, use_bias=True) - z = Wx(z).squeeze(-1) - - quad_term = 0.5 * jax.vmap(jnp.dot)(x, x) - z += quad_term - else: - Wx = nn.Dense(n_input, use_bias=True) - z = x + Wx(z) - - return z.squeeze(0) if squeeze else z - - def create_train_state( - self, - rng: jax.Array, - optimizer: optax.OptState, - input_dim: int, - ) -> train_state.TrainState: - """Create the training state. - - Args: - rng: Random number generator. - optimizer: Optimizer. - input_dim: Dimensionality of the input. - - Returns: - Training state. 
- """ - params = self.init(rng, jnp.ones(input_dim))["params"] - return train_state.TrainState.create( - apply_fn=self.apply, params=params, tx=optimizer - ) +__all__ = ["RescalingMLP"] class RescalingMLP(nn.Module): @@ -119,12 +56,12 @@ def __call__( self, x: jnp.ndarray, condition: Optional[jnp.ndarray] = None - ) -> jnp.ndarray: # noqa: D102 + ) -> jnp.ndarray: """Forward pass through the rescaling network. Args: - x: Data. - condition: Condition. + x: Data of shape ``[n, ...]``. + condition: Condition of shape ``[n, condition_dim]``. Returns: Estimated rescaling factors. diff --git a/src/ott/solvers/linear/sinkhorn_lr.py b/src/ott/solvers/linear/sinkhorn_lr.py index db948cf8b..45b4e4721 100644 --- a/src/ott/solvers/linear/sinkhorn_lr.py +++ b/src/ott/solvers/linear/sinkhorn_lr.py @@ -11,16 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import ( - Any, - Callable, - Literal, - Mapping, - NamedTuple, - Optional, - Tuple, - Union, -) +from typing import Any, Callable, Literal, Mapping, NamedTuple, Optional, Tuple, Union import jax import jax.experimental diff --git a/src/ott/solvers/quadratic/__init__.py b/src/ott/solvers/quadratic/__init__.py index 560ac3ddd..507812971 100644 --- a/src/ott/solvers/quadratic/__init__.py +++ b/src/ott/solvers/quadratic/__init__.py @@ -11,10 +11,5 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from . import ( - gromov_wasserstein, - gromov_wasserstein_lr, - gw_barycenter, - lower_bound, -) +from . 
import gromov_wasserstein, gromov_wasserstein_lr, gw_barycenter, lower_bound from ._solve import solve diff --git a/src/ott/solvers/quadratic/gromov_wasserstein_lr.py b/src/ott/solvers/quadratic/gromov_wasserstein_lr.py index 214853f4c..ad8c4130a 100644 --- a/src/ott/solvers/quadratic/gromov_wasserstein_lr.py +++ b/src/ott/solvers/quadratic/gromov_wasserstein_lr.py @@ -12,16 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. """A Jax implementation of the unbalanced low-rank GW algorithm.""" -from typing import ( - Any, - Callable, - Literal, - Mapping, - NamedTuple, - Optional, - Tuple, - Union, -) +from typing import Any, Callable, Literal, Mapping, NamedTuple, Optional, Tuple, Union import jax import jax.experimental diff --git a/src/ott/tools/gaussian_mixture/fit_gmm_pair.py b/src/ott/tools/gaussian_mixture/fit_gmm_pair.py index 7ecde263c..0c3c78ba3 100644 --- a/src/ott/tools/gaussian_mixture/fit_gmm_pair.py +++ b/src/ott/tools/gaussian_mixture/fit_gmm_pair.py @@ -84,11 +84,7 @@ import jax import jax.numpy as jnp -from ott.tools.gaussian_mixture import ( - fit_gmm, - gaussian_mixture, - gaussian_mixture_pair, -) +from ott.tools.gaussian_mixture import fit_gmm, gaussian_mixture, gaussian_mixture_pair __all__ = ["get_fit_model_em_fn"] diff --git a/src/ott/tools/gaussian_mixture/gaussian_mixture.py b/src/ott/tools/gaussian_mixture/gaussian_mixture.py index 313689939..576d937c8 100644 --- a/src/ott/tools/gaussian_mixture/gaussian_mixture.py +++ b/src/ott/tools/gaussian_mixture/gaussian_mixture.py @@ -16,12 +16,7 @@ import jax import jax.numpy as jnp -from ott.tools.gaussian_mixture import ( - gaussian, - linalg, - probabilities, - scale_tril, -) +from ott.tools.gaussian_mixture import gaussian, linalg, probabilities, scale_tril __all__ = ["GaussianMixture"] diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index 723d25393..05cd38af1 100644 --- a/tests/neural/conftest.py +++ 
b/tests/neural/conftest.py @@ -15,7 +15,7 @@ import numpy as np -from ott.neural.data.dataloaders import ConditionalDataLoader, OTDataLoader +from ott.neural.data import dataloaders @pytest.fixture(scope="module") @@ -24,7 +24,7 @@ def data_loader_gaussian(): rng = np.random.default_rng(seed=0) source = rng.normal(size=(100, 2)) target = rng.normal(size=(100, 2)) + 1.0 - return OTDataLoader(16, source_lin=source, target_lin=target) + return dataloaders.OTDataLoader(16, source_lin=source, target_lin=target) @pytest.fixture(scope="module") @@ -36,20 +36,23 @@ def data_loader_gaussian_conditional(): source_1 = rng.normal(size=(100, 2)) target_1 = rng.normal(size=(100, 2)) - 2.0 - dl0 = OTDataLoader( + dl0 = dataloaders.OTDataLoader( 16, source_lin=source_0, target_lin=target_0, source_conditions=np.zeros_like(source_0) * 0.0 ) - dl1 = OTDataLoader( + dl1 = dataloaders.OTDataLoader( 16, source_lin=source_1, target_lin=target_1, source_conditions=np.ones_like(source_1) * 1.0 ) - return ConditionalDataLoader({"0": dl0, "1": dl1}, np.array([0.5, 0.5])) + return dataloaders.ConditionalDataLoader({ + "0": dl0, + "1": dl1 + }, np.array([0.5, 0.5])) @pytest.fixture(scope="module") @@ -60,7 +63,7 @@ def data_loader_gaussian_with_conditions(): target = rng.normal(size=(100, 2)) + 1.0 source_conditions = rng.normal(size=(100, 1)) target_conditions = rng.normal(size=(100, 1)) - 1.0 - return OTDataLoader( + return dataloaders.OTDataLoader( 16, source_lin=source, target_lin=target, @@ -75,7 +78,7 @@ def genot_data_loader_linear(): rng = np.random.default_rng(seed=0) source = rng.normal(size=(100, 2)) target = rng.normal(size=(100, 2)) + 1.0 - return OTDataLoader(16, source_lin=source, target_lin=target) + return dataloaders.OTDataLoader(16, source_lin=source, target_lin=target) @pytest.fixture(scope="module") @@ -85,7 +88,7 @@ def genot_data_loader_linear_conditional(): source = rng.normal(size=(100, 2)) target = rng.normal(size=(100, 2)) + 1.0 source_conditions = 
rng.normal(size=(100, 4)) - return OTDataLoader( + return dataloaders.OTDataLoader( 16, source_lin=source, target_lin=target, @@ -99,7 +102,7 @@ def genot_data_loader_quad(): rng = np.random.default_rng(seed=0) source = rng.normal(size=(100, 2)) target = rng.normal(size=(100, 1)) + 1.0 - return OTDataLoader(16, source_quad=source, target_quad=target) + return dataloaders.OTDataLoader(16, source_quad=source, target_quad=target) @pytest.fixture(scope="module") @@ -109,7 +112,7 @@ def genot_data_loader_quad_conditional(): source = rng.normal(size=(100, 2)) target = rng.normal(size=(100, 1)) + 1.0 source_conditions = rng.normal(size=(100, 7)) - return OTDataLoader( + return dataloaders.OTDataLoader( 16, source_quad=source, target_quad=target, @@ -125,7 +128,7 @@ def genot_data_loader_fused(): target_q = rng.normal(size=(100, 1)) + 1.0 source_lin = rng.normal(size=(100, 2)) target_lin = rng.normal(size=(100, 2)) + 1.0 - return OTDataLoader( + return dataloaders.OTDataLoader( 16, source_lin=source_lin, source_quad=source_q, @@ -143,7 +146,7 @@ def genot_data_loader_fused_conditional(): source_lin = rng.normal(size=(100, 2)) target_lin = rng.normal(size=(100, 2)) + 1.0 source_conditions = rng.normal(size=(100, 7)) - return OTDataLoader( + return dataloaders.OTDataLoader( 16, source_lin=source_lin, source_quad=source_q, diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index b3de698ad..5a5b9a847 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -23,7 +23,7 @@ from ott.geometry import costs from ott.neural.flows.genot import GENOT from ott.neural.flows.models import VelocityField -from ott.neural.flows.samplers import sample_uniformly +from ott.neural.flows.samplers import uniform_sampler from ott.neural.models.models import RescalingMLP from ott.solvers.linear import sinkhorn from ott.solvers.quadratic import gromov_wasserstein @@ -56,7 +56,7 @@ def test_genot_linear_unconditional( latent_embed_dim=5, ) ot_solver = 
sinkhorn.Sinkhorn() - time_sampler = sample_uniformly + time_sampler = uniform_sampler optimizer = optax.adam(learning_rate=1e-3) genot = GENOT( neural_vf, @@ -106,7 +106,7 @@ def test_genot_quad_unconditional( latent_embed_dim=5, ) ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) - time_sampler = functools.partial(sample_uniformly, offset=1e-2) + time_sampler = functools.partial(uniform_sampler, offset=1e-2) optimizer = optax.adam(learning_rate=1e-3) genot = GENOT( neural_vf, @@ -198,7 +198,7 @@ def test_genot_linear_conditional( latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() - time_sampler = sample_uniformly + time_sampler = uniform_sampler optimizer = optax.adam(learning_rate=1e-3) genot = GENOT( neural_vf, @@ -245,7 +245,7 @@ def test_genot_quad_conditional( latent_embed_dim=5, ) ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) - time_sampler = sample_uniformly + time_sampler = uniform_sampler optimizer = optax.adam(learning_rate=1e-3) genot = GENOT( neural_vf, @@ -292,7 +292,7 @@ def test_genot_fused_conditional( latent_embed_dim=5, ) ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) - time_sampler = sample_uniformly + time_sampler = uniform_sampler optimizer = optax.adam(learning_rate=1e-3) genot = GENOT( neural_vf, @@ -348,7 +348,7 @@ def test_genot_linear_learn_rescaling( latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() - time_sampler = sample_uniformly + time_sampler = uniform_sampler optimizer = optax.adam(learning_rate=1e-3) tau_a = 0.9 tau_b = 0.2 diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index 1230a638b..7f6a1a8dc 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -20,36 +20,31 @@ import optax -from ott.neural.flows.flows import ( - BaseFlow, - BrownianNoiseFlow, - ConstantNoiseFlow, -) -from ott.neural.flows.models import VelocityField -from ott.neural.flows.otfm import OTFlowMatching -from ott.neural.flows.samplers import sample_uniformly -from 
ott.neural.models.models import RescalingMLP +from ott.neural.flows import flows, models, otfm, samplers from ott.solvers.linear import sinkhorn class TestOTFlowMatching: @pytest.mark.parametrize( - "flow", - [ConstantNoiseFlow(0.0), - ConstantNoiseFlow(1.0), - BrownianNoiseFlow(0.2)] + "flow", [ + flows.ConstantNoiseFlow(0.0), + flows.ConstantNoiseFlow(1.0), + flows.BrownianNoiseFlow(0.2) + ] ) - def test_flow_matching(self, data_loader_gaussian, flow: Type[BaseFlow]): - neural_vf = VelocityField( + def test_flow_matching( + self, data_loader_gaussian, flow: Type[flows.BaseFlow] + ): + neural_vf = models.VelocityField( output_dim=2, condition_dim=0, latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() - time_sampler = sample_uniformly + time_sampler = samplers.uniform_sampler optimizer = optax.adam(learning_rate=1e-3) - fm = OTFlowMatching( + fm = otfm.OTFlowMatching( neural_vf, input_dim=2, cond_dim=0, @@ -78,23 +73,24 @@ def test_flow_matching(self, data_loader_gaussian, flow: Type[BaseFlow]): assert jnp.sum(jnp.isnan(result_backward)) == 0 @pytest.mark.parametrize( - "flow", - [ConstantNoiseFlow(0.0), - ConstantNoiseFlow(1.0), - BrownianNoiseFlow(0.2)] + "flow", [ + flows.ConstantNoiseFlow(0.0), + flows.ConstantNoiseFlow(1.0), + flows.BrownianNoiseFlow(0.2) + ] ) def test_flow_matching_with_conditions( - self, data_loader_gaussian_with_conditions, flow: Type[BaseFlow] + self, data_loader_gaussian_with_conditions, flow: Type[flows.BaseFlow] ): - neural_vf = VelocityField( + neural_vf = models.VelocityField( output_dim=2, condition_dim=1, latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() - time_sampler = functools.partial(sample_uniformly, offset=1e-5) + time_sampler = functools.partial(samplers.uniform_sampler, offset=1e-5) optimizer = optax.adam(learning_rate=1e-3) - fm = OTFlowMatching( + fm = otfm.OTFlowMatching( neural_vf, input_dim=2, cond_dim=1, @@ -126,23 +122,24 @@ def test_flow_matching_with_conditions( assert jnp.sum(jnp.isnan(result_backward)) == 
0 @pytest.mark.parametrize( - "flow", - [ConstantNoiseFlow(0.0), - ConstantNoiseFlow(1.0), - BrownianNoiseFlow(0.2)] + "flow", [ + flows.ConstantNoiseFlow(0.0), + flows.ConstantNoiseFlow(1.0), + flows.BrownianNoiseFlow(0.2) + ] ) def test_flow_matching_conditional( - self, data_loader_gaussian_conditional, flow: Type[BaseFlow] + self, data_loader_gaussian_conditional, flow: Type[flows.BaseFlow] ): - neural_vf = VelocityField( + neural_vf = models.VelocityField( output_dim=2, condition_dim=0, latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() - time_sampler = sample_uniformly + time_sampler = samplers.uniform_sampler optimizer = optax.adam(learning_rate=1e-3) - fm = OTFlowMatching( + fm = otfm.OTFlowMatching( neural_vf, input_dim=2, cond_dim=0, @@ -182,21 +179,21 @@ def test_flow_matching_learn_rescaling( batch = next(data_loader) source_dim = batch["source_lin"].shape[1] condition_dim = batch["source_conditions"].shape[1] if conditional else 0 - neural_vf = VelocityField( + neural_vf = models.VelocityField( output_dim=2, condition_dim=0, latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() - time_sampler = sample_uniformly - flow = ConstantNoiseFlow(1.0) + time_sampler = samplers.uniform_sampler + flow = flows.ConstantNoiseFlow(1.0) optimizer = optax.adam(learning_rate=1e-3) tau_a = 0.9 tau_b = 0.2 - rescaling_a = RescalingMLP(hidden_dim=4, condition_dim=condition_dim) - rescaling_b = RescalingMLP(hidden_dim=4, condition_dim=condition_dim) - fm = OTFlowMatching( + rescaling_a = models.RescalingMLP(hidden_dim=4, condition_dim=condition_dim) + rescaling_b = models.RescalingMLP(hidden_dim=4, condition_dim=condition_dim) + fm = otfm.OTFlowMatching( neural_vf, input_dim=source_dim, cond_dim=condition_dim, From a94b585b53e83c23e9498abd8bae56b11e584c3e Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Fri, 9 Feb 2024 11:17:37 +0100 Subject: [PATCH 078/186] [ci skip] intermediate save --- src/ott/neural/models/base_solver.py | 2 +- tests/neural/genot_test.py | 7 
+++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/src/ott/neural/models/base_solver.py b/src/ott/neural/models/base_solver.py index 071dc6e07..71825aa27 100644 --- a/src/ott/neural/models/base_solver.py +++ b/src/ott/neural/models/base_solver.py @@ -121,7 +121,7 @@ def _sample_conditional_indices_from_tmap( tmat_adapted ) - indices_source = jnp.repeat(indices, k_samples_per_x) + indices_source = jnp.repeat(indices_per_row, k_samples_per_x) indices_target = jnp.reshape( indices_per_row % tmat.shape[1], (batch_size * k_samples_per_x,) ) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 5a5b9a847..4960c1bec 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -50,6 +50,10 @@ def test_genot_linear_unconditional( target_dim = target_lin.shape[1] condition_dim = 0 + print("source dim is ", source_dim) + print("target dim is ", target_dim) + print("condition dim is ", condition_dim) + neural_vf = VelocityField( output_dim=target_dim, condition_dim=source_dim + condition_dim, @@ -294,6 +298,9 @@ def test_genot_fused_conditional( ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) time_sampler = uniform_sampler optimizer = optax.adam(learning_rate=1e-3) + print("source dim is ", source_dim) + print("target dim is ", target_dim) + print("condition dim is ", condition_dim) genot = GENOT( neural_vf, input_dim=source_dim, From 78b5e10f875816de268823c9cac48eb13b0f49a7 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Fri, 9 Feb 2024 11:31:35 +0100 Subject: [PATCH 079/186] [ci skip] neural base solver update --- src/ott/neural/models/base_solver.py | 29 ++++++++++++++++------------ 1 file changed, 17 insertions(+), 12 deletions(-) diff --git a/src/ott/neural/models/base_solver.py b/src/ott/neural/models/base_solver.py index 71825aa27..b078393d3 100644 --- a/src/ott/neural/models/base_solver.py +++ b/src/ott/neural/models/base_solver.py @@ -42,7 +42,7 @@ def __init__(self, iterations: int, valid_freq: 
int, **_: Any): self.valid_freq = valid_freq @abc.abstractmethod - def setup(self, *args: Any, **kwargs: Any) -> None: + def setup(self, *args: Any, **kwargs: Any): """Setup the model.""" @abc.abstractmethod @@ -90,7 +90,7 @@ def _resample_data( def _sample_conditional_indices_from_tmap( self, - rng: jax.Array, + key: jax.random.PRNGKeyArray, tmat: jnp.ndarray, k_samples_per_x: Union[int, jnp.ndarray], source_arrays: Tuple[jnp.ndarray, ...], @@ -101,19 +101,22 @@ def _sample_conditional_indices_from_tmap( batch_size = tmat.shape[0] left_marginals = tmat.sum(axis=1) if not source_is_balanced: - rng, key2 = jax.random.split(rng, 2) + key, key2 = jax.random.split(key, 2) indices = jax.random.choice( key=key2, a=jnp.arange(len(left_marginals)), p=left_marginals, shape=(len(left_marginals),) ) - tmat_adapted = tmat[indices] else: - tmat_adapted = tmat + indices = jnp.arange(batch_size) + tmat_adapted = tmat[indices] indices_per_row = jax.vmap( - lambda row: jax.random.choice( - key=rng, a=jnp.arange(batch_size), p=row, shape=(k_samples_per_x,) + lambda tmat_adapted: jax.random.choice( + key=key, + a=jnp.arange(batch_size), + p=tmat_adapted, + shape=(k_samples_per_x,) ), in_axes=0, out_axes=0, @@ -121,7 +124,7 @@ def _sample_conditional_indices_from_tmap( tmat_adapted ) - indices_source = jnp.repeat(indices_per_row, k_samples_per_x) + indices_source = jnp.repeat(indices, k_samples_per_x) indices_target = jnp.reshape( indices_per_row % tmat.shape[1], (batch_size * k_samples_per_x,) ) @@ -130,8 +133,8 @@ def _sample_conditional_indices_from_tmap( -1)) if b is not None else None for b in source_arrays ), tuple( - jnp.reshape(b[indices_target], (k_samples_per_x, batch_size, - -1)) if b is not None else None + jnp.reshape(b[indices_target, :], (k_samples_per_x, batch_size, + -1)) if b is not None else None for b in target_arrays ) @@ -256,6 +259,7 @@ def __init__( jnp.ndarray]] = None, rescaling_b: Optional[Callable[[jnp.ndarray, Optional[jnp.ndarray]], jnp.ndarray]] = None, 
+ seed: Optional[int] = None, opt_eta: Optional[optax.GradientTransformation] = None, opt_xi: Optional[optax.GradientTransformation] = None, resample_epsilon: float = 1e-2, @@ -271,6 +275,7 @@ def __init__( self.tau_b = tau_b self.rescaling_a = rescaling_a self.rescaling_b = rescaling_b + self.seed = seed self.opt_eta = opt_eta self.opt_xi = opt_xi self.resample_epsilon = resample_epsilon @@ -281,7 +286,7 @@ def __init__( tau_b=tau_b, resample_epsilon=resample_epsilon, scale_cost=scale_cost, - sinkorn_kwargs=kwargs + **kwargs ) self._setup(source_dim=source_dim, target_dim=target_dim, cond_dim=cond_dim) @@ -292,7 +297,7 @@ def _get_compute_unbalanced_marginals( resample_epsilon: float, scale_cost: Union[bool, int, float, Literal["mean", "max_cost", "median"]] = "mean", - **kwargs: Dict[str, Any], + **kwargs: Mapping[str, Any], ) -> Tuple[jnp.ndarray, jnp.ndarray]: """Compute the unbalanced source and target marginals for a batch.""" From 592564fd3b47f10fb677fc8b9f2e4262b72b45cd Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Fri, 9 Feb 2024 14:17:48 +0100 Subject: [PATCH 080/186] make resamlpemixin a class --- src/ott/neural/flows/genot.py | 73 ++-- src/ott/neural/flows/otfm.py | 67 +--- src/ott/neural/models/__init__.py | 2 +- src/ott/neural/models/base_solver.py | 370 ++++++++++--------- src/ott/neural/models/{models.py => nets.py} | 0 tests/neural/genot_test.py | 69 +++- tests/neural/losses_test.py | 4 +- tests/neural/map_estimator_test.py | 4 +- tests/neural/otfm_test.py | 64 +++- 9 files changed, 336 insertions(+), 317 deletions(-) rename src/ott/neural/models/{models.py => nets.py} (100%) diff --git a/src/ott/neural/flows/genot.py b/src/ott/neural/flows/genot.py index b11b77b20..736d9e268 100644 --- a/src/ott/neural/flows/genot.py +++ b/src/ott/neural/flows/genot.py @@ -21,7 +21,6 @@ import diffrax import optax from flax.training import train_state -from orbax import checkpoint from ott import utils from ott.geometry import costs @@ -35,8 +34,7 @@ class 
GENOT( - base_solver.UnbalancednessMixin, base_solver.ResampleMixin, - base_solver.BaseNeuralSolver + base_solver.ResampleMixin, ): """The GENOT training class as introduced in :cite:`klein_uscidda:23`. @@ -68,7 +66,7 @@ class GENOT( optimizer: Optimizer for `velocity_field`. flow: Flow between latent distribution and target distribution. time_sampler: Sampler for the time. - checkpoint_manager: Checkpoint manager. + unbalancedness_handler: Handler for unbalancedness. k_samples_per_x: Number of samples drawn from the conditional distribution of an input sample, see algorithm TODO. solver_latent_to_data: Linear OT solver to match the latent distribution @@ -77,15 +75,6 @@ class GENOT( #TODO: adapt fused_penalty: Fused penalty of the linear/fused term in the Fused Gromov-Wasserstein problem. - tau_a: If :math:`<1`, defines how much unbalanced the problem is - on the first marginal. - tau_b: If :math:`< 1`, defines how much unbalanced the problem is - on the second marginal. - rescaling_a: Neural network to learn the left rescaling function. If - :obj:`None`, the left rescaling factor is not learnt. - rescaling_b: Neural network to learn the right rescaling function. If - :obj:`None`, the right rescaling factor is not learnt. - unbalanced_kwargs: Keyword arguments for the unbalancedness solver. callback_fn: Callback function. rng: Random number generator. 
""" @@ -109,43 +98,23 @@ def __init__( Dict[str, Union[bool, int, float, Literal["mean", "max_norm", "max_bound", "max_cost", "median"]]]], + unbalancedness_handler: base_solver.UnbalancednessHandler, optimizer: optax.GradientTransformation, flow: Type[flows.BaseFlow] = flows.ConstantNoiseFlow(0.0), # noqa: B008 time_sampler: Callable[[jax.Array, int], jnp.ndarray] = samplers.uniform_sampler, - checkpoint_manager: Type[checkpoint.CheckpointManager] = None, k_samples_per_x: int = 1, solver_latent_to_data: Optional[Type[was_solver.WassersteinSolver] ] = None, kwargs_solver_latent_to_data: Dict[str, Any] = types.MappingProxyType({}), fused_penalty: float = 0.0, - tau_a: float = 1.0, - tau_b: float = 1.0, - rescaling_a: Callable[[jnp.ndarray], float] = None, - rescaling_b: Callable[[jnp.ndarray], float] = None, - unbalanced_kwargs: Dict[str, Any] = types.MappingProxyType({}), callback_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray, jnp.ndarray], Any]] = None, rng: Optional[jax.Array] = None, ): rng = utils.default_prng_key(rng) - rng, rng_unbalanced = jax.random.split(rng) - base_solver.BaseNeuralSolver.__init__( - self, iterations=iterations, valid_freq=valid_freq - ) base_solver.ResampleMixin.__init__(self) - base_solver.UnbalancednessMixin.__init__( - self, - rng=rng_unbalanced, - source_dim=input_dim, - target_dim=input_dim, - cond_dim=cond_dim, - tau_a=tau_a, - tau_b=tau_b, - rescaling_a=rescaling_a, - rescaling_b=rescaling_b, - unbalanced_kwargs=unbalanced_kwargs, - ) + if isinstance( ot_solver, gromov_wasserstein.GromovWasserstein ) and epsilon is not None: @@ -156,12 +125,13 @@ def __init__( ) self.rng = utils.default_prng_key(rng) + self.iterations = iterations + self.valid_freq = valid_freq self.velocity_field = velocity_field self.state_velocity_field: Optional[train_state.TrainState] = None self.flow = flow self.time_sampler = time_sampler self.optimizer = optimizer - self.checkpoint_manager = checkpoint_manager self.latent_noise_fn = 
jax.tree_util.Partial( jax.random.multivariate_normal, mean=jnp.zeros((output_dim,)), @@ -172,6 +142,9 @@ def __init__( self.cond_dim = cond_dim self.k_samples_per_x = k_samples_per_x + # unbalancedness + self.unbalancedness_handler = unbalancedness_handler + # OT data-data matching parameters self.ot_solver = ot_solver self.epsilon = epsilon @@ -210,8 +183,8 @@ def setup(self) -> None: epsilon=self.epsilon, cost_fn=self.cost_fn, scale_cost=self.scale_cost, - tau_a=self.tau_a, - tau_b=self.tau_b, + tau_a=self.unbalancedness_handler.tau_a, + tau_b=self.unbalancedness_handler.tau_b, filter_input=True ) else: @@ -219,8 +192,8 @@ def setup(self) -> None: ot_solver=self.ot_solver, cost_fn=self.cost_fn, scale_cost=self.scale_cost, - tau_a=self.tau_a, - tau_b=self.tau_b, + tau_a=self.unbalancedness_handler.tau_a, + tau_b=self.unbalancedness_handler.tau_b, fused_penalty=self.fused_penalty ) @@ -278,7 +251,7 @@ def __call__(self, train_loader, valid_loader): tmat, self.k_samples_per_x, (batch["source"], batch["source_conditions"]), (batch["target"],), - source_is_balanced=(self.tau_a == 1.0) + source_is_balanced=(self.unbalancedness_handler.tau_a == 1.0) ) jax.random.split(rng_noise, batch_size * self.k_samples_per_x) @@ -310,24 +283,17 @@ def __call__(self, train_loader, valid_loader): ( self.state_eta, self.state_xi, eta_predictions, xi_predictions, loss_a, loss_b - ) = self.unbalancedness_step_fn( + ) = self.unbalancedness_handler.step_fn( source=batch["source"], target=batch["target"], condition=batch["source_conditions"], a=tmat.sum(axis=1), b=tmat.sum(axis=0), - state_eta=self.state_eta, - state_xi=self.state_xi, + state_eta=self.unbalancedness_handler.state_eta, + state_xi=self.unbalancedness_handler.state_xi, ) if iteration % self.valid_freq == 0: self._valid_step(valid_loader, iteration) - if self.checkpoint_manager is not None: - states_to_save = {"state_velocity_field": self.state_velocity_field} - if self.state_eta is not None: - states_to_save["state_eta"] = 
self.state_eta - if self.state_xi is not None: - states_to_save["state_xi"] = self.state_xi - self.checkpoint_manager.save(iteration, states_to_save) def _get_step_fn(self) -> Callable: @@ -441,7 +407,10 @@ def _valid_step(self, valid_loader, iter): @property def learn_rescaling(self) -> bool: """Whether to learn at least one rescaling factor.""" - return self.rescaling_a is not None or self.rescaling_b is not None + return ( + self.unbalancedness_handler.rescaling_a is not None or + self.unbalancedness_handler.rescaling_b is not None + ) def save(self, path: str): """Save the model. diff --git a/src/ott/neural/flows/otfm.py b/src/ott/neural/flows/otfm.py index 2ec4707c6..ef55e1dc5 100644 --- a/src/ott/neural/flows/otfm.py +++ b/src/ott/neural/flows/otfm.py @@ -34,8 +34,7 @@ class OTFlowMatching( - base_solver.UnbalancednessMixin, base_solver.ResampleMixin, - base_solver.BaseNeuralSolver + base_solver.ResampleMixin, ): """(Optimal transport) flow matching class. @@ -61,17 +60,6 @@ class OTFlowMatching( cost_fn: Cost function for the OT problem solved by the `ot_solver`. scale_cost: How to scale the cost matrix for the OT problem solved by the `ot_solver`. - tau_a: If :math:`<1`, defines how much unbalanced the problem is - on the first marginal. - tau_b: If :math:`< 1`, defines how much unbalanced the problem is - on the second marginal. - rescaling_a: Neural network to learn the left rescaling function as - suggested in :cite:`eyring:23`. If :obj:`None`, the left rescaling factor - is not learnt. - rescaling_b: Neural network to learn the right rescaling function as - suggested in :cite:`eyring:23`. If :obj:`None`, the right rescaling factor - is not learnt. - unbalanced_kwargs: Keyword arguments for the unbalancedness solver. callback_fn: Callback function. num_eval_samples: Number of samples to evaluate on during evaluation. rng: Random number generator. 
@@ -89,17 +77,13 @@ def __init__( flow: Type[flows.BaseFlow], time_sampler: Callable[[jax.Array, int], jnp.ndarray], optimizer: optax.GradientTransformation, + unbalancedness_handler: base_solver.UnbalancednessHandler, checkpoint_manager: Type[checkpoint.CheckpointManager] = None, epsilon: float = 1e-2, cost_fn: Optional[Type[costs.CostFn]] = None, scale_cost: Union[bool, int, float, Literal["mean", "max_norm", "max_bound", "max_cost", "median"]] = "mean", - tau_a: float = 1.0, - tau_b: float = 1.0, - rescaling_a: Callable[[jnp.ndarray], float] = None, - rescaling_b: Callable[[jnp.ndarray], float] = None, - unbalanced_kwargs: Dict[str, Any] = types.MappingProxyType({}), callback_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray, jnp.ndarray], Any]] = None, logging_freq: int = 100, @@ -108,24 +92,10 @@ def __init__( rng: Optional[jax.Array] = None, ): rng = utils.default_prng_key(rng) - rng, rng_unbalanced = jax.random.split(rng) - base_solver.BaseNeuralSolver.__init__( - self, iterations=iterations, valid_freq=valid_freq - ) base_solver.ResampleMixin.__init__(self) - base_solver.UnbalancednessMixin.__init__( - self, - rng=rng_unbalanced, - source_dim=input_dim, - target_dim=input_dim, - cond_dim=cond_dim, - tau_a=tau_a, - tau_b=tau_b, - rescaling_a=rescaling_a, - rescaling_b=rescaling_b, - unbalanced_kwargs=unbalanced_kwargs, - ) - + self.unbalancedness_handler = unbalancedness_handler + self.iterations = iterations + self.valid_freq = valid_freq self.velocity_field = velocity_field self.input_dim = input_dim self.ot_solver = ot_solver @@ -159,8 +129,8 @@ def setup(self) -> None: epsilon=self.epsilon, cost_fn=self.cost_fn, scale_cost=self.scale_cost, - tau_a=self.tau_a, - tau_b=self.tau_b, + tau_a=self.unbalancedness_handler.tau_a, + tau_b=self.unbalancedness_handler.tau_b, ) else: self.match_fn = None @@ -235,26 +205,20 @@ def __call__(self, train_loader, valid_loader): curr_loss = 0.0 if self.learn_rescaling: ( - self.state_eta, self.state_xi, eta_predictions, 
xi_predictions, - loss_a, loss_b - ) = self.unbalancedness_step_fn( + self.unbalancedness_handler.state_eta, + self.unbalancedness_handler.state_xi, eta_predictions, + xi_predictions, loss_a, loss_b + ) = self.unbalancedness_handler.step_fn( source=batch["source_lin"], target=batch["target_lin"], condition=batch["source_conditions"], a=tmat.sum(axis=1), b=tmat.sum(axis=0), - state_eta=self.state_eta, - state_xi=self.state_xi, + state_eta=self.unbalancedness_handler.state_eta, + state_xi=self.unbalancedness_handler.state_xi, ) if iter % self.valid_freq == 0: self._valid_step(valid_loader, iter) - if self.checkpoint_manager is not None: - states_to_save = {"state_velocity_field": self.state_velocity_field} - if self.state_eta is not None: - states_to_save["state_eta"] = self.state_eta - if self.state_xi is not None: - states_to_save["state_xi"] = self.state_xi - self.checkpoint_manager.save(iter, states_to_save) def transport( self, @@ -319,7 +283,10 @@ def _valid_step(self, valid_loader, iter): @property def learn_rescaling(self) -> bool: """Whether to learn at least one rescaling factor.""" - return self.rescaling_a is not None or self.rescaling_b is not None + return ( + self.unbalancedness_handler.rescaling_a is not None or + self.unbalancedness_handler.rescaling_b is not None + ) def save(self, path: str): """Save the model. diff --git a/src/ott/neural/models/__init__.py b/src/ott/neural/models/__init__.py index 5c2ac3b2b..ba39ae8b4 100644 --- a/src/ott/neural/models/__init__.py +++ b/src/ott/neural/models/__init__.py @@ -11,4 +11,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from . import base_solver, layers, models +from . 
import base_solver, layers, nets diff --git a/src/ott/neural/models/base_solver.py b/src/ott/neural/models/base_solver.py index b078393d3..1bc541ec7 100644 --- a/src/ott/neural/models/base_solver.py +++ b/src/ott/neural/models/base_solver.py @@ -11,8 +11,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -import abc -import pathlib from typing import Any, Callable, Dict, Literal, Mapping, Optional, Tuple, Union import jax @@ -24,47 +22,114 @@ from ott.geometry import costs, pointcloud from ott.problems.linear import linear_problem from ott.problems.quadratic import quadratic_problem +from ott.solvers import was_solver from ott.solvers.linear import sinkhorn +from ott.solvers.quadratic import gromov_wasserstein -__all__ = ["BaseNeuralSolver", "ResampleMixin", "UnbalancednessMixin"] +__all__ = ["ResampleMixin", "UnbalancednessHandler"] -class BaseNeuralSolver(abc.ABC): - """Base class for neural solvers. +def _get_sinkhorn_match_fn( + ot_solver: Any, + epsilon: float = 1e-2, + cost_fn: Optional[costs.CostFn] = None, + scale_cost: Union[bool, int, float, Literal["mean", "max_norm", "max_bound", + "max_cost", "median"]] = "mean", + tau_a: float = 1.0, + tau_b: float = 1.0, + *, + filter_input: bool = False, +) -> Callable: - Args: - iterations: Number of iterations to train for. - valid_freq: Frequency at which to run validation. 
- """ - - def __init__(self, iterations: int, valid_freq: int, **_: Any): - self.iterations = iterations - self.valid_freq = valid_freq - - @abc.abstractmethod - def setup(self, *args: Any, **kwargs: Any): - """Setup the model.""" - - @abc.abstractmethod - def __call__(self, *args: Any, **kwargs: Any): - """Train the model.""" + @jax.jit + def match_pairs(x: jnp.ndarray, y: jnp.ndarray) -> jnp.ndarray: + geom = pointcloud.PointCloud( + x, y, epsilon=epsilon, scale_cost=scale_cost, cost_fn=cost_fn + ) + return ot_solver( + linear_problem.LinearProblem(geom, tau_a=tau_a, tau_b=tau_b) + ) - @abc.abstractmethod - def transport(self, *args: Any, forward: bool, **kwargs: Any) -> Any: - """Transport.""" + @jax.jit + def match_pairs_filtered( + x_lin: jnp.ndarray, x_quad: jnp.ndarray, y_lin: jnp.ndarray, + y_quad: jnp.ndarray + ) -> jnp.ndarray: + geom = pointcloud.PointCloud( + x_lin, y_lin, epsilon=epsilon, scale_cost=scale_cost, cost_fn=cost_fn + ) + return ot_solver( + linear_problem.LinearProblem(geom, tau_a=tau_a, tau_b=tau_b) + ) - @abc.abstractmethod - def save(self, path: pathlib.Path): - """Save the model.""" + return match_pairs_filtered if filter_input else match_pairs + + +def _get_gromov_match_fn( + ot_solver: Any, + cost_fn: Union[Any, Mapping[str, Any]], + scale_cost: Union[Union[bool, int, float, + Literal["mean", "max_norm", "max_bound", "max_cost", + "median"]], + Dict[str, Union[bool, int, float, + Literal["mean", "max_norm", "max_bound", + "max_cost", "median"]]]], + tau_a: float, + tau_b: float, + fused_penalty: float, +) -> Callable: + if isinstance(cost_fn, Mapping): + assert "cost_fn_xx" in cost_fn + assert "cost_fn_yy" in cost_fn + cost_fn_xx = cost_fn["cost_fn_xx"] + cost_fn_yy = cost_fn["cost_fn_yy"] + if fused_penalty > 0: + assert "cost_fn_xy" in cost_fn_xx + cost_fn_xy = cost_fn["cost_fn_xy"] + else: + cost_fn_xx = cost_fn_yy = cost_fn_xy = cost_fn + + if isinstance(scale_cost, Mapping): + assert "scale_cost_xx" in scale_cost + assert 
"scale_cost_yy" in scale_cost + scale_cost_xx = scale_cost["scale_cost_xx"] + scale_cost_yy = scale_cost["scale_cost_yy"] + if fused_penalty > 0: + assert "scale_cost_xy" in scale_cost + scale_cost_xy = cost_fn["scale_cost_xy"] + else: + scale_cost_xx = scale_cost_yy = scale_cost_xy = scale_cost - @abc.abstractmethod - def load(self, path: pathlib.Path): - """Load the model.""" + @jax.jit + def match_pairs( + x_lin: Optional[jnp.ndarray], + x_quad: Tuple[jnp.ndarray, jnp.ndarray], + y_lin: Optional[jnp.ndarray], + y_quad: Tuple[jnp.ndarray, jnp.ndarray], + ) -> jnp.ndarray: + geom_xx = pointcloud.PointCloud( + x=x_quad, y=x_quad, cost_fn=cost_fn_xx, scale_cost=scale_cost_xx + ) + geom_yy = pointcloud.PointCloud( + x=y_quad, y=y_quad, cost_fn=cost_fn_yy, scale_cost=scale_cost_yy + ) + if fused_penalty > 0: + geom_xy = pointcloud.PointCloud( + x=x_lin, y=y_lin, cost_fn=cost_fn_xy, scale_cost=scale_cost_xy + ) + else: + geom_xy = None + prob = quadratic_problem.QuadraticProblem( + geom_xx, + geom_yy, + geom_xy, + fused_penalty=fused_penalty, + tau_a=tau_a, + tau_b=tau_b + ) + return ot_solver(prob) - @property - @abc.abstractmethod - def training_logs(self) -> Dict[str, Any]: - """Return the training logs.""" + return match_pairs class ResampleMixin: @@ -83,14 +148,14 @@ def _resample_data( indices_source = indices // tmat.shape[1] indices_target = indices % tmat.shape[1] return tuple( - b[indices_source, :] if b is not None else None for b in source_arrays + b[indices_source] if b is not None else None for b in source_arrays ), tuple( - b[indices_target, :] if b is not None else None for b in target_arrays + b[indices_target] if b is not None else None for b in target_arrays ) def _sample_conditional_indices_from_tmap( self, - key: jax.random.PRNGKeyArray, + rng: jax.Array, tmat: jnp.ndarray, k_samples_per_x: Union[int, jnp.ndarray], source_arrays: Tuple[jnp.ndarray, ...], @@ -101,9 +166,9 @@ def _sample_conditional_indices_from_tmap( batch_size = tmat.shape[0] 
left_marginals = tmat.sum(axis=1) if not source_is_balanced: - key, key2 = jax.random.split(key, 2) + rng, rng_2 = jax.random.split(rng, 2) indices = jax.random.choice( - key=key2, + key=rng_2, a=jnp.arange(len(left_marginals)), p=left_marginals, shape=(len(left_marginals),) @@ -112,11 +177,8 @@ def _sample_conditional_indices_from_tmap( indices = jnp.arange(batch_size) tmat_adapted = tmat[indices] indices_per_row = jax.vmap( - lambda tmat_adapted: jax.random.choice( - key=key, - a=jnp.arange(batch_size), - p=tmat_adapted, - shape=(k_samples_per_x,) + lambda row: jax.random.choice( + key=rng, a=jnp.arange(batch_size), p=row, shape=(k_samples_per_x,) ), in_axes=0, out_axes=0, @@ -133,119 +195,60 @@ def _sample_conditional_indices_from_tmap( -1)) if b is not None else None for b in source_arrays ), tuple( - jnp.reshape(b[indices_target, :], (k_samples_per_x, batch_size, - -1)) if b is not None else None + jnp.reshape(b[indices_target], (k_samples_per_x, batch_size, + -1)) if b is not None else None for b in target_arrays ) - def _get_sinkhorn_match_fn( - self, - ot_solver: Any, - epsilon: float = 1e-2, - cost_fn: Optional[costs.CostFn] = None, - scale_cost: Union[bool, int, float, - Literal["mean", "max_norm", "max_bound", "max_cost", - "median"]] = "mean", - tau_a: float = 1.0, - tau_b: float = 1.0, - *, - filter_input: bool = False, - ) -> Callable: + def _get_sinkhorn_match_fn(self, *args, **kwargs) -> jnp.ndarray: + fn = _get_sinkhorn_match_fn(*args, **kwargs) @jax.jit - def match_pairs(x: jnp.ndarray, y: jnp.ndarray) -> jnp.ndarray: - geom = pointcloud.PointCloud( - x, y, epsilon=epsilon, scale_cost=scale_cost, cost_fn=cost_fn - ) - return ot_solver( - linear_problem.LinearProblem(geom, tau_a=tau_a, tau_b=tau_b) - ).matrix + def match_pairs(*args, **kwargs): + return fn(*args, **kwargs).matrix - @jax.jit - def match_pairs_filtered( - x_lin: jnp.ndarray, x_quad: jnp.ndarray, y_lin: jnp.ndarray, - y_quad: jnp.ndarray - ) -> jnp.ndarray: - geom = 
pointcloud.PointCloud( - x_lin, y_lin, epsilon=epsilon, scale_cost=scale_cost, cost_fn=cost_fn - ) - return ot_solver( - linear_problem.LinearProblem(geom, tau_a=tau_a, tau_b=tau_b) - ).matrix + return match_pairs - return match_pairs_filtered if filter_input else match_pairs - - def _get_gromov_match_fn( - self, - ot_solver: Any, - cost_fn: Union[Any, Mapping[str, Any]], - scale_cost: Union[Union[bool, int, float, - Literal["mean", "max_norm", "max_bound", - "max_cost", "median"]], - Dict[str, Union[bool, int, float, - Literal["mean", "max_norm", "max_bound", - "max_cost", "median"]]]], - tau_a: float, - tau_b: float, - fused_penalty: float, - ) -> Callable: - if isinstance(cost_fn, Mapping): - assert "cost_fn_xx" in cost_fn - assert "cost_fn_yy" in cost_fn - cost_fn_xx = cost_fn["cost_fn_xx"] - cost_fn_yy = cost_fn["cost_fn_yy"] - if fused_penalty > 0: - assert "cost_fn_xy" in cost_fn_xx - cost_fn_xy = cost_fn["cost_fn_xy"] - else: - cost_fn_xx = cost_fn_yy = cost_fn_xy = cost_fn - - if isinstance(scale_cost, Mapping): - assert "scale_cost_xx" in scale_cost - assert "scale_cost_yy" in scale_cost - scale_cost_xx = scale_cost["scale_cost_xx"] - scale_cost_yy = scale_cost["scale_cost_yy"] - if fused_penalty > 0: - assert "scale_cost_xy" in scale_cost - scale_cost_xy = cost_fn["scale_cost_xy"] - else: - scale_cost_xx = scale_cost_yy = scale_cost_xy = scale_cost + def _get_gromov_match_fn(self, *args, **kwargs) -> jnp.ndarray: + fn = _get_gromov_match_fn(*args, **kwargs) @jax.jit - def match_pairs( - x_lin: Optional[jnp.ndarray], - x_quad: Tuple[jnp.ndarray, jnp.ndarray], - y_lin: Optional[jnp.ndarray], - y_quad: Tuple[jnp.ndarray, jnp.ndarray], - ) -> jnp.ndarray: - geom_xx = pointcloud.PointCloud( - x=x_quad, y=x_quad, cost_fn=cost_fn_xx, scale_cost=scale_cost_xx - ) - geom_yy = pointcloud.PointCloud( - x=y_quad, y=y_quad, cost_fn=cost_fn_yy, scale_cost=scale_cost_yy - ) - if fused_penalty > 0: - geom_xy = pointcloud.PointCloud( - x=x_lin, y=y_lin, 
cost_fn=cost_fn_xy, scale_cost=scale_cost_xy - ) - else: - geom_xy = None - prob = quadratic_problem.QuadraticProblem( - geom_xx, - geom_yy, - geom_xy, - fused_penalty=fused_penalty, - tau_a=tau_a, - tau_b=tau_b - ) - out = ot_solver(prob) - return out.matrix + def match_pairs(*args, **kwargs): + return fn(*args, **kwargs).matrix return match_pairs -class UnbalancednessMixin: - """Mixin class to incorporate unbalancedness into neural OT models.""" +class UnbalancednessHandler: + """Class to incorporate unbalancedness into neural OT models. + + This class implements the concepts introduced in :cite:`eyring:23` + in the Monge Map scenario and :cite:`klein:23` for the entropic OT case + for linear and quadratic cases. + + Args: + rng: Random number generator. + source_dim: Dimension of the source domain. + target_dim: Dimension of the target domain. + cond_dim: Dimension of the conditioning variable. + If :obj:`None`, no conditioning is used. + tau_a: Unbalancedness parameter for the source distribution. + tau_b: Unbalancedness parameter for the target distribution. + rescaling_a: Rescaling function for the source distribution. + If :obj:`None`, the left rescaling factor is not learnt. + rescaling_b: Rescaling function for the target distribution. + If :obj:`None`, the right rescaling factor is not learnt. + opt_eta: Optimizer for the left rescaling function. + opt_xi: Optimzier for the right rescaling function. + resample_epsilon: Epsilon for resampling. + scale_cost: Scaling of the cost matrix for estimating the rescaling factors. + ot_solver: Solver to compute unbalanced marginals. If `ot_solver` is `None`, + the method + :meth:`ott.neural.models.base_solver.UnbalancednessHandler.compute_unbalanced_marginals` + is not available, and hence the unbalanced marginals must be computed by the neural solver. + kwargs: Additional keyword arguments. 
+ + """ def __init__( self, @@ -259,12 +262,12 @@ def __init__( jnp.ndarray]] = None, rescaling_b: Optional[Callable[[jnp.ndarray, Optional[jnp.ndarray]], jnp.ndarray]] = None, - seed: Optional[int] = None, opt_eta: Optional[optax.GradientTransformation] = None, opt_xi: Optional[optax.GradientTransformation] = None, resample_epsilon: float = 1e-2, scale_cost: Union[bool, int, float, Literal["mean", "max_cost", "median"]] = "mean", + ot_solver: Optional[was_solver.WassersteinSolver] = None, **kwargs: Mapping[str, Any], ): self.rng_unbalanced = rng @@ -275,48 +278,51 @@ def __init__( self.tau_b = tau_b self.rescaling_a = rescaling_a self.rescaling_b = rescaling_b - self.seed = seed self.opt_eta = opt_eta self.opt_xi = opt_xi self.resample_epsilon = resample_epsilon self.scale_cost = scale_cost + self.ot_solver = ot_solver + + if isinstance(ot_solver, sinkhorn.Sinkhorn): + self.compute_unbalanced_marginals = ( + self._get_compute_unbalanced_marginals_lin( + tau_a=tau_a, + tau_b=tau_b, + resample_epsilon=resample_epsilon, + scale_cost=scale_cost, + **kwargs + ) + ) + elif isinstance(ot_solver, gromov_wasserstein.GromovWasserstein): + self.compute_unbalanced_marginals = self._get_compute_unbalanced_marginals_quad + self.setup(source_dim=source_dim, target_dim=target_dim, cond_dim=cond_dim) - self._compute_unbalanced_marginals = self._get_compute_unbalanced_marginals( - tau_a=tau_a, - tau_b=tau_b, - resample_epsilon=resample_epsilon, - scale_cost=scale_cost, - **kwargs - ) - self._setup(source_dim=source_dim, target_dim=target_dim, cond_dim=cond_dim) + def _get_compute_unbalanced_marginals_lin( + self, *args: Any, **kwargs: Mapping[str, Any] + ) -> Tuple[jnp.ndarray, jnp.ndarray]: + """Compute the unbalanced source and target marginals for a batch.""" + fn = _get_sinkhorn_match_fn(*args, **kwargs) - def _get_compute_unbalanced_marginals( - self, - tau_a: float, - tau_b: float, - resample_epsilon: float, - scale_cost: Union[bool, int, float, Literal["mean", "max_cost", - 
"median"]] = "mean", - **kwargs: Mapping[str, Any], + @jax.jit + def compute_unbalanced_marginals_lin(*args, **kwargs): + out = fn(*args, **kwargs) + return out.marginals(axis=1), out.marginals(axis=0) + + return compute_unbalanced_marginals_lin + + def _get_compute_unbalanced_marginals_quad( + self, *args: Any, **kwargs: Mapping[str, Any] ) -> Tuple[jnp.ndarray, jnp.ndarray]: """Compute the unbalanced source and target marginals for a batch.""" + fn = _get_sinkhorn_match_fn(*args, **kwargs) @jax.jit - def compute_unbalanced_marginals( - batch_source: jnp.ndarray, batch_target: jnp.ndarray - ) -> Tuple[jnp.ndarray, jnp.ndarray]: - geom = pointcloud.PointCloud( - batch_source, - batch_target, - epsilon=resample_epsilon, - scale_cost=scale_cost - ) - out = sinkhorn.Sinkhorn(**kwargs)( - linear_problem.LinearProblem(geom, tau_a=tau_a, tau_b=tau_b) - ) - return out.marginal(axis=1), out.marginal(axis=0) + def compute_unbalanced_marginals_quad(*args, **kwargs): + out = fn(*args, **kwargs) + return out.marginals(axis=1), out.marginals(axis=0) - return compute_unbalanced_marginals + return compute_unbalanced_marginals_quad @jax.jit def _resample_unbalanced( @@ -331,11 +337,19 @@ def _resample_unbalanced( ) return tuple(b[indices] if b is not None else None for b in batch) - def _setup(self, source_dim: int, target_dim: int, cond_dim: int): + def setup(self, source_dim: int, target_dim: int, cond_dim: int): + """Setup the model. + + Args: + source_dim: Dimension of the source domain. + target_dim: Dimension of the target domain. + cond_dim: Dimension of the conditioning variable. + If :obj:`None`, no conditioning is used. 
+ """ self.rng_unbalanced, rng_eta, rng_xi = jax.random.split( self.rng_unbalanced, 3 ) - self.unbalancedness_step_fn = self._get_rescaling_step_fn() + self.step_fn = self._get_rescaling_step_fn() if self.rescaling_a is not None: self.opt_eta = ( self.opt_eta if self.opt_eta is not None else diff --git a/src/ott/neural/models/models.py b/src/ott/neural/models/nets.py similarity index 100% rename from src/ott/neural/models/models.py rename to src/ott/neural/models/nets.py diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 4960c1bec..191b04d2a 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -17,6 +17,7 @@ import pytest import jax.numpy as jnp +from jax import random import optax @@ -24,7 +25,8 @@ from ott.neural.flows.genot import GENOT from ott.neural.flows.models import VelocityField from ott.neural.flows.samplers import uniform_sampler -from ott.neural.models.models import RescalingMLP +from ott.neural.models import base_solver +from ott.neural.models.nets import RescalingMLP from ott.solvers.linear import sinkhorn from ott.solvers.quadratic import gromov_wasserstein @@ -32,7 +34,7 @@ class TestGENOT: @pytest.mark.parametrize("scale_cost", ["mean", 2.0]) - @pytest.mark.parametrize("k_samples_per_x", [1, 2]) + @pytest.mark.parametrize("k_samples_per_x", [1, 3]) @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) def test_genot_linear_unconditional( self, genot_data_loader_linear: Iterator, @@ -50,16 +52,15 @@ def test_genot_linear_unconditional( target_dim = target_lin.shape[1] condition_dim = 0 - print("source dim is ", source_dim) - print("target dim is ", target_dim) - print("condition dim is ", condition_dim) - neural_vf = VelocityField( output_dim=target_dim, condition_dim=source_dim + condition_dim, latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() + unbalancedness_handler = base_solver.UnbalancednessHandler( + random.PRNGKey(0), source_dim, target_dim, condition_dim + ) time_sampler = 
uniform_sampler optimizer = optax.adam(learning_rate=1e-3) genot = GENOT( @@ -75,6 +76,7 @@ def test_genot_linear_unconditional( scale_cost=scale_cost, optimizer=optimizer, time_sampler=time_sampler, + unbalancedness_handler=unbalancedness_handler, k_samples_per_x=k_samples_per_x, solver_latent_to_data=solver_latent_to_data, ) @@ -110,6 +112,11 @@ def test_genot_quad_unconditional( latent_embed_dim=5, ) ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) + + unbalancedness_handler = base_solver.UnbalancednessHandler( + random.PRNGKey(0), source_dim, target_dim, condition_dim + ) + time_sampler = functools.partial(uniform_sampler, offset=1e-2) optimizer = optax.adam(learning_rate=1e-3) genot = GENOT( @@ -122,6 +129,7 @@ def test_genot_quad_unconditional( ot_solver=ot_solver, epsilon=None, cost_fn=costs.SqEuclidean(), + unbalancedness_handler=unbalancedness_handler, scale_cost=1.0, optimizer=optimizer, time_sampler=time_sampler, @@ -156,6 +164,10 @@ def test_genot_fused_unconditional( latent_embed_dim=5, ) ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) + unbalancedness_handler = base_solver.UnbalancednessHandler( + random.PRNGKey(0), source_dim, target_dim, condition_dim + ) + optimizer = optax.adam(learning_rate=1e-3) genot = GENOT( neural_vf, @@ -168,6 +180,7 @@ def test_genot_fused_unconditional( ot_solver=ot_solver, cost_fn=costs.SqEuclidean(), scale_cost=1.0, + unbalancedness_handler=unbalancedness_handler, optimizer=optimizer, fused_penalty=0.5, k_samples_per_x=k_samples_per_x, @@ -203,6 +216,10 @@ def test_genot_linear_conditional( ) ot_solver = sinkhorn.Sinkhorn() time_sampler = uniform_sampler + unbalancedness_handler = base_solver.UnbalancednessHandler( + random.PRNGKey(0), source_dim, target_dim, condition_dim + ) + optimizer = optax.adam(learning_rate=1e-3) genot = GENOT( neural_vf, @@ -215,6 +232,7 @@ def test_genot_linear_conditional( epsilon=0.1, cost_fn=costs.SqEuclidean(), scale_cost=1.0, + 
unbalancedness_handler=unbalancedness_handler, optimizer=optimizer, time_sampler=time_sampler, k_samples_per_x=k_samples_per_x, @@ -250,6 +268,10 @@ def test_genot_quad_conditional( ) ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) time_sampler = uniform_sampler + unbalancedness_handler = base_solver.UnbalancednessHandler( + random.PRNGKey(0), source_dim, target_dim, condition_dim + ) + optimizer = optax.adam(learning_rate=1e-3) genot = GENOT( neural_vf, @@ -262,6 +284,7 @@ def test_genot_quad_conditional( epsilon=None, cost_fn=costs.SqEuclidean(), scale_cost=1.0, + unbalancedness_handler=unbalancedness_handler, optimizer=optimizer, time_sampler=time_sampler, k_samples_per_x=k_samples_per_x, @@ -298,9 +321,10 @@ def test_genot_fused_conditional( ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) time_sampler = uniform_sampler optimizer = optax.adam(learning_rate=1e-3) - print("source dim is ", source_dim) - print("target dim is ", target_dim) - print("condition dim is ", condition_dim) + unbalancedness_handler = base_solver.UnbalancednessHandler( + random.PRNGKey(0), source_dim, target_dim, condition_dim + ) + genot = GENOT( neural_vf, input_dim=source_dim, @@ -312,6 +336,7 @@ def test_genot_fused_conditional( epsilon=None, cost_fn=costs.SqEuclidean(), scale_cost=1.0, + unbalancedness_handler=unbalancedness_handler, optimizer=optimizer, time_sampler=time_sampler, k_samples_per_x=k_samples_per_x, @@ -357,10 +382,23 @@ def test_genot_linear_learn_rescaling( ot_solver = sinkhorn.Sinkhorn() time_sampler = uniform_sampler optimizer = optax.adam(learning_rate=1e-3) + tau_a = 0.9 tau_b = 0.2 rescaling_a = RescalingMLP(hidden_dim=4, condition_dim=condition_dim) rescaling_b = RescalingMLP(hidden_dim=4, condition_dim=condition_dim) + + unbalancedness_handler = base_solver.UnbalancednessHandler( + random.PRNGKey(0), + source_dim, + target_dim, + condition_dim, + tau_a=tau_a, + tau_b=tau_b, + rescaling_a=rescaling_a, + rescaling_b=rescaling_b + ) + 
genot = GENOT( neural_vf, input_dim=source_dim, @@ -374,18 +412,19 @@ def test_genot_linear_learn_rescaling( scale_cost=1.0, optimizer=optimizer, time_sampler=time_sampler, - tau_a=tau_a, - tau_b=tau_b, - rescaling_a=rescaling_a, - rescaling_b=rescaling_b, + unbalancedness_handler=unbalancedness_handler, ) genot(data_loader, data_loader) - result_eta = genot.evaluate_eta(source_lin, condition=source_condition) + result_eta = genot.unbalancedness_handler.evaluate_eta( + source_lin, condition=source_condition + ) assert isinstance(result_eta, jnp.ndarray) assert jnp.sum(jnp.isnan(result_eta)) == 0 - result_xi = genot.evaluate_xi(target_lin, condition=source_condition) + result_xi = genot.unbalancedness_handler.evaluate_xi( + target_lin, condition=source_condition + ) assert isinstance(result_xi, jnp.ndarray) assert jnp.sum(jnp.isnan(result_xi)) == 0 diff --git a/tests/neural/losses_test.py b/tests/neural/losses_test.py index 6379b9dfa..733a8c2b3 100644 --- a/tests/neural/losses_test.py +++ b/tests/neural/losses_test.py @@ -19,7 +19,7 @@ from ott.geometry import costs from ott.neural.gaps import monge_gap -from ott.neural.models import models +from ott.neural.models import nets @pytest.mark.fast() @@ -35,7 +35,7 @@ def test_monge_gap_non_negativity( rng1, rng2 = jax.random.split(rng, 2) reference_points = jax.random.normal(rng1, (n_samples, n_features)) - model = models.MLP(dim_hidden=[8, 8], is_potential=False) + model = nets.MLP(dim_hidden=[8, 8], is_potential=False) params = model.init(rng2, x=reference_points[0]) target = model.apply(params, reference_points) diff --git a/tests/neural/map_estimator_test.py b/tests/neural/map_estimator_test.py index 508143465..c42f31daa 100644 --- a/tests/neural/map_estimator_test.py +++ b/tests/neural/map_estimator_test.py @@ -20,7 +20,7 @@ from ott import datasets from ott.geometry import pointcloud from ott.neural.gaps import map_estimator, monge_gap -from ott.neural.models import models +from ott.neural.models import nets from 
ott.tools import sinkhorn_divergence @@ -51,7 +51,7 @@ def regularizer(x, y): return gap, out.n_iters # define the model - model = models.MLP(dim_hidden=[16, 8], is_potential=False) + model = nets.MLP(dim_hidden=[16, 8], is_potential=False) # generate data train_dataset, valid_dataset, dim_data = ( diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index 7f6a1a8dc..e4d22a789 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -17,10 +17,12 @@ import pytest import jax.numpy as jnp +from jax import random import optax from ott.neural.flows import flows, models, otfm, samplers +from ott.neural.models import base_solver, nets from ott.solvers.linear import sinkhorn @@ -36,6 +38,8 @@ class TestOTFlowMatching: def test_flow_matching( self, data_loader_gaussian, flow: Type[flows.BaseFlow] ): + input_dim = 2 + condition_dim = 0 neural_vf = models.VelocityField( output_dim=2, condition_dim=0, @@ -44,16 +48,20 @@ def test_flow_matching( ot_solver = sinkhorn.Sinkhorn() time_sampler = samplers.uniform_sampler optimizer = optax.adam(learning_rate=1e-3) + unbalancedness_handler = base_solver.UnbalancednessHandler( + random.PRNGKey(0), input_dim, input_dim, condition_dim + ) fm = otfm.OTFlowMatching( neural_vf, - input_dim=2, - cond_dim=0, + input_dim=input_dim, + cond_dim=condition_dim, iterations=3, valid_freq=2, ot_solver=ot_solver, flow=flow, time_sampler=time_sampler, - optimizer=optimizer + optimizer=optimizer, + unbalancedness_handler=unbalancedness_handler ) fm(data_loader_gaussian, data_loader_gaussian) @@ -82,6 +90,8 @@ def test_flow_matching( def test_flow_matching_with_conditions( self, data_loader_gaussian_with_conditions, flow: Type[flows.BaseFlow] ): + input_dim = 2 + condition_dim = 1 neural_vf = models.VelocityField( output_dim=2, condition_dim=1, @@ -90,6 +100,10 @@ def test_flow_matching_with_conditions( ot_solver = sinkhorn.Sinkhorn() time_sampler = functools.partial(samplers.uniform_sampler, offset=1e-5) optimizer = 
optax.adam(learning_rate=1e-3) + unbalancedness_handler = base_solver.UnbalancednessHandler( + random.PRNGKey(0), input_dim, input_dim, condition_dim + ) + fm = otfm.OTFlowMatching( neural_vf, input_dim=2, @@ -99,7 +113,8 @@ def test_flow_matching_with_conditions( ot_solver=ot_solver, flow=flow, time_sampler=time_sampler, - optimizer=optimizer + optimizer=optimizer, + unbalancedness_handler=unbalancedness_handler ) fm( data_loader_gaussian_with_conditions, @@ -131,24 +146,31 @@ def test_flow_matching_with_conditions( def test_flow_matching_conditional( self, data_loader_gaussian_conditional, flow: Type[flows.BaseFlow] ): + dim = 2 + condition_dim = 0 neural_vf = models.VelocityField( - output_dim=2, - condition_dim=0, + output_dim=dim, + condition_dim=condition_dim, latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() time_sampler = samplers.uniform_sampler optimizer = optax.adam(learning_rate=1e-3) + unbalancedness_handler = base_solver.UnbalancednessHandler( + random.PRNGKey(0), dim, dim, condition_dim + ) + fm = otfm.OTFlowMatching( neural_vf, - input_dim=2, - cond_dim=0, + input_dim=dim, + cond_dim=condition_dim, iterations=3, valid_freq=2, ot_solver=ot_solver, flow=flow, time_sampler=time_sampler, - optimizer=optimizer + optimizer=optimizer, + unbalancedness_handler=unbalancedness_handler ) fm(data_loader_gaussian_conditional, data_loader_gaussian_conditional) @@ -191,8 +213,19 @@ def test_flow_matching_learn_rescaling( tau_a = 0.9 tau_b = 0.2 - rescaling_a = models.RescalingMLP(hidden_dim=4, condition_dim=condition_dim) - rescaling_b = models.RescalingMLP(hidden_dim=4, condition_dim=condition_dim) + rescaling_a = nets.RescalingMLP(hidden_dim=4, condition_dim=condition_dim) + rescaling_b = nets.RescalingMLP(hidden_dim=4, condition_dim=condition_dim) + unbalancedness_handler = base_solver.UnbalancednessHandler( + random.PRNGKey(0), + source_dim, + source_dim, + condition_dim, + tau_a=tau_a, + tau_b=tau_b, + rescaling_a=rescaling_a, + rescaling_b=rescaling_b + 
) + fm = otfm.OTFlowMatching( neural_vf, input_dim=source_dim, @@ -203,20 +236,17 @@ def test_flow_matching_learn_rescaling( flow=flow, time_sampler=time_sampler, optimizer=optimizer, - tau_a=tau_a, - tau_b=tau_b, - rescaling_a=rescaling_a, - rescaling_b=rescaling_b, + unbalancedness_handler=unbalancedness_handler, ) fm(data_loader, data_loader) - result_eta = fm.evaluate_eta( + result_eta = fm.unbalancedness_handler.evaluate_eta( batch["source_lin"], condition=batch["source_conditions"] ) assert isinstance(result_eta, jnp.ndarray) assert jnp.sum(jnp.isnan(result_eta)) == 0 - result_xi = fm.evaluate_xi( + result_xi = fm.unbalancedness_handler.evaluate_xi( batch["target_lin"], condition=batch["source_conditions"] ) assert isinstance(result_xi, jnp.ndarray) From 5e05bfc69e6f9b3fa492e2877759712bb873b23d Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Fri, 9 Feb 2024 14:43:54 +0100 Subject: [PATCH 081/186] incorporate more changes --- src/ott/neural/flows/layers.py | 35 ++++++------ src/ott/neural/flows/samplers.py | 15 ++--- src/ott/tools/soft_sort.py | 2 +- tests/solvers/quadratic/lower_bound_test.py | 63 +++++++++++++++++++++ 4 files changed, 89 insertions(+), 26 deletions(-) create mode 100644 tests/solvers/quadratic/lower_bound_test.py diff --git a/src/ott/neural/flows/layers.py b/src/ott/neural/flows/layers.py index 84a526b1f..d18980c38 100644 --- a/src/ott/neural/flows/layers.py +++ b/src/ott/neural/flows/layers.py @@ -11,37 +11,36 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -import abc import jax.numpy as jnp import flax.linen as nn -__all__ = ["TimeEncoder", "CyclicalTimeEncoder"] +__all__ = ["CyclicalTimeEncoder"] -class TimeEncoder(nn.Module, abc.ABC): - """A time encoder.""" - - @abc.abstractmethod - def __call__(self, t: jnp.ndarray) -> jnp.ndarray: - """Encode the time. 
- - Args: - t: Input time of shape (batch_size, 1). - - Returns: - The encoded time. - """ - pass +class CyclicalTimeEncoder(nn.Module): + r"""A cyclical time encoder. + Encodes time :math:`t` as + :math:`cos(\tilde{t})` and :math:`sin(\tilde{t})` + where :math:`\tilde{t} = [2\\pi t, 2\\pi 2 t,\\ldots, 2\\pi n_frequencies t]` -class CyclicalTimeEncoder(nn.Module): - """A cyclical time encoder.""" + Args: + n_frequencies: Frequency of cyclical encoding. + """ n_frequencies: int = 128 @nn.compact def __call__(self, t: jnp.ndarray) -> jnp.ndarray: # noqa: D102 + """Encode time :math:`t` into a cyclical representation. + + Args: + t: Time of shape ``[n, 1]``. + + Returns: + Encoded time of shape ``[n, 2 * n_frequencies]`` + """ freq = 2 * jnp.arange(self.n_frequencies) * jnp.pi t = freq * t return jnp.concatenate((jnp.cos(t), jnp.sin(t)), axis=-1) diff --git a/src/ott/neural/flows/samplers.py b/src/ott/neural/flows/samplers.py index 1bfee16b4..30373380a 100644 --- a/src/ott/neural/flows/samplers.py +++ b/src/ott/neural/flows/samplers.py @@ -26,10 +26,12 @@ def uniform_sampler( high: float = 1.0, offset: Optional[float] = None ) -> jnp.ndarray: - """Sample from a uniform distribution. + r"""Sample from a uniform distribution. - Sample :math:`t` from a uniform distribution :math:`[low, high]` with - offset `offset`. + Sample :math:`t` from a uniform distribution :math:`[low, high]`. + If `offset` is not :obj:`None`, one element :math:`t` is sampled from + :math:`[low, high]` and the K samples are constructed via + :math:`(t + k)/K \mod (high - low - offset) + low`. Args: rng: Random number generator. 
@@ -44,7 +46,6 @@ def uniform_sampler( """ if offset is None: return jax.random.uniform(rng, (num_samples, 1), minval=low, maxval=high) - return ( - jax.random.uniform(rng, (1, 1), minval=low, maxval=high) + - jnp.arange(num_samples)[:, None] / num_samples - ) % ((high - low) - offset) + t = jax.random.uniform(rng, (1, 1), minval=low, maxval=high) + mod_term = ((high - low) - offset) + return (t + jnp.arange(num_samples)[:, None] / num_samples) % mod_term diff --git a/src/ott/tools/soft_sort.py b/src/ott/tools/soft_sort.py index 1a30359ee..9e2e0c5d0 100644 --- a/src/ott/tools/soft_sort.py +++ b/src/ott/tools/soft_sort.py @@ -457,7 +457,7 @@ def _quantile( def multivariate_cdf_quantile_maps( inputs: jnp.ndarray, - target_sampler: Optional[Callable[[jnp.ndarray, Tuple[int, int]], + target_sampler: Optional[Callable[[jax.Array, Tuple[int, int]], jax.Array]] = None, rng: Optional[jax.Array] = None, num_target_samples: Optional[int] = None, diff --git a/tests/solvers/quadratic/lower_bound_test.py b/tests/solvers/quadratic/lower_bound_test.py new file mode 100644 index 000000000..08353f711 --- /dev/null +++ b/tests/solvers/quadratic/lower_bound_test.py @@ -0,0 +1,63 @@ +# Copyright OTT-JAX +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest + +import jax +import jax.numpy as jnp + +from ott.geometry import costs, distrib_costs, pointcloud +from ott.problems.quadratic import quadratic_problem +from ott.solvers.quadratic import lower_bound + + +class TestLowerBoundSolver: + + @pytest.fixture(autouse=True) + def initialize(self, rng: jax.Array): + d_x = 2 + d_y = 3 + self.n, self.m = 13, 15 + rngs = jax.random.split(rng, 4) + self.x = jax.random.uniform(rngs[0], (self.n, d_x)) + self.y = jax.random.uniform(rngs[1], (self.m, d_y)) + # Currently the Lower Bound only supports uniform distributions: + a = jnp.ones(self.n) + b = jnp.ones(self.m) + self.a = a / jnp.sum(a) + self.b = b / jnp.sum(b) + self.cx = jax.random.uniform(rngs[2], (self.n, self.n)) + self.cy = jax.random.uniform(rngs[3], (self.m, self.m)) + + @pytest.mark.fast.with_args( + "ground_cost", + [costs.SqEuclidean(), costs.PNormP(1.5)], + only_fast=0, + ) + def test_lb_pointcloud(self, ground_cost: costs.TICost): + x, y = self.x, self.y + + geom_x = pointcloud.PointCloud(x) + geom_y = pointcloud.PointCloud(y) + prob = quadratic_problem.QuadraticProblem( + geom_x, geom_y, a=self.a, b=self.b + ) + distrib_cost = distrib_costs.UnivariateWasserstein(ground_cost=ground_cost) + solver = lower_bound.LowerBoundSolver( + epsilon=1e-1, distrib_cost=distrib_cost + ) + + out = jax.jit(solver)(prob) + + assert not jnp.isnan(out.reg_ot_cost) From 831d3ea4a8b15eb9a7914fd780b65e86cd2fc305 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Fri, 9 Feb 2024 15:05:37 +0100 Subject: [PATCH 082/186] move noise sampling to flows --- src/ott/neural/flows/flows.py | 6 ++++-- src/ott/neural/flows/genot.py | 25 ++++--------------------- src/ott/neural/flows/models.py | 2 -- src/ott/neural/flows/otfm.py | 27 +++++++-------------------- 4 files changed, 15 insertions(+), 45 deletions(-) diff --git a/src/ott/neural/flows/flows.py b/src/ott/neural/flows/flows.py index c379dcbc3..51e19fb5c 100644 --- a/src/ott/neural/flows/flows.py +++ 
b/src/ott/neural/flows/flows.py @@ -14,6 +14,7 @@ import abc import jax.numpy as jnp +import jax __all__ = [ "BaseFlow", @@ -75,7 +76,7 @@ def compute_ut( """ def compute_xt( - self, noise: jnp.ndarray, t: jnp.ndarray, src: jnp.ndarray, + self, rng: jax.Array, t: jnp.ndarray, src: jnp.ndarray, tgt: jnp.ndarray ) -> jnp.ndarray: """Sample from the probability path. @@ -84,7 +85,7 @@ def compute_xt( time :math:`t`. Args: - noise: Noise sampled from a standard normal distribution. + rng: Random number generator. t: Time :math:`t`. src: Sample from the source distribution. tgt: Sample from the target distribution. @@ -93,6 +94,7 @@ def compute_xt( Samples from the probability path between :math:`x_0` and :math:`x_1` at time :math:`t`. """ + noise = jax.random.normal(rng, shape=(src.shape)) mu_t = self.compute_mu_t(t, src, tgt) sigma_t = self.compute_sigma_t(t) return mu_t + sigma_t * noise diff --git a/src/ott/neural/flows/genot.py b/src/ott/neural/flows/genot.py index 736d9e268..981f958dc 100644 --- a/src/ott/neural/flows/genot.py +++ b/src/ott/neural/flows/genot.py @@ -217,7 +217,6 @@ def __call__(self, train_loader, valid_loader): ) if batch["source_lin"] is not None else len(batch["source_quad"]) n_samples = batch_size * self.k_samples_per_x batch["time"] = self.time_sampler(rng_time, n_samples) - batch["noise"] = self.sample_noise(rng_noise, n_samples) batch["latent"] = self.latent_noise_fn( rng_noise, shape=(self.k_samples_per_x, batch_size) ) @@ -309,7 +308,7 @@ def loss_fn( rng: jax.random.PRNGKeyArray ): x_t = self.flow.compute_xt( - batch["noise"], batch["time"], batch["latent"], batch["target"] + rng, batch["time"], batch["latent"], batch["target"] ) apply_fn = functools.partial( state_velocity_field.apply_fn, {"params": params} @@ -322,16 +321,14 @@ def loss_fn( ], axis=1) v_t = jax.vmap(apply_fn - )(t=batch["time"], x=x_t, condition=cond_input, rng=rng) + )(t=batch["time"], x=x_t, condition=cond_input) u_t = self.flow.compute_ut( batch["time"], 
batch["latent"], batch["target"] ) return jnp.mean((v_t - u_t) ** 2) - keys_model = jax.random.split(rng, len(batch["noise"])) - grad_fn = jax.value_and_grad(loss_fn, has_aux=False) - loss, grads = grad_fn(state_velocity_field.params, batch, keys_model) + loss, grads = grad_fn(state_velocity_field.params, batch, rng) return state_velocity_field.apply_gradients(grads=grads), loss @@ -434,18 +431,4 @@ def load(self, path: str) -> "GENOT": @property def training_logs(self) -> Dict[str, Any]: """Logs of the training.""" - raise NotImplementedError - - def sample_noise( - self, key: jax.random.PRNGKey, batch_size: int - ) -> jnp.ndarray: - """Sample noise from a standard-normal distribution. - - Args: - key: Random key for seeding. - batch_size: Number of samples to draw. - - Returns: - Samples from the standard normal distribution. - """ - return jax.random.normal(key, shape=(batch_size, self.output_dim)) + raise NotImplementedError \ No newline at end of file diff --git a/src/ott/neural/flows/models.py b/src/ott/neural/flows/models.py index 6970e8368..bf365e772 100644 --- a/src/ott/neural/flows/models.py +++ b/src/ott/neural/flows/models.py @@ -95,7 +95,6 @@ def __call__( t: jnp.ndarray, x: jnp.ndarray, condition: Optional[jnp.ndarray] = None, - rng: Optional[jnp.ndarray] = None, ) -> jnp.ndarray: """Forward pass through the neural vector field. @@ -103,7 +102,6 @@ def __call__( t: Time of shape (batch_size, 1). x: Data of shape (batch_size, output_dim). condition: Conditioning vector. - rng: Random number generator. Returns: Output of the neural vector field. 
diff --git a/src/ott/neural/flows/otfm.py b/src/ott/neural/flows/otfm.py index ef55e1dc5..e5bdea711 100644 --- a/src/ott/neural/flows/otfm.py +++ b/src/ott/neural/flows/otfm.py @@ -33,6 +33,7 @@ __all__ = ["OTFlowMatching"] + class OTFlowMatching( base_solver.ResampleMixin, ): @@ -145,30 +146,29 @@ def step_fn( ) -> Tuple[Any, Any]: def loss_fn( - params: jnp.ndarray, t: jnp.ndarray, noise: jnp.ndarray, - batch: Dict[str, jnp.ndarray], rng: jax.random.PRNGKeyArray + params: jnp.ndarray, t: jnp.ndarray, + batch: Dict[str, jnp.ndarray], rng: jax.Array ) -> jnp.ndarray: x_t = self.flow.compute_xt( - noise, t, batch["source_lin"], batch["target_lin"] + rng, t, batch["source_lin"], batch["target_lin"] ) apply_fn = functools.partial( state_velocity_field.apply_fn, {"params": params} ) v_t = jax.vmap(apply_fn)( - t=t, x=x_t, condition=batch["source_conditions"], rng=rng + t=t, x=x_t, condition=batch["source_conditions"] ) u_t = self.flow.compute_ut(t, batch["source_lin"], batch["target_lin"]) return jnp.mean((v_t - u_t) ** 2) batch_size = len(batch["source_lin"]) - key_noise, key_t, key_model = jax.random.split(rng, 3) + key_t, key_model = jax.random.split(rng, 2) keys_model = jax.random.split(key_model, batch_size) t = self.time_sampler(key_t, batch_size) - noise = self.sample_noise(key_noise, batch_size) grad_fn = jax.value_and_grad(loss_fn) loss, grads = grad_fn( - state_velocity_field.params, t, noise, batch, keys_model + state_velocity_field.params, t, batch, keys_model ) return state_velocity_field.apply_gradients(grads=grads), loss @@ -312,16 +312,3 @@ def training_logs(self) -> Dict[str, Any]: """Logs of the training.""" raise NotImplementedError - def sample_noise( - self, key: jax.random.PRNGKey, batch_size: int - ) -> jnp.ndarray: - """Sample noise from a standard-normal distribution. - - Args: - key: Random key for seeding. - batch_size: Number of samples to draw. - - Returns: - Samples from the standard normal distribution. 
- """ - return jax.random.normal(key, shape=(batch_size, self.input_dim)) From c18c461019db2368d32c42c730eb8006b2961444 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Fri, 9 Feb 2024 15:14:13 +0100 Subject: [PATCH 083/186] fix bug in passing rngs in otfm --- src/ott/neural/flows/flows.py | 7 +++---- src/ott/neural/flows/genot.py | 5 ++--- src/ott/neural/flows/otfm.py | 16 +++++----------- src/ott/neural/flows/samplers.py | 2 +- 4 files changed, 11 insertions(+), 19 deletions(-) diff --git a/src/ott/neural/flows/flows.py b/src/ott/neural/flows/flows.py index 51e19fb5c..65f697d89 100644 --- a/src/ott/neural/flows/flows.py +++ b/src/ott/neural/flows/flows.py @@ -13,8 +13,8 @@ # limitations under the License. import abc -import jax.numpy as jnp import jax +import jax.numpy as jnp __all__ = [ "BaseFlow", @@ -76,8 +76,7 @@ def compute_ut( """ def compute_xt( - self, rng: jax.Array, t: jnp.ndarray, src: jnp.ndarray, - tgt: jnp.ndarray + self, rng: jax.Array, t: jnp.ndarray, src: jnp.ndarray, tgt: jnp.ndarray ) -> jnp.ndarray: """Sample from the probability path. @@ -94,7 +93,7 @@ def compute_xt( Samples from the probability path between :math:`x_0` and :math:`x_1` at time :math:`t`. 
""" - noise = jax.random.normal(rng, shape=(src.shape)) + noise = jax.random.normal(rng, shape=src.shape) mu_t = self.compute_mu_t(t, src, tgt) sigma_t = self.compute_sigma_t(t) return mu_t + sigma_t * noise diff --git a/src/ott/neural/flows/genot.py b/src/ott/neural/flows/genot.py index 981f958dc..73a4cb1bc 100644 --- a/src/ott/neural/flows/genot.py +++ b/src/ott/neural/flows/genot.py @@ -320,8 +320,7 @@ def loss_fn( if batch[el] is not None ], axis=1) - v_t = jax.vmap(apply_fn - )(t=batch["time"], x=x_t, condition=cond_input) + v_t = jax.vmap(apply_fn)(t=batch["time"], x=x_t, condition=cond_input) u_t = self.flow.compute_ut( batch["time"], batch["latent"], batch["target"] ) @@ -431,4 +430,4 @@ def load(self, path: str) -> "GENOT": @property def training_logs(self) -> Dict[str, Any]: """Logs of the training.""" - raise NotImplementedError \ No newline at end of file + raise NotImplementedError diff --git a/src/ott/neural/flows/otfm.py b/src/ott/neural/flows/otfm.py index e5bdea711..e8233153e 100644 --- a/src/ott/neural/flows/otfm.py +++ b/src/ott/neural/flows/otfm.py @@ -33,7 +33,6 @@ __all__ = ["OTFlowMatching"] - class OTFlowMatching( base_solver.ResampleMixin, ): @@ -146,8 +145,8 @@ def step_fn( ) -> Tuple[Any, Any]: def loss_fn( - params: jnp.ndarray, t: jnp.ndarray, - batch: Dict[str, jnp.ndarray], rng: jax.Array + params: jnp.ndarray, t: jnp.ndarray, batch: Dict[str, jnp.ndarray], + rng: jax.Array ) -> jnp.ndarray: x_t = self.flow.compute_xt( @@ -156,20 +155,16 @@ def loss_fn( apply_fn = functools.partial( state_velocity_field.apply_fn, {"params": params} ) - v_t = jax.vmap(apply_fn)( - t=t, x=x_t, condition=batch["source_conditions"] - ) + v_t = jax.vmap(apply_fn + )(t=t, x=x_t, condition=batch["source_conditions"]) u_t = self.flow.compute_ut(t, batch["source_lin"], batch["target_lin"]) return jnp.mean((v_t - u_t) ** 2) batch_size = len(batch["source_lin"]) key_t, key_model = jax.random.split(rng, 2) - keys_model = jax.random.split(key_model, batch_size) t 
= self.time_sampler(key_t, batch_size) grad_fn = jax.value_and_grad(loss_fn) - loss, grads = grad_fn( - state_velocity_field.params, t, batch, keys_model - ) + loss, grads = grad_fn(state_velocity_field.params, t, batch, key_model) return state_velocity_field.apply_gradients(grads=grads), loss return step_fn @@ -311,4 +306,3 @@ def load(self, path: str) -> "OTFlowMatching": def training_logs(self) -> Dict[str, Any]: """Logs of the training.""" raise NotImplementedError - diff --git a/src/ott/neural/flows/samplers.py b/src/ott/neural/flows/samplers.py index 30373380a..34a28c2d2 100644 --- a/src/ott/neural/flows/samplers.py +++ b/src/ott/neural/flows/samplers.py @@ -29,7 +29,7 @@ def uniform_sampler( r"""Sample from a uniform distribution. Sample :math:`t` from a uniform distribution :math:`[low, high]`. - If `offset` is not :obj:`None`, one element :math:`t` is sampled from + If `offset` is not :obj:`None`, one element :math:`t` is sampled from :math:`[low, high]` and the K samples are constructed via :math:`(t + k)/K \mod (high - low - offset) + low`. From 83418215c9c23fa1ebd817fb681e4e823cb8d565 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Fri, 9 Feb 2024 15:41:57 +0100 Subject: [PATCH 084/186] introduce otmatcher in otfm --- src/ott/neural/flows/genot.py | 19 ++++------ src/ott/neural/flows/otfm.py | 53 ++++++++-------------------- src/ott/neural/models/base_solver.py | 47 ++++++++++++++++++++---- tests/neural/otfm_test.py | 16 ++++++--- 4 files changed, 73 insertions(+), 62 deletions(-) diff --git a/src/ott/neural/flows/genot.py b/src/ott/neural/flows/genot.py index 73a4cb1bc..02a203a24 100644 --- a/src/ott/neural/flows/genot.py +++ b/src/ott/neural/flows/genot.py @@ -33,9 +33,7 @@ __all__ = ["GENOT"] -class GENOT( - base_solver.ResampleMixin, -): +class GENOT: """The GENOT training class as introduced in :cite:`klein_uscidda:23`. 
Args: @@ -113,7 +111,6 @@ def __init__( rng: Optional[jax.Array] = None, ): rng = utils.default_prng_key(rng) - base_solver.ResampleMixin.__init__(self) if isinstance( ot_solver, gromov_wasserstein.GromovWasserstein @@ -252,7 +249,6 @@ def __call__(self, train_loader, valid_loader): (batch["target"],), source_is_balanced=(self.unbalancedness_handler.tau_a == 1.0) ) - jax.random.split(rng_noise, batch_size * self.k_samples_per_x) if self.solver_latent_to_data is not None: tmats_latent_data = jnp.array( @@ -339,7 +335,7 @@ def transport( condition: Optional[jnp.ndarray] = None, rng: Optional[jax.Array] = None, forward: bool = True, - diffeqsolve_kwargs: Dict[str, Any] = types.MappingProxyType({}), + **kwargs: Any, ) -> Union[jnp.array, diffrax.Solution, Optional[jnp.ndarray]]: """Transport data with the learnt plan. @@ -352,7 +348,7 @@ def transport( condition: Condition of the input data. rng: random seed for sampling from the latent distribution. forward: If `True` integrates forward, otherwise backwards. - diffeqsolve_kwargs: Keyword arguments for the ODE solver. + kwargs: Keyword arguments for the ODE solver. 
Returns: The push-forward or pull-back distribution defined by the learnt @@ -362,7 +358,6 @@ def transport( rng = utils.default_prng_key(rng) if not forward: raise NotImplementedError - diffeqsolve_kwargs = dict(diffeqsolve_kwargs) assert len(source) == len(condition) if condition is not None else True latent_batch = self.latent_noise_fn(rng, shape=(len(source),)) @@ -382,16 +377,16 @@ def solve_ode(input: jnp.ndarray, cond: jnp.ndarray) -> jnp.ndarray: x=x, condition=cond) ), - diffeqsolve_kwargs.pop("solver", diffrax.Tsit5()), + kwargs.pop("solver", diffrax.Tsit5()), t0=t0, t1=t1, - dt0=diffeqsolve_kwargs.pop("dt0", None), + dt0=kwargs.pop("dt0", None), y0=input, - stepsize_controller=diffeqsolve_kwargs.pop( + stepsize_controller=kwargs.pop( "stepsize_controller", diffrax.PIDController(rtol=1e-5, atol=1e-5) ), - **diffeqsolve_kwargs, + **kwargs, ).ys[0] return jax.vmap(solve_ode)(latent_batch, cond_input) diff --git a/src/ott/neural/flows/otfm.py b/src/ott/neural/flows/otfm.py index e8233153e..de0c8c5e5 100644 --- a/src/ott/neural/flows/otfm.py +++ b/src/ott/neural/flows/otfm.py @@ -13,7 +13,6 @@ # limitations under the License. import collections import functools -import types from typing import Any, Callable, Dict, Literal, Mapping, Optional, Tuple, Type, Union import jax @@ -28,14 +27,11 @@ from ott.geometry import costs from ott.neural.flows import flows from ott.neural.models import base_solver -from ott.solvers import was_solver __all__ = ["OTFlowMatching"] -class OTFlowMatching( - base_solver.ResampleMixin, -): +class OTFlowMatching: """(Optimal transport) flow matching class. Flow matching as introduced in :cite:`lipman:22`, with extension to OT-FM @@ -47,19 +43,10 @@ class OTFlowMatching( cond_dim: Dimension of the conditioning variable. iterations: Number of iterations. valid_freq: Frequency of validation. - ot_solver: OT solver to match samples from the source and the target - distribution as proposed in :cite:`tong:23`, :cite:`pooladian:23`. 
- If :obj:`None`, no matching will be performed as proposed in - :cite:`lipman:22`. flow: Flow between source and target distribution. time_sampler: Sampler for the time. optimizer: Optimizer for `velocity_field`. checkpoint_manager: Checkpoint manager. - epsilon: Entropy regularization term of the OT OT problem solved by the - `ot_solver`. - cost_fn: Cost function for the OT problem solved by the `ot_solver`. - scale_cost: How to scale the cost matrix for the OT problem solved by the - `ot_solver`. callback_fn: Callback function. num_eval_samples: Number of samples to evaluate on during evaluation. rng: Random number generator. @@ -73,10 +60,10 @@ def __init__( input_dim: int, cond_dim: int, iterations: int, - ot_solver: Optional[Type[was_solver.WassersteinSolver]], flow: Type[flows.BaseFlow], time_sampler: Callable[[jax.Array, int], jnp.ndarray], optimizer: optax.GradientTransformation, + ot_matcher: base_solver.OTMatcher, unbalancedness_handler: base_solver.UnbalancednessHandler, checkpoint_manager: Type[checkpoint.CheckpointManager] = None, epsilon: float = 1e-2, @@ -92,13 +79,12 @@ def __init__( rng: Optional[jax.Array] = None, ): rng = utils.default_prng_key(rng) - base_solver.ResampleMixin.__init__(self) self.unbalancedness_handler = unbalancedness_handler self.iterations = iterations self.valid_freq = valid_freq self.velocity_field = velocity_field self.input_dim = input_dim - self.ot_solver = ot_solver + self.ot_matcher = ot_matcher self.flow = flow self.time_sampler = time_sampler self.optimizer = optimizer @@ -123,17 +109,6 @@ def setup(self) -> None: ) self.step_fn = self._get_step_fn() - if self.ot_solver is not None: - self.match_fn = self._get_sinkhorn_match_fn( - self.ot_solver, - epsilon=self.epsilon, - cost_fn=self.cost_fn, - scale_cost=self.scale_cost, - tau_a=self.unbalancedness_handler.tau_a, - tau_b=self.unbalancedness_handler.tau_b, - ) - else: - self.match_fn = None def _get_step_fn(self) -> Callable: @@ -182,11 +157,13 @@ def __call__(self, 
train_loader, valid_loader): for iter in range(self.iterations): rng_resample, rng_step_fn, self.rng = jax.random.split(self.rng, 3) batch = next(train_loader) - if self.ot_solver is not None: - tmat = self.match_fn(batch["source_lin"], batch["target_lin"]) + if self.ot_matcher is not None: + tmat = self.ot_matcher.match_fn( + batch["source_lin"], batch["target_lin"] + ) (batch["source_lin"], batch["source_conditions"] ), (batch["target_lin"], - batch["target_conditions"]) = self._resample_data( + batch["target_conditions"]) = self.ot_matcher._resample_data( rng_resample, tmat, (batch["source_lin"], batch["source_conditions"]), (batch["target_lin"], batch["target_conditions"]) @@ -222,7 +199,7 @@ def transport( forward: bool = True, t_0: float = 0.0, t_1: float = 1.0, - diffeqsolve_kwargs: Dict[str, Any] = types.MappingProxyType({}) + **kwargs: Any, ) -> diffrax.Solution: """Transport data with the learnt map. @@ -236,15 +213,13 @@ def transport( forward: If `True` integrates forward, otherwise backwards. t_0: Starting point of integration. t_1: End point of integration. - diffeqsolve_kwargs: Keyword arguments for the ODE solver. + kwargs: Keyword arguments for the ODE solver. Returns: The push-forward or pull-back distribution defined by the learnt transport plan. 
""" - diffeqsolve_kwargs = dict(diffeqsolve_kwargs) - t0, t1 = (t_0, t_1) if forward else (t_1, t_0) @jax.jit @@ -257,16 +232,16 @@ def solve_ode(input: jnp.ndarray, cond: jnp.ndarray) -> jnp.ndarray: x=x, condition=cond) ), - diffeqsolve_kwargs.pop("solver", diffrax.Tsit5()), + kwargs.pop("solver", diffrax.Tsit5()), t0=t0, t1=t1, - dt0=diffeqsolve_kwargs.pop("dt0", None), + dt0=kwargs.pop("dt0", None), y0=input, - stepsize_controller=diffeqsolve_kwargs.pop( + stepsize_controller=kwargs.pop( "stepsize_controller", diffrax.PIDController(rtol=1e-5, atol=1e-5) ), - **diffeqsolve_kwargs, + **kwargs, ).ys[0] return jax.vmap(solve_ode)(data, condition) diff --git a/src/ott/neural/models/base_solver.py b/src/ott/neural/models/base_solver.py index 1bc541ec7..43675d4fc 100644 --- a/src/ott/neural/models/base_solver.py +++ b/src/ott/neural/models/base_solver.py @@ -26,7 +26,7 @@ from ott.solvers.linear import sinkhorn from ott.solvers.quadratic import gromov_wasserstein -__all__ = ["ResampleMixin", "UnbalancednessHandler"] +__all__ = ["OTMatcher", "UnbalancednessHandler"] def _get_sinkhorn_match_fn( @@ -132,8 +132,37 @@ def match_pairs( return match_pairs -class ResampleMixin: - """Mixin class for mini-batch OT in neural optimal transport solvers.""" +class OTMatcher: + """Class for mini-batch OT in neural optimal transport solvers. + + Args: + ot_solver: OT solver to match samples from the source and the target + distribution as proposed in :cite:`tong:23`, :cite:`pooladian:23`. + If :obj:`None`, no matching will be performed as proposed in + :cite:`lipman:22`. 
+ """ + + def __init__( + self, + ot_solver: was_solver.WassersteinSolver, + epsilon: float = 1e-2, + cost_fn: Optional[costs.CostFn] = None, + scale_cost: Union[bool, int, float, + Literal["mean", "max_norm", "max_bound", "max_cost", + "median"]] = "mean", + tau_a: float = 1.0, + tau_b: float = 1.0 + ) -> None: + self.ot_solver = ot_solver + self.epsilon = epsilon + self.cost_fn = cost_fn + self.scale_cost = scale_cost + self.tau_a = tau_a + self.tau_b = tau_b + self.match_fn = self._get_sinkhorn_match_fn( + self.ot_solver, self.epsilon, self.cost_fn, self.scale_cost, self.tau_a, + self.tau_b + ) def _resample_data( self, @@ -233,19 +262,21 @@ class UnbalancednessHandler: cond_dim: Dimension of the conditioning variable. If :obj:`None`, no conditioning is used. tau_a: Unbalancedness parameter for the source distribution. + Only used if `ot_solver` is not :obj:`None`. tau_b: Unbalancedness parameter for the target distribution. + Only used if `ot_solver` is not :obj:`None`. rescaling_a: Rescaling function for the source distribution. - If :obj:`None`, the left rescaling factor is not learnt. + If :obj:`None`, the left rescaling factor is not learnt. rescaling_b: Rescaling function for the target distribution. - If :obj:`None`, the right rescaling factor is not learnt. + If :obj:`None`, the right rescaling factor is not learnt. opt_eta: Optimizer for the left rescaling function. opt_xi: Optimzier for the right rescaling function. resample_epsilon: Epsilon for resampling. scale_cost: Scaling of the cost matrix for estimating the rescaling factors. ot_solver: Solver to compute unbalanced marginals. If `ot_solver` is `None`, - the method + the method :meth:`ott.neural.models.base_solver.UnbalancednessHandler.compute_unbalanced_marginals` - is not available, and hence the unbalanced marginals must be computed by the neural solver. + is not available, and hence the unbalanced marginals must be computed by the neural solver. kwargs: Additional keyword arguments. 
""" @@ -296,6 +327,8 @@ def __init__( ) elif isinstance(ot_solver, gromov_wasserstein.GromovWasserstein): self.compute_unbalanced_marginals = self._get_compute_unbalanced_marginals_quad + else: + self.compute_unbalanced_marginals = None self.setup(source_dim=source_dim, target_dim=target_dim, cond_dim=cond_dim) def _get_compute_unbalanced_marginals_lin( diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index e4d22a789..c3675b820 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -46,6 +46,7 @@ def test_flow_matching( latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() + ot_matcher = base_solver.OTMatcher(ot_solver) time_sampler = samplers.uniform_sampler optimizer = optax.adam(learning_rate=1e-3) unbalancedness_handler = base_solver.UnbalancednessHandler( @@ -57,7 +58,7 @@ def test_flow_matching( cond_dim=condition_dim, iterations=3, valid_freq=2, - ot_solver=ot_solver, + ot_matcher=ot_matcher, flow=flow, time_sampler=time_sampler, optimizer=optimizer, @@ -98,6 +99,7 @@ def test_flow_matching_with_conditions( latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() + ot_matcher = base_solver.OTMatcher(ot_solver) time_sampler = functools.partial(samplers.uniform_sampler, offset=1e-5) optimizer = optax.adam(learning_rate=1e-3) unbalancedness_handler = base_solver.UnbalancednessHandler( @@ -110,7 +112,7 @@ def test_flow_matching_with_conditions( cond_dim=1, iterations=3, valid_freq=2, - ot_solver=ot_solver, + ot_matcher=ot_matcher, flow=flow, time_sampler=time_sampler, optimizer=optimizer, @@ -154,6 +156,7 @@ def test_flow_matching_conditional( latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() + ot_matcher = base_solver.OTMatcher(ot_solver) time_sampler = samplers.uniform_sampler optimizer = optax.adam(learning_rate=1e-3) unbalancedness_handler = base_solver.UnbalancednessHandler( @@ -166,7 +169,7 @@ def test_flow_matching_conditional( cond_dim=condition_dim, iterations=3, valid_freq=2, - ot_solver=ot_solver, + 
ot_matcher=ot_matcher, flow=flow, time_sampler=time_sampler, optimizer=optimizer, @@ -215,6 +218,11 @@ def test_flow_matching_learn_rescaling( tau_b = 0.2 rescaling_a = nets.RescalingMLP(hidden_dim=4, condition_dim=condition_dim) rescaling_b = nets.RescalingMLP(hidden_dim=4, condition_dim=condition_dim) + ot_matcher = base_solver.OTMatcher( + ot_solver, + tau_a=tau_a, + tau_b=tau_b, + ) unbalancedness_handler = base_solver.UnbalancednessHandler( random.PRNGKey(0), source_dim, @@ -232,7 +240,7 @@ def test_flow_matching_learn_rescaling( cond_dim=condition_dim, iterations=3, valid_freq=2, - ot_solver=ot_solver, + ot_matcher=ot_matcher, flow=flow, time_sampler=time_sampler, optimizer=optimizer, From 3cae628bdae6546f28e38cf3506efe8c44804360 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Fri, 9 Feb 2024 16:36:25 +0100 Subject: [PATCH 085/186] [ci skip] split GENOT into GENOTLin and GENOTQuad --- src/ott/neural/flows/genot.py | 346 ++++++++++++++------------- src/ott/neural/flows/otfm.py | 2 +- src/ott/neural/models/base_solver.py | 118 ++++++--- tests/neural/genot_test.py | 340 +++++++++++++------------- tests/neural/otfm_test.py | 8 +- 5 files changed, 445 insertions(+), 369 deletions(-) diff --git a/src/ott/neural/flows/genot.py b/src/ott/neural/flows/genot.py index 02a203a24..bd825ad40 100644 --- a/src/ott/neural/flows/genot.py +++ b/src/ott/neural/flows/genot.py @@ -13,7 +13,7 @@ # limitations under the License. 
import functools import types -from typing import Any, Callable, Dict, Literal, Optional, Type, Union +from typing import Any, Callable, Dict, Optional, Type, Union import jax import jax.numpy as jnp @@ -23,17 +23,13 @@ from flax.training import train_state from ott import utils -from ott.geometry import costs from ott.neural.flows import flows, samplers from ott.neural.models import base_solver -from ott.solvers import was_solver -from ott.solvers.linear import sinkhorn -from ott.solvers.quadratic import gromov_wasserstein -__all__ = ["GENOT"] +__all__ = ["GENOTBase", "GENOTLin", "GENOTQuad"] -class GENOT: +class GENOTBase: """The GENOT training class as introduced in :cite:`klein_uscidda:23`. Args: @@ -87,23 +83,14 @@ def __init__( cond_dim: int, iterations: int, valid_freq: int, - ot_solver: was_solver.WassersteinSolver, - epsilon: float, - cost_fn: Union[costs.CostFn, Dict[str, costs.CostFn]], - scale_cost: Union[Union[bool, int, float, - Literal["mean", "max_norm", "max_bound", - "max_cost", "median"]], - Dict[str, Union[bool, int, float, - Literal["mean", "max_norm", "max_bound", - "max_cost", "median"]]]], + ot_matcher: base_solver.BaseOTMatcher, unbalancedness_handler: base_solver.UnbalancednessHandler, optimizer: optax.GradientTransformation, flow: Type[flows.BaseFlow] = flows.ConstantNoiseFlow(0.0), # noqa: B008 time_sampler: Callable[[jax.Array, int], jnp.ndarray] = samplers.uniform_sampler, k_samples_per_x: int = 1, - solver_latent_to_data: Optional[Type[was_solver.WassersteinSolver] - ] = None, + matcher_latent_to_data: Optional[base_solver.OTMatcherLinear] = None, kwargs_solver_latent_to_data: Dict[str, Any] = types.MappingProxyType({}), fused_penalty: float = 0.0, callback_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray, jnp.ndarray], @@ -112,15 +99,6 @@ def __init__( ): rng = utils.default_prng_key(rng) - if isinstance( - ot_solver, gromov_wasserstein.GromovWasserstein - ) and epsilon is not None: - raise ValueError( - "If `ot_solver` is 
`GromovWasserstein`, `epsilon` must be `None`. " + - "This check is performed to ensure that in the (fused) Gromov case " + - "the `epsilon` parameter is passed via the `ot_solver`." - ) - self.rng = utils.default_prng_key(rng) self.iterations = iterations self.valid_freq = valid_freq @@ -129,6 +107,7 @@ def __init__( self.flow = flow self.time_sampler = time_sampler self.optimizer = optimizer + self.ot_matcher = ot_matcher self.latent_noise_fn = jax.tree_util.Partial( jax.random.multivariate_normal, mean=jnp.zeros((output_dim,)), @@ -143,14 +122,11 @@ def __init__( self.unbalancedness_handler = unbalancedness_handler # OT data-data matching parameters - self.ot_solver = ot_solver - self.epsilon = epsilon - self.cost_fn = cost_fn - self.scale_cost = scale_cost + self.fused_penalty = fused_penalty # OT latent-data matching parameters - self.solver_latent_to_data = solver_latent_to_data + self.matcher_latent_to_data = matcher_latent_to_data self.kwargs_solver_latent_to_data = kwargs_solver_latent_to_data # callback parameteres @@ -165,130 +141,6 @@ def setup(self) -> None: ) ) self.step_fn = self._get_step_fn() - if self.solver_latent_to_data is not None: - self.match_latent_to_data_fn = self._get_sinkhorn_match_fn( - ot_solver=self.solver_latent_to_data, - **self.kwargs_solver_latent_to_data - ) - else: - self.match_latent_to_data_fn = lambda key, x, y, **_: (x, y) - - # TODO: add graph construction function - if isinstance(self.ot_solver, sinkhorn.Sinkhorn): - self.match_fn = self._get_sinkhorn_match_fn( - ot_solver=self.ot_solver, - epsilon=self.epsilon, - cost_fn=self.cost_fn, - scale_cost=self.scale_cost, - tau_a=self.unbalancedness_handler.tau_a, - tau_b=self.unbalancedness_handler.tau_b, - filter_input=True - ) - else: - self.match_fn = self._get_gromov_match_fn( - ot_solver=self.ot_solver, - cost_fn=self.cost_fn, - scale_cost=self.scale_cost, - tau_a=self.unbalancedness_handler.tau_a, - tau_b=self.unbalancedness_handler.tau_b, - 
fused_penalty=self.fused_penalty - ) - - def __call__(self, train_loader, valid_loader): - """Train GENOT. - - Args: - train_loader: Data loader for the training data. - valid_loader: Data loader for the validation data. - """ - batch: Dict[str, jnp.array] = {} - for iteration in range(self.iterations): - batch = next(train_loader) - - ( - self.rng, rng_time, rng_resample, rng_noise, rng_latent_data_match, - rng_step_fn - ) = jax.random.split(self.rng, 6) - batch_size = len( - batch["source_lin"] - ) if batch["source_lin"] is not None else len(batch["source_quad"]) - n_samples = batch_size * self.k_samples_per_x - batch["time"] = self.time_sampler(rng_time, n_samples) - batch["latent"] = self.latent_noise_fn( - rng_noise, shape=(self.k_samples_per_x, batch_size) - ) - - tmat = self.match_fn( - batch["source_lin"], batch["source_quad"], batch["target_lin"], - batch["target_quad"] - ) - - batch["source"] = jnp.concatenate([ - batch[el] - for el in ["source_lin", "source_quad"] - if batch[el] is not None - ], - axis=1) - batch["target"] = jnp.concatenate([ - batch[el] - for el in ["target_lin", "target_quad"] - if batch[el] is not None - ], - axis=1) - - batch = { - k: v for k, v in batch.items() if k in - ["source", "target", "source_conditions", "time", "noise", "latent"] - } - - (batch["source"], batch["source_conditions"] - ), (batch["target"],) = self._sample_conditional_indices_from_tmap( - rng_resample, - tmat, - self.k_samples_per_x, (batch["source"], batch["source_conditions"]), - (batch["target"],), - source_is_balanced=(self.unbalancedness_handler.tau_a == 1.0) - ) - - if self.solver_latent_to_data is not None: - tmats_latent_data = jnp.array( - jax.vmap(self.match_latent_to_data_fn, 0, - 0)(x=batch["latent"], y=batch["target"]) - ) - - rng_latent_data_match = jax.random.split( - rng_latent_data_match, self.k_samples_per_x - ) - (batch["source"], batch["source_conditions"] - ), (batch["target"],) = jax.vmap(self._resample_data, 0, 0)( - 
rng_latent_data_match, tmats_latent_data, - (batch["source"], batch["source_conditions"]), (batch["target"],) - ) - batch = { - key: - jnp.reshape(arr, (batch_size * self.k_samples_per_x, - -1)) if arr is not None else None - for key, arr in batch.items() - } - - self.state_velocity_field, loss = self.step_fn( - rng_step_fn, self.state_velocity_field, batch - ) - if self.learn_rescaling: - ( - self.state_eta, self.state_xi, eta_predictions, xi_predictions, - loss_a, loss_b - ) = self.unbalancedness_handler.step_fn( - source=batch["source"], - target=batch["target"], - condition=batch["source_conditions"], - a=tmat.sum(axis=1), - b=tmat.sum(axis=0), - state_eta=self.unbalancedness_handler.state_eta, - state_xi=self.unbalancedness_handler.state_xi, - ) - if iteration % self.valid_freq == 0: - self._valid_step(valid_loader, iteration) def _get_step_fn(self) -> Callable: @@ -403,26 +255,178 @@ def learn_rescaling(self) -> bool: self.unbalancedness_handler.rescaling_b is not None ) - def save(self, path: str): - """Save the model. + +class GENOTLin(GENOTBase): + + def __call__(self, train_loader, valid_loader): + """Train GENOT. Args: - path: Where to save the model to. + train_loader: Data loader for the training data. + valid_loader: Data loader for the validation data. """ - raise NotImplementedError + batch: Dict[str, jnp.array] = {} + for iteration in range(self.iterations): + batch = next(train_loader) + + ( + self.rng, rng_time, rng_resample, rng_noise, rng_latent_data_match, + rng_step_fn + ) = jax.random.split(self.rng, 6) + batch_size = len(batch["source_lin"]) + n_samples = batch_size * self.k_samples_per_x + batch["time"] = self.time_sampler(rng_time, n_samples) + batch["latent"] = self.latent_noise_fn( + rng_noise, shape=(self.k_samples_per_x, batch_size) + ) - def load(self, path: str) -> "GENOT": - """Load a model. + tmat = self.ot_matcher.match_fn( + batch["source_lin"], + batch["target_lin"], + ) - Args: - path: Where to load the model from. 
+ batch["source"] = batch["source_lin"] + batch["target"] = batch["target_lin"] - Returns: - An instance of :class:`ott.neural.solvers.OTFlowMatching`. + (batch["source"], batch["source_conditions"]), ( + batch["target"], + ) = self.ot_matcher._sample_conditional_indices_from_tmap( + rng_resample, + tmat, + self.k_samples_per_x, (batch["source"], batch["source_conditions"]), + (batch["target"],), + source_is_balanced=(self.unbalancedness_handler.tau_a == 1.0) + ) + + if self.matcher_latent_to_data.match_fn is not None: + tmats_latent_data = jnp.array( + jax.vmap(self.matcher_latent_to_data.match_fn, 0, + 0)(x=batch["latent"], y=batch["target"]) + ) + + rng_latent_data_match = jax.random.split( + rng_latent_data_match, self.k_samples_per_x + ) + (batch["source"], batch["source_conditions"] + ), (batch["target"],) = jax.vmap(self.ot_matcher._resample_data, 0, 0)( + rng_latent_data_match, tmats_latent_data, + (batch["source"], batch["source_conditions"]), (batch["target"],) + ) + batch = { + key: + jnp.reshape(arr, (batch_size * self.k_samples_per_x, + -1)) if arr is not None else None + for key, arr in batch.items() + } + + self.state_velocity_field, loss = self.step_fn( + rng_step_fn, self.state_velocity_field, batch + ) + if self.learn_rescaling: + ( + self.state_eta, self.state_xi, eta_predictions, xi_predictions, + loss_a, loss_b + ) = self.unbalancedness_handler.step_fn( + source=batch["source"], + target=batch["target"], + condition=batch["source_conditions"], + a=tmat.sum(axis=1), + b=tmat.sum(axis=0), + state_eta=self.unbalancedness_handler.state_eta, + state_xi=self.unbalancedness_handler.state_xi, + ) + if iteration % self.valid_freq == 0: + self._valid_step(valid_loader, iteration) + + +class GENOTQuad(GENOTBase): + + def __call__(self, train_loader, valid_loader): + """Train GENOT. + + Args: + train_loader: Data loader for the training data. + valid_loader: Data loader for the validation data. 
""" - raise NotImplementedError + batch: Dict[str, jnp.array] = {} + for iteration in range(self.iterations): + batch = next(train_loader) - @property - def training_logs(self) -> Dict[str, Any]: - """Logs of the training.""" - raise NotImplementedError + ( + self.rng, rng_time, rng_resample, rng_noise, rng_latent_data_match, + rng_step_fn + ) = jax.random.split(self.rng, 6) + batch_size = len( + batch["source_lin"] + ) if batch["source_lin"] is not None else len(batch["source_quad"]) + n_samples = batch_size * self.k_samples_per_x + batch["time"] = self.time_sampler(rng_time, n_samples) + batch["latent"] = self.latent_noise_fn( + rng_noise, shape=(self.k_samples_per_x, batch_size) + ) + + tmat = self.ot_matcher.match_fn( + batch["source_lin"], batch["source_quad"], batch["target_lin"], + batch["target_quad"] + ) + + if self.ot_matcher.fused_penalty > 0.0: + batch["source"] = jnp.concatenate( + (batch["source_lin"], batch["source_quad"]), axis=1 + ) + batch["target"] = jnp.concatenate( + (batch["target_lin"], batch["target_quad"]), axis=1 + ) + else: + batch["source"] = batch["source_quad"] + batch["target"] = batch["target_quad"] + + (batch["source"], batch["source_conditions"]), ( + batch["target"], + ) = self.ot_matcher._sample_conditional_indices_from_tmap( + rng_resample, + tmat, + self.k_samples_per_x, (batch["source"], batch["source_conditions"]), + (batch["target"],), + source_is_balanced=(self.unbalancedness_handler.tau_a == 1.0) + ) + + if self.matcher_latent_to_data.match_fn is not None: + tmats_latent_data = jnp.array( + jax.vmap(self.matcher_latent_to_data.match_fn, 0, + 0)(x=batch["latent"], y=batch["target"]) + ) + + rng_latent_data_match = jax.random.split( + rng_latent_data_match, self.k_samples_per_x + ) + (batch["source"], batch["source_conditions"] + ), (batch["target"],) = jax.vmap(self.ot_matcher._resample_data, 0, 0)( + rng_latent_data_match, tmats_latent_data, + (batch["source"], batch["source_conditions"]), (batch["target"],) + ) + batch = 
{ + key: + jnp.reshape(arr, (batch_size * self.k_samples_per_x, + -1)) if arr is not None else None + for key, arr in batch.items() + } + + self.state_velocity_field, loss = self.step_fn( + rng_step_fn, self.state_velocity_field, batch + ) + if self.learn_rescaling: + ( + self.state_eta, self.state_xi, eta_predictions, xi_predictions, + loss_a, loss_b + ) = self.unbalancedness_handler.step_fn( + source=batch["source"], + target=batch["target"], + condition=batch["source_conditions"], + a=tmat.sum(axis=1), + b=tmat.sum(axis=0), + state_eta=self.unbalancedness_handler.state_eta, + state_xi=self.unbalancedness_handler.state_xi, + ) + if iteration % self.valid_freq == 0: + self._valid_step(valid_loader, iteration) diff --git a/src/ott/neural/flows/otfm.py b/src/ott/neural/flows/otfm.py index de0c8c5e5..84fcd5e96 100644 --- a/src/ott/neural/flows/otfm.py +++ b/src/ott/neural/flows/otfm.py @@ -63,7 +63,7 @@ def __init__( flow: Type[flows.BaseFlow], time_sampler: Callable[[jax.Array, int], jnp.ndarray], optimizer: optax.GradientTransformation, - ot_matcher: base_solver.OTMatcher, + ot_matcher: base_solver.OTMatcherLinear, unbalancedness_handler: base_solver.UnbalancednessHandler, checkpoint_manager: Type[checkpoint.CheckpointManager] = None, epsilon: float = 1e-2, diff --git a/src/ott/neural/models/base_solver.py b/src/ott/neural/models/base_solver.py index 43675d4fc..a4238cd05 100644 --- a/src/ott/neural/models/base_solver.py +++ b/src/ott/neural/models/base_solver.py @@ -26,7 +26,9 @@ from ott.solvers.linear import sinkhorn from ott.solvers.quadratic import gromov_wasserstein -__all__ = ["OTMatcher", "UnbalancednessHandler"] +__all__ = [ + "BaseOTMatcher", "OTMatcherLinear", "OTMatcherQuad", "UnbalancednessHandler" +] def _get_sinkhorn_match_fn( @@ -132,37 +134,8 @@ def match_pairs( return match_pairs -class OTMatcher: - """Class for mini-batch OT in neural optimal transport solvers. 
- - Args: - ot_solver: OT solver to match samples from the source and the target - distribution as proposed in :cite:`tong:23`, :cite:`pooladian:23`. - If :obj:`None`, no matching will be performed as proposed in - :cite:`lipman:22`. - """ - - def __init__( - self, - ot_solver: was_solver.WassersteinSolver, - epsilon: float = 1e-2, - cost_fn: Optional[costs.CostFn] = None, - scale_cost: Union[bool, int, float, - Literal["mean", "max_norm", "max_bound", "max_cost", - "median"]] = "mean", - tau_a: float = 1.0, - tau_b: float = 1.0 - ) -> None: - self.ot_solver = ot_solver - self.epsilon = epsilon - self.cost_fn = cost_fn - self.scale_cost = scale_cost - self.tau_a = tau_a - self.tau_b = tau_b - self.match_fn = self._get_sinkhorn_match_fn( - self.ot_solver, self.epsilon, self.cost_fn, self.scale_cost, self.tau_a, - self.tau_b - ) +class BaseOTMatcher: + """Base class for mini-batch neural OT matching classes.""" def _resample_data( self, @@ -229,6 +202,48 @@ def _sample_conditional_indices_from_tmap( for b in target_arrays ) + +class OTMatcherLinear(BaseOTMatcher): + """Class for mini-batch OT in neural optimal transport solvers. + + Args: + ot_solver: OT solver to match samples from the source and the target + distribution as proposed in :cite:`tong:23`, :cite:`pooladian:23`. + If :obj:`None`, no matching will be performed as proposed in + :cite:`lipman:22`. + """ + + def __init__( + self, + ot_solver: was_solver.WassersteinSolver, + epsilon: float = 1e-2, + cost_fn: Optional[costs.CostFn] = None, + scale_cost: Union[bool, int, float, + Literal["mean", "max_norm", "max_bound", "max_cost", + "median"]] = "mean", + tau_a: float = 1.0, + tau_b: float = 1.0, + ) -> None: + + if isinstance( + ot_solver, gromov_wasserstein.GromovWasserstein + ) and epsilon is not None: + raise ValueError( + "If `ot_solver` is `GromovWasserstein`, `epsilon` must be `None`. 
" + + "This check is performed to ensure that in the (fused) Gromov case " + + "the `epsilon` parameter is passed via the `ot_solver`." + ) + self.ot_solver = ot_solver + self.epsilon = epsilon + self.cost_fn = cost_fn + self.scale_cost = scale_cost + self.tau_a = tau_a + self.tau_b = tau_b + self.match_fn = None if ot_solver is None else self._get_sinkhorn_match_fn( + self.ot_solver, self.epsilon, self.cost_fn, self.scale_cost, self.tau_a, + self.tau_b + ) + def _get_sinkhorn_match_fn(self, *args, **kwargs) -> jnp.ndarray: fn = _get_sinkhorn_match_fn(*args, **kwargs) @@ -238,6 +253,43 @@ def match_pairs(*args, **kwargs): return match_pairs + +class OTMatcherQuad(BaseOTMatcher): + """Class for mini-batch OT in neural optimal transport solvers. + + Args: + ot_solver: OT solver to match samples from the source and the target + distribution as proposed in :cite:`tong:23`, :cite:`pooladian:23`. + If :obj:`None`, no matching will be performed as proposed in + :cite:`lipman:22`. + """ + + def __init__( + self, + ot_solver: was_solver.WassersteinSolver, + cost_fn: Optional[costs.CostFn] = None, + scale_cost: Union[bool, int, float, + Literal["mean", "max_norm", "max_bound", "max_cost", + "median"]] = "mean", + tau_a: float = 1.0, + tau_b: float = 1.0, + fused_penalty: float = 0.0, + ) -> None: + self.ot_solver = ot_solver + self.cost_fn = cost_fn + self.scale_cost = scale_cost + self.tau_a = tau_a + self.tau_b = tau_b + self.fused_penalty = fused_penalty + self.match_fn = self._get_gromov_match_fn( + self.ot_solver, + self.cost_fn, + self.scale_cost, + self.tau_a, + self.tau_b, + fused_penalty=self.fused_penalty + ) + def _get_gromov_match_fn(self, *args, **kwargs) -> jnp.ndarray: fn = _get_gromov_match_fn(*args, **kwargs) @@ -326,7 +378,7 @@ def __init__( ) ) elif isinstance(ot_solver, gromov_wasserstein.GromovWasserstein): - self.compute_unbalanced_marginals = self._get_compute_unbalanced_marginals_quad + raise NotImplementedError else: self.compute_unbalanced_marginals 
= None self.setup(source_dim=source_dim, target_dim=target_dim, cond_dim=cond_dim) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 191b04d2a..7ab2a957c 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -22,7 +22,7 @@ import optax from ott.geometry import costs -from ott.neural.flows.genot import GENOT +from ott.neural.flows.genot import GENOTLin, GENOTQuad from ott.neural.flows.models import VelocityField from ott.neural.flows.samplers import uniform_sampler from ott.neural.models import base_solver @@ -31,7 +31,7 @@ from ott.solvers.quadratic import gromov_wasserstein -class TestGENOT: +class TestGENOTLin: @pytest.mark.parametrize("scale_cost", ["mean", 2.0]) @pytest.mark.parametrize("k_samples_per_x", [1, 3]) @@ -41,7 +41,7 @@ def test_genot_linear_unconditional( scale_cost: Union[float, Literal["mean"]], k_samples_per_x: int, solver_latent_to_data: Optional[str] ): - solver_latent_to_data = ( + matcher_latent_to_data = base_solver.OTMatcherLinear( None if solver_latent_to_data is None else sinkhorn.Sinkhorn() ) batch = next(genot_data_loader_linear) @@ -58,27 +58,27 @@ def test_genot_linear_unconditional( latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() + ot_matcher = base_solver.OTMatcherLinear( + ot_solver, cost_fn=costs.SqEuclidean(), scale_cost=scale_cost + ) unbalancedness_handler = base_solver.UnbalancednessHandler( random.PRNGKey(0), source_dim, target_dim, condition_dim ) time_sampler = uniform_sampler optimizer = optax.adam(learning_rate=1e-3) - genot = GENOT( + genot = GENOTLin( neural_vf, input_dim=source_dim, output_dim=target_dim, cond_dim=condition_dim, iterations=3, valid_freq=2, - ot_solver=ot_solver, - epsilon=0.1, - cost_fn=costs.SqEuclidean(), - scale_cost=scale_cost, + ot_matcher=ot_matcher, optimizer=optimizer, time_sampler=time_sampler, unbalancedness_handler=unbalancedness_handler, k_samples_per_x=k_samples_per_x, - solver_latent_to_data=solver_latent_to_data, + 
matcher_latent_to_data=matcher_latent_to_data, ) genot(genot_data_loader_linear, genot_data_loader_linear) @@ -94,69 +94,157 @@ def test_genot_linear_unconditional( @pytest.mark.parametrize("k_samples_per_x", [1, 2]) @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) - def test_genot_quad_unconditional( - self, genot_data_loader_quad: Iterator, k_samples_per_x: int, - solver_latent_to_data: Optional[str] + def test_genot_linear_conditional( + self, genot_data_loader_linear_conditional: Iterator, + k_samples_per_x: int, solver_latent_to_data: Optional[str] ): - None if solver_latent_to_data is None else sinkhorn.Sinkhorn() - batch = next(genot_data_loader_quad) - source_quad, target_quad, source_condition = batch["source_quad"], batch[ - "target_quad"], batch["source_conditions"] + matcher_latent_to_data = base_solver.OTMatcherLinear( + None if solver_latent_to_data is None else sinkhorn.Sinkhorn() + ) + batch = next(genot_data_loader_linear_conditional) + source_lin, target_lin, source_condition = batch["source_lin"], batch[ + "target_lin"], batch["source_conditions"] + source_dim = source_lin.shape[1] + target_dim = target_lin.shape[1] + condition_dim = source_condition.shape[1] - source_dim = source_quad.shape[1] - target_dim = target_quad.shape[1] - condition_dim = 0 neural_vf = VelocityField( output_dim=target_dim, condition_dim=source_dim + condition_dim, latent_embed_dim=5, ) - ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) - + ot_solver = sinkhorn.Sinkhorn() + ot_matcher = base_solver.OTMatcherLinear( + ot_solver, cost_fn=costs.SqEuclidean() + ) + time_sampler = uniform_sampler unbalancedness_handler = base_solver.UnbalancednessHandler( random.PRNGKey(0), source_dim, target_dim, condition_dim ) - time_sampler = functools.partial(uniform_sampler, offset=1e-2) optimizer = optax.adam(learning_rate=1e-3) - genot = GENOT( + genot = GENOTLin( neural_vf, input_dim=source_dim, output_dim=target_dim, cond_dim=condition_dim, 
iterations=3, valid_freq=2, - ot_solver=ot_solver, - epsilon=None, - cost_fn=costs.SqEuclidean(), + ot_matcher=ot_matcher, unbalancedness_handler=unbalancedness_handler, - scale_cost=1.0, optimizer=optimizer, time_sampler=time_sampler, k_samples_per_x=k_samples_per_x, + matcher_latent_to_data=matcher_latent_to_data, + ) + genot( + genot_data_loader_linear_conditional, + genot_data_loader_linear_conditional ) - genot(genot_data_loader_quad, genot_data_loader_quad) - result_forward = genot.transport( - source_quad, condition=source_condition, forward=True + source_lin, condition=source_condition, forward=True ) assert isinstance(result_forward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_forward)) == 0 + @pytest.mark.parametrize("conditional", [False, True]) + @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) + def test_genot_linear_learn_rescaling( + self, conditional: bool, genot_data_loader_linear: Iterator, + solver_latent_to_data: Optional[str], + genot_data_loader_linear_conditional: Iterator + ): + matcher_latent_to_data = base_solver.OTMatcherLinear( + None if solver_latent_to_data is None else sinkhorn.Sinkhorn() + ) + data_loader = ( + genot_data_loader_linear_conditional + if conditional else genot_data_loader_linear + ) + + batch = next(data_loader) + source_lin, target_lin, source_condition = batch["source_lin"], batch[ + "target_lin"], batch["source_conditions"] + + source_dim = source_lin.shape[1] + target_dim = target_lin.shape[1] + condition_dim = source_condition.shape[1] if conditional else 0 + + neural_vf = VelocityField( + output_dim=target_dim, + condition_dim=source_dim + condition_dim, + latent_embed_dim=5, + ) + ot_solver = sinkhorn.Sinkhorn() + ot_matcher = base_solver.OTMatcherLinear( + ot_solver, + cost_fn=costs.SqEuclidean(), + ) + time_sampler = uniform_sampler + optimizer = optax.adam(learning_rate=1e-3) + + tau_a = 0.9 + tau_b = 0.2 + rescaling_a = RescalingMLP(hidden_dim=4, condition_dim=condition_dim) + 
rescaling_b = RescalingMLP(hidden_dim=4, condition_dim=condition_dim) + + unbalancedness_handler = base_solver.UnbalancednessHandler( + random.PRNGKey(0), + source_dim, + target_dim, + condition_dim, + tau_a=tau_a, + tau_b=tau_b, + rescaling_a=rescaling_a, + rescaling_b=rescaling_b + ) + + genot = GENOTLin( + neural_vf, + input_dim=source_dim, + output_dim=target_dim, + cond_dim=condition_dim, + iterations=3, + valid_freq=2, + ot_matcher=ot_matcher, + optimizer=optimizer, + time_sampler=time_sampler, + unbalancedness_handler=unbalancedness_handler, + matcher_latent_to_data=matcher_latent_to_data, + ) + + genot(data_loader, data_loader) + + result_eta = genot.unbalancedness_handler.evaluate_eta( + source_lin, condition=source_condition + ) + assert isinstance(result_eta, jnp.ndarray) + assert jnp.sum(jnp.isnan(result_eta)) == 0 + + result_xi = genot.unbalancedness_handler.evaluate_xi( + target_lin, condition=source_condition + ) + assert isinstance(result_xi, jnp.ndarray) + assert jnp.sum(jnp.isnan(result_xi)) == 0 + + +class TestGENOTQuad: + @pytest.mark.parametrize("k_samples_per_x", [1, 2]) @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) - def test_genot_fused_unconditional( - self, genot_data_loader_fused: Iterator, k_samples_per_x: int, + def test_genot_quad_unconditional( + self, genot_data_loader_quad: Iterator, k_samples_per_x: int, solver_latent_to_data: Optional[str] ): - None if solver_latent_to_data is None else sinkhorn.Sinkhorn() - batch = next(genot_data_loader_fused) - source_lin, source_quad, target_lin, target_quad, source_condition = batch[ - "source_lin"], batch["source_quad"], batch["target_lin"], batch[ - "target_quad"], batch["source_conditions"] + matcher_latent_to_data = base_solver.OTMatcherLinear( + None if solver_latent_to_data is None else sinkhorn.Sinkhorn() + ) + batch = next(genot_data_loader_quad) + source_quad, target_quad, source_condition = batch["source_quad"], batch[ + "target_quad"], 
batch["source_conditions"] - source_dim = source_lin.shape[1] + source_quad.shape[1] - target_dim = target_lin.shape[1] + target_quad.shape[1] + source_dim = source_quad.shape[1] + target_dim = target_quad.shape[1] condition_dim = 0 neural_vf = VelocityField( output_dim=target_dim, @@ -164,85 +252,89 @@ def test_genot_fused_unconditional( latent_embed_dim=5, ) ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) + ot_matcher = base_solver.OTMatcherQuad( + ot_solver, cost_fn=costs.SqEuclidean() + ) + unbalancedness_handler = base_solver.UnbalancednessHandler( random.PRNGKey(0), source_dim, target_dim, condition_dim ) + time_sampler = functools.partial(uniform_sampler, offset=1e-2) optimizer = optax.adam(learning_rate=1e-3) - genot = GENOT( + genot = GENOTQuad( neural_vf, input_dim=source_dim, output_dim=target_dim, cond_dim=condition_dim, - epsilon=None, iterations=3, valid_freq=2, - ot_solver=ot_solver, - cost_fn=costs.SqEuclidean(), - scale_cost=1.0, + ot_matcher=ot_matcher, unbalancedness_handler=unbalancedness_handler, optimizer=optimizer, - fused_penalty=0.5, + time_sampler=time_sampler, k_samples_per_x=k_samples_per_x, + matcher_latent_to_data=matcher_latent_to_data, ) - genot(genot_data_loader_fused, genot_data_loader_fused) + genot(genot_data_loader_quad, genot_data_loader_quad) result_forward = genot.transport( - jnp.concatenate((source_lin, source_quad), axis=1), - condition=source_condition, - forward=True + source_quad, condition=source_condition, forward=True ) assert isinstance(result_forward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_forward)) == 0 @pytest.mark.parametrize("k_samples_per_x", [1, 2]) @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) - def test_genot_linear_conditional( - self, genot_data_loader_linear_conditional: Iterator, - k_samples_per_x: int, solver_latent_to_data: Optional[str] + def test_genot_fused_unconditional( + self, genot_data_loader_fused: Iterator, k_samples_per_x: int, + 
solver_latent_to_data: Optional[str] ): - None if solver_latent_to_data is None else sinkhorn.Sinkhorn() - batch = next(genot_data_loader_linear_conditional) - source_lin, target_lin, source_condition = batch["source_lin"], batch[ - "target_lin"], batch["source_conditions"] - source_dim = source_lin.shape[1] - target_dim = target_lin.shape[1] - condition_dim = source_condition.shape[1] + matcher_latent_to_data = base_solver.OTMatcherLinear( + None if solver_latent_to_data is None else sinkhorn.Sinkhorn() + ) + batch = next(genot_data_loader_fused) + source_lin, source_quad, target_lin, target_quad, source_condition = batch[ + "source_lin"], batch["source_quad"], batch["target_lin"], batch[ + "target_quad"], batch["source_conditions"] + source_dim = source_lin.shape[1] + source_quad.shape[1] + target_dim = target_lin.shape[1] + target_quad.shape[1] + condition_dim = 0 neural_vf = VelocityField( output_dim=target_dim, condition_dim=source_dim + condition_dim, latent_embed_dim=5, ) - ot_solver = sinkhorn.Sinkhorn() - time_sampler = uniform_sampler + ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) + ot_matcher = base_solver.OTMatcherQuad( + ot_solver, cost_fn=costs.SqEuclidean(), fused_penalty=0.5 + ) + unbalancedness_handler = base_solver.UnbalancednessHandler( random.PRNGKey(0), source_dim, target_dim, condition_dim ) optimizer = optax.adam(learning_rate=1e-3) - genot = GENOT( + genot = GENOTQuad( neural_vf, input_dim=source_dim, output_dim=target_dim, cond_dim=condition_dim, iterations=3, valid_freq=2, - ot_solver=ot_solver, - epsilon=0.1, - cost_fn=costs.SqEuclidean(), - scale_cost=1.0, + ot_matcher=ot_matcher, unbalancedness_handler=unbalancedness_handler, optimizer=optimizer, - time_sampler=time_sampler, k_samples_per_x=k_samples_per_x, + matcher_latent_to_data=matcher_latent_to_data, ) - genot( - genot_data_loader_linear_conditional, - genot_data_loader_linear_conditional - ) + genot(genot_data_loader_fused, genot_data_loader_fused) + 
result_forward = genot.transport( - source_lin, condition=source_condition, forward=True + jnp.concatenate((source_lin, source_quad), axis=1), + condition=source_condition, + forward=True ) assert isinstance(result_forward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_forward)) == 0 @@ -253,7 +345,9 @@ def test_genot_quad_conditional( self, genot_data_loader_quad_conditional: Iterator, k_samples_per_x: int, solver_latent_to_data: Optional[str] ): - None if solver_latent_to_data is None else sinkhorn.Sinkhorn() + matcher_latent_to_data = base_solver.OTMatcherLinear( + None if solver_latent_to_data is None else sinkhorn.Sinkhorn() + ) batch = next(genot_data_loader_quad_conditional) source_quad, target_quad, source_condition = batch["source_quad"], batch[ "target_quad"], batch["source_conditions"] @@ -267,27 +361,28 @@ def test_genot_quad_conditional( latent_embed_dim=5, ) ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) + ot_matcher = base_solver.OTMatcherQuad( + ot_solver, cost_fn=costs.SqEuclidean() + ) time_sampler = uniform_sampler unbalancedness_handler = base_solver.UnbalancednessHandler( random.PRNGKey(0), source_dim, target_dim, condition_dim ) optimizer = optax.adam(learning_rate=1e-3) - genot = GENOT( + genot = GENOTQuad( neural_vf, input_dim=source_dim, output_dim=target_dim, cond_dim=condition_dim, iterations=3, valid_freq=2, - ot_solver=ot_solver, - epsilon=None, - cost_fn=costs.SqEuclidean(), - scale_cost=1.0, + ot_matcher=ot_matcher, unbalancedness_handler=unbalancedness_handler, optimizer=optimizer, time_sampler=time_sampler, k_samples_per_x=k_samples_per_x, + matcher_latent_to_data=matcher_latent_to_data, ) genot( genot_data_loader_quad_conditional, genot_data_loader_quad_conditional @@ -305,7 +400,7 @@ def test_genot_fused_conditional( self, genot_data_loader_fused_conditional: Iterator, k_samples_per_x: int, solver_latent_to_data: Optional[str] ): - None if solver_latent_to_data is None else sinkhorn.Sinkhorn() + 
matcher_latent_to_data = base_solver.OTMatcherLinear(solver_latent_to_data) batch = next(genot_data_loader_fused_conditional) source_lin, source_quad, target_lin, target_quad, source_condition = batch[ "source_lin"], batch["source_quad"], batch["target_lin"], batch[ @@ -319,27 +414,28 @@ def test_genot_fused_conditional( latent_embed_dim=5, ) ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) + ot_matcher = base_solver.OTMatcherQuad( + ot_solver, cost_fn=costs.SqEuclidean(), fused_penalty=0.5 + ) time_sampler = uniform_sampler optimizer = optax.adam(learning_rate=1e-3) unbalancedness_handler = base_solver.UnbalancednessHandler( random.PRNGKey(0), source_dim, target_dim, condition_dim ) - genot = GENOT( + genot = GENOTQuad( neural_vf, input_dim=source_dim, output_dim=target_dim, cond_dim=condition_dim, iterations=3, valid_freq=2, - ot_solver=ot_solver, - epsilon=None, - cost_fn=costs.SqEuclidean(), - scale_cost=1.0, + ot_matcher=ot_matcher, unbalancedness_handler=unbalancedness_handler, optimizer=optimizer, time_sampler=time_sampler, k_samples_per_x=k_samples_per_x, + matcher_latent_to_data=matcher_latent_to_data, ) genot( genot_data_loader_fused_conditional, genot_data_loader_fused_conditional @@ -352,79 +448,3 @@ def test_genot_fused_conditional( ) assert isinstance(result_forward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_forward)) == 0 - - @pytest.mark.parametrize("conditional", [False, True]) - @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) - def test_genot_linear_learn_rescaling( - self, conditional: bool, genot_data_loader_linear: Iterator, - solver_latent_to_data: Optional[str], - genot_data_loader_linear_conditional: Iterator - ): - None if solver_latent_to_data is None else sinkhorn.Sinkhorn() - data_loader = ( - genot_data_loader_linear_conditional - if conditional else genot_data_loader_linear - ) - - batch = next(data_loader) - source_lin, target_lin, source_condition = batch["source_lin"], batch[ - "target_lin"], 
batch["source_conditions"] - - source_dim = source_lin.shape[1] - target_dim = target_lin.shape[1] - condition_dim = source_condition.shape[1] if conditional else 0 - - neural_vf = VelocityField( - output_dim=target_dim, - condition_dim=source_dim + condition_dim, - latent_embed_dim=5, - ) - ot_solver = sinkhorn.Sinkhorn() - time_sampler = uniform_sampler - optimizer = optax.adam(learning_rate=1e-3) - - tau_a = 0.9 - tau_b = 0.2 - rescaling_a = RescalingMLP(hidden_dim=4, condition_dim=condition_dim) - rescaling_b = RescalingMLP(hidden_dim=4, condition_dim=condition_dim) - - unbalancedness_handler = base_solver.UnbalancednessHandler( - random.PRNGKey(0), - source_dim, - target_dim, - condition_dim, - tau_a=tau_a, - tau_b=tau_b, - rescaling_a=rescaling_a, - rescaling_b=rescaling_b - ) - - genot = GENOT( - neural_vf, - input_dim=source_dim, - output_dim=target_dim, - cond_dim=condition_dim, - iterations=3, - valid_freq=2, - ot_solver=ot_solver, - epsilon=0.1, - cost_fn=costs.SqEuclidean(), - scale_cost=1.0, - optimizer=optimizer, - time_sampler=time_sampler, - unbalancedness_handler=unbalancedness_handler, - ) - - genot(data_loader, data_loader) - - result_eta = genot.unbalancedness_handler.evaluate_eta( - source_lin, condition=source_condition - ) - assert isinstance(result_eta, jnp.ndarray) - assert jnp.sum(jnp.isnan(result_eta)) == 0 - - result_xi = genot.unbalancedness_handler.evaluate_xi( - target_lin, condition=source_condition - ) - assert isinstance(result_xi, jnp.ndarray) - assert jnp.sum(jnp.isnan(result_xi)) == 0 diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index c3675b820..9452c2faa 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -46,7 +46,7 @@ def test_flow_matching( latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() - ot_matcher = base_solver.OTMatcher(ot_solver) + ot_matcher = base_solver.OTMatcherLinear(ot_solver) time_sampler = samplers.uniform_sampler optimizer = optax.adam(learning_rate=1e-3) 
unbalancedness_handler = base_solver.UnbalancednessHandler( @@ -99,7 +99,7 @@ def test_flow_matching_with_conditions( latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() - ot_matcher = base_solver.OTMatcher(ot_solver) + ot_matcher = base_solver.OTMatcherLinear(ot_solver) time_sampler = functools.partial(samplers.uniform_sampler, offset=1e-5) optimizer = optax.adam(learning_rate=1e-3) unbalancedness_handler = base_solver.UnbalancednessHandler( @@ -156,7 +156,7 @@ def test_flow_matching_conditional( latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() - ot_matcher = base_solver.OTMatcher(ot_solver) + ot_matcher = base_solver.OTMatcherLinear(ot_solver) time_sampler = samplers.uniform_sampler optimizer = optax.adam(learning_rate=1e-3) unbalancedness_handler = base_solver.UnbalancednessHandler( @@ -218,7 +218,7 @@ def test_flow_matching_learn_rescaling( tau_b = 0.2 rescaling_a = nets.RescalingMLP(hidden_dim=4, condition_dim=condition_dim) rescaling_b = nets.RescalingMLP(hidden_dim=4, condition_dim=condition_dim) - ot_matcher = base_solver.OTMatcher( + ot_matcher = base_solver.OTMatcherLinear( ot_solver, tau_a=tau_a, tau_b=tau_b, From 20fbbb86633c39dce2d6647a894afa778745d57c Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Sun, 11 Feb 2024 13:06:09 +0100 Subject: [PATCH 086/186] remove dictionaries in OTFM and GENOT classes --- src/ott/neural/flows/genot.py | 190 +++++++++++++++------------ src/ott/neural/flows/otfm.py | 50 ++++--- src/ott/neural/models/base_solver.py | 2 +- tests/neural/genot_test.py | 3 + 4 files changed, 133 insertions(+), 112 deletions(-) diff --git a/src/ott/neural/flows/genot.py b/src/ott/neural/flows/genot.py index bd825ad40..7a2989b0e 100644 --- a/src/ott/neural/flows/genot.py +++ b/src/ott/neural/flows/genot.py @@ -13,7 +13,7 @@ # limitations under the License. 
import functools import types -from typing import Any, Callable, Dict, Optional, Type, Union +from typing import Any, Callable, Dict, Optional, Tuple, Type, Union import jax import jax.numpy as jnp @@ -30,7 +30,13 @@ class GENOTBase: - """The GENOT training class as introduced in :cite:`klein_uscidda:23`. + """Base class for GENOT models (:cite:`klein_uscidda:23`). + + GENOT (Generative Entropic Neural Optimal Transport) is a neural solver + for entropic OT prooblems, in the linear + (:class:`ott.neural.flows.genot.GENOTLin`), the Gromov-Wasserstein, and + the Fused Gromov-Wasserstein ((:class:`ott.neural.flows.genot.GENOTQUad`)) + setting. Args: velocity_field: Neural vector field parameterized by a neural network. @@ -148,34 +154,35 @@ def _get_step_fn(self) -> Callable: def step_fn( rng: jax.Array, state_velocity_field: train_state.TrainState, - batch: Dict[str, jnp.array], + time: jnp.ndarray, + source: jnp.ndarray, + target: jnp.ndarray, + latent: jnp.ndarray, + source_conditions: Optional[jnp.ndarray], ): def loss_fn( - params: jnp.ndarray, batch: Dict[str, jnp.array], - rng: jax.random.PRNGKeyArray + params: jnp.ndarray, time: jnp.ndarray, source: jnp.ndarray, + target: jnp.ndarray, latent: jnp.ndarray, + source_conditions: Optional[jnp.ndarray], rng: jax.random.PRNGKeyArray ): - x_t = self.flow.compute_xt( - rng, batch["time"], batch["latent"], batch["target"] - ) + x_t = self.flow.compute_xt(rng, time, latent, target) apply_fn = functools.partial( state_velocity_field.apply_fn, {"params": params} ) cond_input = jnp.concatenate([ - batch[el] - for el in ["source", "source_conditions"] - if batch[el] is not None - ], - axis=1) - v_t = jax.vmap(apply_fn)(t=batch["time"], x=x_t, condition=cond_input) - u_t = self.flow.compute_ut( - batch["time"], batch["latent"], batch["target"] - ) + source, source_conditions + ], axis=1) if source_conditions is not None else source + v_t = jax.vmap(apply_fn)(t=time, x=x_t, condition=cond_input) + u_t = 
self.flow.compute_ut(time, latent, target) return jnp.mean((v_t - u_t) ** 2) grad_fn = jax.value_and_grad(loss_fn, has_aux=False) - loss, grads = grad_fn(state_velocity_field.params, batch, rng) + loss, grads = grad_fn( + state_velocity_field.params, time, source, target, latent, + source_conditions, rng + ) return state_velocity_field.apply_gradients(grads=grads), loss @@ -255,8 +262,20 @@ def learn_rescaling(self) -> bool: self.unbalancedness_handler.rescaling_b is not None ) + def _reshape_samples(self, arrays: Tuple[jnp.ndarray, ...], + batch_size: int) -> Tuple[jnp.ndarray, ...]: + return tuple( + jnp.reshape(arr, (batch_size * self.k_samples_per_x, + -1)) if arr is not None else None for arr in arrays + ) + class GENOTLin(GENOTBase): + """Implementation of GENOT-L (:cite:`klein:23`). + + GENOT-L (Generative Entropic Neural Optimal Transport, linear) solves the + entropic (linear) OT problem. + """ def __call__(self, train_loader, valid_loader): """Train GENOT. @@ -273,63 +292,61 @@ def __call__(self, train_loader, valid_loader): self.rng, rng_time, rng_resample, rng_noise, rng_latent_data_match, rng_step_fn ) = jax.random.split(self.rng, 6) + source, source_conditions, target = batch["source_lin"], batch[ + "source_conditions"], batch["target_lin"] + batch_size = len(batch["source_lin"]) n_samples = batch_size * self.k_samples_per_x - batch["time"] = self.time_sampler(rng_time, n_samples) - batch["latent"] = self.latent_noise_fn( + time = self.time_sampler(rng_time, n_samples) + latent = self.latent_noise_fn( rng_noise, shape=(self.k_samples_per_x, batch_size) ) tmat = self.ot_matcher.match_fn( - batch["source_lin"], - batch["target_lin"], + source, + target, ) - batch["source"] = batch["source_lin"] - batch["target"] = batch["target_lin"] - - (batch["source"], batch["source_conditions"]), ( - batch["target"], - ) = self.ot_matcher._sample_conditional_indices_from_tmap( - rng_resample, - tmat, - self.k_samples_per_x, (batch["source"], 
batch["source_conditions"]), - (batch["target"],), + (source, source_conditions + ), (target,) = self.ot_matcher._sample_conditional_indices_from_tmap( + rng=rng_resample, + tmat=tmat, + k_samples_per_x=self.k_samples_per_x, + source_arrays=(source, source_conditions), + target_arrays=(target,), source_is_balanced=(self.unbalancedness_handler.tau_a == 1.0) ) if self.matcher_latent_to_data.match_fn is not None: tmats_latent_data = jnp.array( jax.vmap(self.matcher_latent_to_data.match_fn, 0, - 0)(x=batch["latent"], y=batch["target"]) + 0)(x=latent, y=target) ) rng_latent_data_match = jax.random.split( rng_latent_data_match, self.k_samples_per_x ) - (batch["source"], batch["source_conditions"] - ), (batch["target"],) = jax.vmap(self.ot_matcher._resample_data, 0, 0)( + (source, source_conditions + ), (target,) = jax.vmap(self.ot_matcher._resample_data, 0, 0)( rng_latent_data_match, tmats_latent_data, - (batch["source"], batch["source_conditions"]), (batch["target"],) + (source, source_conditions), (target,) ) - batch = { - key: - jnp.reshape(arr, (batch_size * self.k_samples_per_x, - -1)) if arr is not None else None - for key, arr in batch.items() - } + source, source_conditions, target, latent = self._reshape_samples( + (source, source_conditions, target, latent), batch_size + ) self.state_velocity_field, loss = self.step_fn( - rng_step_fn, self.state_velocity_field, batch + rng_step_fn, self.state_velocity_field, time, source, target, latent, + source_conditions ) if self.learn_rescaling: ( self.state_eta, self.state_xi, eta_predictions, xi_predictions, loss_a, loss_b ) = self.unbalancedness_handler.step_fn( - source=batch["source"], - target=batch["target"], - condition=batch["source_conditions"], + source=source, + target=target, + condition=source_conditions, a=tmat.sum(axis=1), b=tmat.sum(axis=0), state_eta=self.unbalancedness_handler.state_eta, @@ -340,6 +357,13 @@ def __call__(self, train_loader, valid_loader): class GENOTQuad(GENOTBase): + """Implementation 
of GENOT-Q and GENOT-F (:cite:`klein:23`). + + GENOT-Q (Generative Entropic Neural Optimal Transport, quadratic) and + GENOT-F (Generative Entropic Neural Optimal Transport, fused) solve the + entropic Gromov-Wasserstein and the entropic Fused Gromov-Wasserstein problem, + respectively. + """ def __call__(self, train_loader, valid_loader): """Train GENOT. @@ -356,73 +380,71 @@ def __call__(self, train_loader, valid_loader): self.rng, rng_time, rng_resample, rng_noise, rng_latent_data_match, rng_step_fn ) = jax.random.split(self.rng, 6) - batch_size = len( - batch["source_lin"] - ) if batch["source_lin"] is not None else len(batch["source_quad"]) + (source_lin, source_quad, source_conditions, target_lin, target_quad) = ( + batch["source_lin"], batch["source_quad"], batch["source_conditions"], + batch["target_lin"], batch["target_quad"] + ) + batch_size = len(source_quad) n_samples = batch_size * self.k_samples_per_x - batch["time"] = self.time_sampler(rng_time, n_samples) - batch["latent"] = self.latent_noise_fn( + time = self.time_sampler(rng_time, n_samples) + latent = self.latent_noise_fn( rng_noise, shape=(self.k_samples_per_x, batch_size) ) tmat = self.ot_matcher.match_fn( - batch["source_lin"], batch["source_quad"], batch["target_lin"], - batch["target_quad"] + source_lin, source_quad, target_lin, target_quad ) if self.ot_matcher.fused_penalty > 0.0: - batch["source"] = jnp.concatenate( - (batch["source_lin"], batch["source_quad"]), axis=1 - ) - batch["target"] = jnp.concatenate( - (batch["target_lin"], batch["target_quad"]), axis=1 - ) + source = jnp.concatenate((source_lin, source_quad), axis=1) + target = jnp.concatenate((target_lin, target_quad), axis=1) else: - batch["source"] = batch["source_quad"] - batch["target"] = batch["target_quad"] - - (batch["source"], batch["source_conditions"]), ( - batch["target"], - ) = self.ot_matcher._sample_conditional_indices_from_tmap( - rng_resample, - tmat, - self.k_samples_per_x, (batch["source"], 
batch["source_conditions"]), - (batch["target"],), - source_is_balanced=(self.unbalancedness_handler.tau_a == 1.0) + source = source_quad + target = target_quad + + (source, source_conditions), (target,) = ( + self.ot_matcher._sample_conditional_indices_from_tmap( + rng=rng_resample, + tmat=tmat, + k_samples_per_x=self.k_samples_per_x, + source_arrays=(source, source_conditions), + target_arrays=(target,), + source_is_balanced=(self.unbalancedness_handler.tau_a == 1.0) + ) ) if self.matcher_latent_to_data.match_fn is not None: tmats_latent_data = jnp.array( jax.vmap(self.matcher_latent_to_data.match_fn, 0, - 0)(x=batch["latent"], y=batch["target"]) + 0)(x=latent, y=target) ) rng_latent_data_match = jax.random.split( rng_latent_data_match, self.k_samples_per_x ) - (batch["source"], batch["source_conditions"] - ), (batch["target"],) = jax.vmap(self.ot_matcher._resample_data, 0, 0)( + + (source, source_conditions + ), (target,) = jax.vmap(self.ot_matcher._resample_data, 0, 0)( rng_latent_data_match, tmats_latent_data, - (batch["source"], batch["source_conditions"]), (batch["target"],) + (source, source_conditions), (target,) ) - batch = { - key: - jnp.reshape(arr, (batch_size * self.k_samples_per_x, - -1)) if arr is not None else None - for key, arr in batch.items() - } + + source, source_conditions, target, latent = self._reshape_samples( + (source, source_conditions, target, latent), batch_size + ) self.state_velocity_field, loss = self.step_fn( - rng_step_fn, self.state_velocity_field, batch + rng_step_fn, self.state_velocity_field, time, source, target, latent, + source_conditions ) if self.learn_rescaling: ( self.state_eta, self.state_xi, eta_predictions, xi_predictions, loss_a, loss_b ) = self.unbalancedness_handler.step_fn( - source=batch["source"], - target=batch["target"], - condition=batch["source_conditions"], + source=source, + target=target, + condition=source_conditions, a=tmat.sum(axis=1), b=tmat.sum(axis=0), 
state_eta=self.unbalancedness_handler.state_eta, diff --git a/src/ott/neural/flows/otfm.py b/src/ott/neural/flows/otfm.py index 84fcd5e96..0027bf345 100644 --- a/src/ott/neural/flows/otfm.py +++ b/src/ott/neural/flows/otfm.py @@ -21,7 +21,6 @@ import diffrax import optax from flax.training import train_state -from orbax import checkpoint from ott import utils from ott.geometry import costs @@ -46,7 +45,6 @@ class OTFlowMatching: flow: Flow between source and target distribution. time_sampler: Sampler for the time. optimizer: Optimizer for `velocity_field`. - checkpoint_manager: Checkpoint manager. callback_fn: Callback function. num_eval_samples: Number of samples to evaluate on during evaluation. rng: Random number generator. @@ -65,7 +63,6 @@ def __init__( optimizer: optax.GradientTransformation, ot_matcher: base_solver.OTMatcherLinear, unbalancedness_handler: base_solver.UnbalancednessHandler, - checkpoint_manager: Type[checkpoint.CheckpointManager] = None, epsilon: float = 1e-2, cost_fn: Optional[Type[costs.CostFn]] = None, scale_cost: Union[bool, int, float, @@ -92,7 +89,6 @@ def __init__( self.cost_fn = cost_fn self.scale_cost = scale_cost self.callback_fn = callback_fn - self.checkpoint_manager = checkpoint_manager self.rng = rng self.logging_freq = logging_freq self.num_eval_samples = num_eval_samples @@ -116,30 +112,33 @@ def _get_step_fn(self) -> Callable: def step_fn( rng: jax.Array, state_velocity_field: train_state.TrainState, - batch: Dict[str, jnp.ndarray], + source: jnp.ndarray, + target: jnp.ndarray, + source_conditions: Optional[jnp.ndarray], ) -> Tuple[Any, Any]: def loss_fn( - params: jnp.ndarray, t: jnp.ndarray, batch: Dict[str, jnp.ndarray], + params: jnp.ndarray, t: jnp.ndarray, source: jnp.ndarray, + target: jnp.ndarray, source_conditions: Optional[jnp.ndarray], rng: jax.Array ) -> jnp.ndarray: - x_t = self.flow.compute_xt( - rng, t, batch["source_lin"], batch["target_lin"] - ) + x_t = self.flow.compute_xt(rng, t, source, target) apply_fn = 
functools.partial( state_velocity_field.apply_fn, {"params": params} ) - v_t = jax.vmap(apply_fn - )(t=t, x=x_t, condition=batch["source_conditions"]) - u_t = self.flow.compute_ut(t, batch["source_lin"], batch["target_lin"]) + v_t = jax.vmap(apply_fn)(t=t, x=x_t, condition=source_conditions) + u_t = self.flow.compute_ut(t, source, target) return jnp.mean((v_t - u_t) ** 2) - batch_size = len(batch["source_lin"]) + batch_size = len(source) key_t, key_model = jax.random.split(rng, 2) t = self.time_sampler(key_t, batch_size) grad_fn = jax.value_and_grad(loss_fn) - loss, grads = grad_fn(state_velocity_field.params, t, batch, key_model) + loss, grads = grad_fn( + state_velocity_field.params, t, source, target, source_conditions, + key_model + ) return state_velocity_field.apply_gradients(grads=grads), loss return step_fn @@ -157,19 +156,16 @@ def __call__(self, train_loader, valid_loader): for iter in range(self.iterations): rng_resample, rng_step_fn, self.rng = jax.random.split(self.rng, 3) batch = next(train_loader) + source, source_conditions, target = batch["source_lin"], batch[ + "source_conditions"], batch["target_lin"] if self.ot_matcher is not None: - tmat = self.ot_matcher.match_fn( - batch["source_lin"], batch["target_lin"] + tmat = self.ot_matcher.match_fn(source, target) + (source, source_conditions), (target,) = self.ot_matcher._resample_data( + rng_resample, tmat, (source, source_conditions), (target,) ) - (batch["source_lin"], batch["source_conditions"] - ), (batch["target_lin"], - batch["target_conditions"]) = self.ot_matcher._resample_data( - rng_resample, tmat, - (batch["source_lin"], batch["source_conditions"]), - (batch["target_lin"], batch["target_conditions"]) - ) self.state_velocity_field, loss = self.step_fn( - rng_step_fn, self.state_velocity_field, batch + rng_step_fn, self.state_velocity_field, source, target, + source_conditions ) curr_loss += loss if iter % self.logging_freq == 0: @@ -181,9 +177,9 @@ def __call__(self, train_loader, 
valid_loader): self.unbalancedness_handler.state_xi, eta_predictions, xi_predictions, loss_a, loss_b ) = self.unbalancedness_handler.step_fn( - source=batch["source_lin"], - target=batch["target_lin"], - condition=batch["source_conditions"], + source=source, + target=target, + condition=source_conditions, a=tmat.sum(axis=1), b=tmat.sum(axis=0), state_eta=self.unbalancedness_handler.state_eta, diff --git a/src/ott/neural/models/base_solver.py b/src/ott/neural/models/base_solver.py index a4238cd05..a55370403 100644 --- a/src/ott/neural/models/base_solver.py +++ b/src/ott/neural/models/base_solver.py @@ -159,10 +159,10 @@ def _sample_conditional_indices_from_tmap( self, rng: jax.Array, tmat: jnp.ndarray, + *, k_samples_per_x: Union[int, jnp.ndarray], source_arrays: Tuple[jnp.ndarray, ...], target_arrays: Tuple[jnp.ndarray, ...], - *, source_is_balanced: bool, ) -> Tuple[jnp.array, jnp.array]: batch_size = tmat.shape[0] diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 7ab2a957c..9880f3f2b 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -400,6 +400,9 @@ def test_genot_fused_conditional( self, genot_data_loader_fused_conditional: Iterator, k_samples_per_x: int, solver_latent_to_data: Optional[str] ): + solver_latent_to_data = ( + None if solver_latent_to_data is None else sinkhorn.Sinkhorn() + ) matcher_latent_to_data = base_solver.OTMatcherLinear(solver_latent_to_data) batch = next(genot_data_loader_fused_conditional) source_lin, source_quad, target_lin, target_quad, source_condition = batch[ From 525ef64a14b550a0e39ad2640f3f7916dfffb46b Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Sun, 11 Feb 2024 13:13:24 +0100 Subject: [PATCH 087/186] change logic in match_latent_to_data in genot --- src/ott/neural/flows/genot.py | 8 +++---- src/ott/neural/flows/otfm.py | 2 +- tests/neural/genot_test.py | 40 +++++++++++++++++++++++------------ 3 files changed, 32 insertions(+), 18 deletions(-) diff --git 
a/src/ott/neural/flows/genot.py b/src/ott/neural/flows/genot.py index 7a2989b0e..267818cc3 100644 --- a/src/ott/neural/flows/genot.py +++ b/src/ott/neural/flows/genot.py @@ -34,8 +34,8 @@ class GENOTBase: GENOT (Generative Entropic Neural Optimal Transport) is a neural solver for entropic OT prooblems, in the linear - (:class:`ott.neural.flows.genot.GENOTLin`), the Gromov-Wasserstein, and - the Fused Gromov-Wasserstein ((:class:`ott.neural.flows.genot.GENOTQUad`)) + (:class:`ott.neural.flows.genot.GENOTLin`), the Gromov-Wasserstein, and + the Fused Gromov-Wasserstein ((:class:`ott.neural.flows.genot.GENOTQUad`)) setting. Args: @@ -317,7 +317,7 @@ def __call__(self, train_loader, valid_loader): source_is_balanced=(self.unbalancedness_handler.tau_a == 1.0) ) - if self.matcher_latent_to_data.match_fn is not None: + if self.matcher_latent_to_data is not None: tmats_latent_data = jnp.array( jax.vmap(self.matcher_latent_to_data.match_fn, 0, 0)(x=latent, y=target) @@ -413,7 +413,7 @@ def __call__(self, train_loader, valid_loader): ) ) - if self.matcher_latent_to_data.match_fn is not None: + if self.matcher_latent_to_data is not None: tmats_latent_data = jnp.array( jax.vmap(self.matcher_latent_to_data.match_fn, 0, 0)(x=latent, y=target) diff --git a/src/ott/neural/flows/otfm.py b/src/ott/neural/flows/otfm.py index 0027bf345..0e8f616f9 100644 --- a/src/ott/neural/flows/otfm.py +++ b/src/ott/neural/flows/otfm.py @@ -61,7 +61,7 @@ def __init__( flow: Type[flows.BaseFlow], time_sampler: Callable[[jax.Array, int], jnp.ndarray], optimizer: optax.GradientTransformation, - ot_matcher: base_solver.OTMatcherLinear, + ot_matcher: Optional[base_solver.OTMatcherLinear], unbalancedness_handler: base_solver.UnbalancednessHandler, epsilon: float = 1e-2, cost_fn: Optional[Type[costs.CostFn]] = None, diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 9880f3f2b..00e0b2d46 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -41,8 +41,9 @@ def 
test_genot_linear_unconditional( scale_cost: Union[float, Literal["mean"]], k_samples_per_x: int, solver_latent_to_data: Optional[str] ): - matcher_latent_to_data = base_solver.OTMatcherLinear( - None if solver_latent_to_data is None else sinkhorn.Sinkhorn() + matcher_latent_to_data = ( + None if solver_latent_to_data is None else + base_solver.OTMatcherLinear(sinkhorn.Sinkhorn()) ) batch = next(genot_data_loader_linear) source_lin, target_lin, source_condition = batch["source_lin"], batch[ @@ -98,9 +99,11 @@ def test_genot_linear_conditional( self, genot_data_loader_linear_conditional: Iterator, k_samples_per_x: int, solver_latent_to_data: Optional[str] ): - matcher_latent_to_data = base_solver.OTMatcherLinear( - None if solver_latent_to_data is None else sinkhorn.Sinkhorn() + matcher_latent_to_data = ( + None if solver_latent_to_data is None else + base_solver.OTMatcherLinear(sinkhorn.Sinkhorn()) ) + batch = next(genot_data_loader_linear_conditional) source_lin, target_lin, source_condition = batch["source_lin"], batch[ "target_lin"], batch["source_conditions"] @@ -154,9 +157,11 @@ def test_genot_linear_learn_rescaling( solver_latent_to_data: Optional[str], genot_data_loader_linear_conditional: Iterator ): - matcher_latent_to_data = base_solver.OTMatcherLinear( - None if solver_latent_to_data is None else sinkhorn.Sinkhorn() + matcher_latent_to_data = ( + None if solver_latent_to_data is None else + base_solver.OTMatcherLinear(sinkhorn.Sinkhorn()) ) + data_loader = ( genot_data_loader_linear_conditional if conditional else genot_data_loader_linear @@ -236,9 +241,11 @@ def test_genot_quad_unconditional( self, genot_data_loader_quad: Iterator, k_samples_per_x: int, solver_latent_to_data: Optional[str] ): - matcher_latent_to_data = base_solver.OTMatcherLinear( - None if solver_latent_to_data is None else sinkhorn.Sinkhorn() + matcher_latent_to_data = ( + None if solver_latent_to_data is None else + base_solver.OTMatcherLinear(sinkhorn.Sinkhorn()) ) + batch = 
next(genot_data_loader_quad) source_quad, target_quad, source_condition = batch["source_quad"], batch[ "target_quad"], batch["source_conditions"] @@ -290,9 +297,11 @@ def test_genot_fused_unconditional( self, genot_data_loader_fused: Iterator, k_samples_per_x: int, solver_latent_to_data: Optional[str] ): - matcher_latent_to_data = base_solver.OTMatcherLinear( - None if solver_latent_to_data is None else sinkhorn.Sinkhorn() + matcher_latent_to_data = ( + None if solver_latent_to_data is None else + base_solver.OTMatcherLinear(sinkhorn.Sinkhorn()) ) + batch = next(genot_data_loader_fused) source_lin, source_quad, target_lin, target_quad, source_condition = batch[ "source_lin"], batch["source_quad"], batch["target_lin"], batch[ @@ -345,9 +354,11 @@ def test_genot_quad_conditional( self, genot_data_loader_quad_conditional: Iterator, k_samples_per_x: int, solver_latent_to_data: Optional[str] ): - matcher_latent_to_data = base_solver.OTMatcherLinear( - None if solver_latent_to_data is None else sinkhorn.Sinkhorn() + matcher_latent_to_data = ( + None if solver_latent_to_data is None else + base_solver.OTMatcherLinear(sinkhorn.Sinkhorn()) ) + batch = next(genot_data_loader_quad_conditional) source_quad, target_quad, source_condition = batch["source_quad"], batch[ "target_quad"], batch["source_conditions"] @@ -403,7 +414,10 @@ def test_genot_fused_conditional( solver_latent_to_data = ( None if solver_latent_to_data is None else sinkhorn.Sinkhorn() ) - matcher_latent_to_data = base_solver.OTMatcherLinear(solver_latent_to_data) + matcher_latent_to_data = ( + None if solver_latent_to_data is None else + base_solver.OTMatcherLinear(sinkhorn.Sinkhorn()) + ) batch = next(genot_data_loader_fused_conditional) source_lin, source_quad, target_lin, target_quad, source_condition = batch[ "source_lin"], batch["source_quad"], batch["target_lin"], batch[ From 1b30c115d27c51b7cf5ad66ab98e8dfee82892ca Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Sun, 11 Feb 2024 16:05:06 +0100 
Subject: [PATCH 088/186] change data loaders / data sets --- src/ott/neural/data/dataloaders.py | 72 ++++++-------- src/ott/neural/flows/genot.py | 148 ++++++++++++++++------------- src/ott/neural/flows/otfm.py | 81 ++++++++-------- tests/neural/conftest.py | 46 +++++---- tests/neural/genot_test.py | 95 ++++++++++-------- tests/neural/otfm_test.py | 57 +++++++---- 6 files changed, 271 insertions(+), 228 deletions(-) diff --git a/src/ott/neural/data/dataloaders.py b/src/ott/neural/data/dataloaders.py index 68da7de6e..e063deefd 100644 --- a/src/ott/neural/data/dataloaders.py +++ b/src/ott/neural/data/dataloaders.py @@ -11,39 +11,34 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import Dict, Iterator, Mapping, Optional +from typing import Any, List, Mapping, Optional import numpy as np -__all__ = ["OTDataLoader", "ConditionalDataLoader"] +__all__ = ["OTDataSet", "ConditionalOTDataLoader"] -class OTDataLoader: - """Data loader for OT problems. +class OTDataSet: + """Data set for OT problems. Args: - batch_size: Number of samples per batch. source_lin: Linear part of the source measure. source_quad: Quadratic part of the source measure. target_lin: Linear part of the target measure. target_quad: Quadratic part of the target measure. source_conditions: Conditions of the source measure. target_conditions: Conditions of the target measure. - seed: Random seed. 
""" def __init__( self, - batch_size: int = 64, source_lin: Optional[np.ndarray] = None, source_quad: Optional[np.ndarray] = None, target_lin: Optional[np.ndarray] = None, target_quad: Optional[np.ndarray] = None, source_conditions: Optional[np.ndarray] = None, target_conditions: Optional[np.ndarray] = None, - seed: int = 0, ): - super().__init__() if source_lin is not None: if source_quad is not None: assert len(source_lin) == len(source_quad) @@ -71,60 +66,53 @@ def __init__( self.target_quad = target_quad self.source_conditions = source_conditions self.target_conditions = target_conditions - self.batch_size = batch_size - self.rng = np.random.default_rng(seed=seed) - def __next__(self) -> Mapping[str, np.ndarray]: - inds_source = self.rng.choice(self.n_source, size=[self.batch_size]) - inds_target = self.rng.choice(self.n_target, size=[self.batch_size]) + def __getitem__(self, idx: np.ndarray) -> Mapping[str, np.ndarray]: return { "source_lin": - self.source_lin[inds_source, :] - if self.source_lin is not None else None, + self.source_lin[idx] if self.source_lin is not None else [], "source_quad": - self.source_quad[inds_source, :] - if self.source_quad is not None else None, + self.source_quad[idx] if self.source_quad is not None else [], "target_lin": - self.target_lin[inds_target, :] - if self.target_lin is not None else None, + self.target_lin[idx] if self.target_lin is not None else [], "target_quad": - self.target_quad[inds_target, :] - if self.target_quad is not None else None, + self.target_quad[idx] if self.target_quad is not None else [], "source_conditions": - self.source_conditions[inds_source, :] - if self.source_conditions is not None else None, + self.source_conditions[idx] + if self.source_conditions is not None else [], "target_conditions": - self.target_conditions[inds_target, :] - if self.target_conditions is not None else None, + self.target_conditions[idx] + if self.target_conditions is not None else [], } + def __len__(self): + return 
len(self.source_lin + ) if self.source_lin is not None else len(self.source_quad) -class ConditionalDataLoader: + +class ConditionalOTDataLoader: """Data loader for OT problems with conditions. - This data loader wraps several data loaders and samples from them according - to their conditions. + This data loader wraps several data loaders and samples from them. Args: - dataloaders: Dictionary of data loaders with keys corresponding to - conditions. - p: Probability of sampling from each data loader. + dataloaders: List of data loaders. seed: Random seed. """ def __init__( - self, dataloaders: Dict[str, Iterator], p: np.ndarray, seed: int = 0 + self, + dataloaders: List[Any], + seed: int = 0 # dataloader should subclass torch dataloader ): super().__init__() self.dataloaders = dataloaders - self.conditions = list(dataloaders.keys()) - self.p = p + self.conditions = list(dataloaders) self.rng = np.random.default_rng(seed=seed) - def __next__(self, cond: str = None) -> Mapping[str, np.ndarray]: - if cond is not None: - if cond not in self.conditions: - raise ValueError(f"Condition {cond} not in {self.conditions}") - return next(self.dataloaders[cond]) - idx = self.rng.choice(len(self.conditions), p=self.p) - return next(self.dataloaders[self.conditions[idx]]) + def __next__(self) -> Mapping[str, np.ndarray]: + idx = self.rng.choice(len(self.conditions)) + return next(iter(self.dataloaders[idx])) + + def __iter__(self) -> "ConditionalOTDataLoader": + return self diff --git a/src/ott/neural/flows/genot.py b/src/ott/neural/flows/genot.py index 267818cc3..0ab1b4a87 100644 --- a/src/ott/neural/flows/genot.py +++ b/src/ott/neural/flows/genot.py @@ -251,8 +251,7 @@ def solve_ode(input: jnp.ndarray, cond: jnp.ndarray) -> jnp.ndarray: return jax.vmap(solve_ode)(latent_batch, cond_input) def _valid_step(self, valid_loader, iter): - """TODO.""" - next(valid_loader) + pass @property def learn_rescaling(self) -> bool: @@ -284,76 +283,85 @@ def __call__(self, train_loader, 
valid_loader): train_loader: Data loader for the training data. valid_loader: Data loader for the validation data. """ - batch: Dict[str, jnp.array] = {} - for iteration in range(self.iterations): - batch = next(train_loader) - - ( - self.rng, rng_time, rng_resample, rng_noise, rng_latent_data_match, - rng_step_fn - ) = jax.random.split(self.rng, 6) - source, source_conditions, target = batch["source_lin"], batch[ - "source_conditions"], batch["target_lin"] - - batch_size = len(batch["source_lin"]) - n_samples = batch_size * self.k_samples_per_x - time = self.time_sampler(rng_time, n_samples) - latent = self.latent_noise_fn( - rng_noise, shape=(self.k_samples_per_x, batch_size) - ) - - tmat = self.ot_matcher.match_fn( - source, - target, - ) - - (source, source_conditions - ), (target,) = self.ot_matcher._sample_conditional_indices_from_tmap( - rng=rng_resample, - tmat=tmat, - k_samples_per_x=self.k_samples_per_x, - source_arrays=(source, source_conditions), - target_arrays=(target,), - source_is_balanced=(self.unbalancedness_handler.tau_a == 1.0) - ) - - if self.matcher_latent_to_data is not None: - tmats_latent_data = jnp.array( - jax.vmap(self.matcher_latent_to_data.match_fn, 0, - 0)(x=latent, y=target) + iter = -1 + while True: + for batch in train_loader: + iter += 1 + if iter >= self.iterations: + stop = True + break + ( + self.rng, rng_time, rng_resample, rng_noise, rng_latent_data_match, + rng_step_fn + ) = jax.random.split(self.rng, 6) + source, source_conditions, target = jnp.array( + batch["source_lin"] + ), jnp.array(batch["source_conditions"] + ) if len(batch["source_conditions"]) else None, jnp.array( + batch["target_lin"] + ) + + batch_size = len(source) + n_samples = batch_size * self.k_samples_per_x + time = self.time_sampler(rng_time, n_samples) + latent = self.latent_noise_fn( + rng_noise, shape=(self.k_samples_per_x, batch_size) ) - rng_latent_data_match = jax.random.split( - rng_latent_data_match, self.k_samples_per_x + tmat = 
self.ot_matcher.match_fn( + source, + target, ) + (source, source_conditions - ), (target,) = jax.vmap(self.ot_matcher._resample_data, 0, 0)( - rng_latent_data_match, tmats_latent_data, - (source, source_conditions), (target,) + ), (target,) = self.ot_matcher._sample_conditional_indices_from_tmap( + rng=rng_resample, + tmat=tmat, + k_samples_per_x=self.k_samples_per_x, + source_arrays=(source, source_conditions), + target_arrays=(target,), + source_is_balanced=(self.unbalancedness_handler.tau_a == 1.0) ) - source, source_conditions, target, latent = self._reshape_samples( - (source, source_conditions, target, latent), batch_size - ) - self.state_velocity_field, loss = self.step_fn( - rng_step_fn, self.state_velocity_field, time, source, target, latent, - source_conditions - ) - if self.learn_rescaling: - ( - self.state_eta, self.state_xi, eta_predictions, xi_predictions, - loss_a, loss_b - ) = self.unbalancedness_handler.step_fn( - source=source, - target=target, - condition=source_conditions, - a=tmat.sum(axis=1), - b=tmat.sum(axis=0), - state_eta=self.unbalancedness_handler.state_eta, - state_xi=self.unbalancedness_handler.state_xi, + if self.matcher_latent_to_data is not None: + tmats_latent_data = jnp.array( + jax.vmap(self.matcher_latent_to_data.match_fn, 0, + 0)(x=latent, y=target) + ) + + rng_latent_data_match = jax.random.split( + rng_latent_data_match, self.k_samples_per_x + ) + (source, source_conditions + ), (target,) = jax.vmap(self.ot_matcher._resample_data, 0, 0)( + rng_latent_data_match, tmats_latent_data, + (source, source_conditions), (target,) + ) + + source, source_conditions, target, latent = self._reshape_samples( + (source, source_conditions, target, latent), batch_size ) - if iteration % self.valid_freq == 0: - self._valid_step(valid_loader, iteration) + self.state_velocity_field, loss = self.step_fn( + rng_step_fn, self.state_velocity_field, time, source, target, + latent, source_conditions + ) + if self.learn_rescaling: + ( + 
self.state_eta, self.state_xi, eta_predictions, xi_predictions, + loss_a, loss_b + ) = self.unbalancedness_handler.step_fn( + source=source, + target=target, + condition=source_conditions, + a=tmat.sum(axis=1), + b=tmat.sum(axis=0), + state_eta=self.unbalancedness_handler.state_eta, + state_xi=self.unbalancedness_handler.state_xi, + ) + if iter % self.valid_freq == 0: + self._valid_step(valid_loader, iter) + if stop: + break class GENOTQuad(GENOTBase): @@ -374,15 +382,19 @@ def __call__(self, train_loader, valid_loader): """ batch: Dict[str, jnp.array] = {} for iteration in range(self.iterations): - batch = next(train_loader) + batch = next(iter(train_loader)) ( self.rng, rng_time, rng_resample, rng_noise, rng_latent_data_match, rng_step_fn ) = jax.random.split(self.rng, 6) (source_lin, source_quad, source_conditions, target_lin, target_quad) = ( - batch["source_lin"], batch["source_quad"], batch["source_conditions"], - batch["target_lin"], batch["target_quad"] + jnp.array(batch["source_lin"]) if len(batch["source_lin"]) else None, + jnp.array(batch["source_quad"]), + jnp.array(batch["source_conditions"]) + if len(batch["source_conditions"]) else None, + jnp.array(batch["target_lin"]) if len(batch["target_lin"]) else None, + jnp.array(batch["target_quad"]) ) batch_size = len(source_quad) n_samples = batch_size * self.k_samples_per_x diff --git a/src/ott/neural/flows/otfm.py b/src/ott/neural/flows/otfm.py index 0e8f616f9..69bad0e69 100644 --- a/src/ott/neural/flows/otfm.py +++ b/src/ott/neural/flows/otfm.py @@ -151,42 +151,50 @@ def __call__(self, train_loader, valid_loader): valid_loader: Dataloader for the validation data. 
""" batch: Mapping[str, jnp.ndarray] = {} - curr_loss = 0.0 - - for iter in range(self.iterations): - rng_resample, rng_step_fn, self.rng = jax.random.split(self.rng, 3) - batch = next(train_loader) - source, source_conditions, target = batch["source_lin"], batch[ - "source_conditions"], batch["target_lin"] - if self.ot_matcher is not None: - tmat = self.ot_matcher.match_fn(source, target) - (source, source_conditions), (target,) = self.ot_matcher._resample_data( - rng_resample, tmat, (source, source_conditions), (target,) - ) - self.state_velocity_field, loss = self.step_fn( - rng_step_fn, self.state_velocity_field, source, target, - source_conditions - ) - curr_loss += loss - if iter % self.logging_freq == 0: - self._training_logs["loss"].append(curr_loss / self.logging_freq) - curr_loss = 0.0 - if self.learn_rescaling: - ( - self.unbalancedness_handler.state_eta, - self.unbalancedness_handler.state_xi, eta_predictions, - xi_predictions, loss_a, loss_b - ) = self.unbalancedness_handler.step_fn( - source=source, - target=target, - condition=source_conditions, - a=tmat.sum(axis=1), - b=tmat.sum(axis=0), - state_eta=self.unbalancedness_handler.state_eta, - state_xi=self.unbalancedness_handler.state_xi, + + iter = -1 + while True: + for batch in train_loader: + iter += 1 + if iter >= self.iterations: + stop = True + break + rng_resample, rng_step_fn, self.rng = jax.random.split(self.rng, 3) + source, source_conditions, target = jnp.array( + batch["source_lin"] + ), jnp.array(batch["source_conditions"] + ) if batch["source_conditions"] else None, jnp.array( + batch["target_lin"] + ) + if self.ot_matcher is not None: + tmat = self.ot_matcher.match_fn(source, target) + (source, + source_conditions), (target,) = self.ot_matcher._resample_data( + rng_resample, tmat, (source, source_conditions), (target,) + ) + self.state_velocity_field, loss = self.step_fn( + rng_step_fn, self.state_velocity_field, source, target, + source_conditions ) - if iter % self.valid_freq == 0: - 
self._valid_step(valid_loader, iter) + self._training_logs["loss"].append(loss) + if self.learn_rescaling: + ( + self.unbalancedness_handler.state_eta, + self.unbalancedness_handler.state_xi, eta_predictions, + xi_predictions, loss_a, loss_b + ) = self.unbalancedness_handler.step_fn( + source=source, + target=target, + condition=source_conditions, + a=tmat.sum(axis=1), + b=tmat.sum(axis=0), + state_eta=self.unbalancedness_handler.state_eta, + state_xi=self.unbalancedness_handler.state_xi, + ) + if iter % self.valid_freq == 0: + self._valid_step(valid_loader, iter) + if stop: + break def transport( self, @@ -243,8 +251,7 @@ def solve_ode(input: jnp.ndarray, cond: jnp.ndarray) -> jnp.ndarray: return jax.vmap(solve_ode)(data, condition) def _valid_step(self, valid_loader, iter): - next(valid_loader) - # TODO: add callback and logging + pass @property def learn_rescaling(self) -> bool: diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index 05cd38af1..504de7e52 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -14,6 +14,8 @@ import pytest import numpy as np +import torch +from torch.utils.data import DataLoader as Torch_loader from ott.neural.data import dataloaders @@ -24,7 +26,8 @@ def data_loader_gaussian(): rng = np.random.default_rng(seed=0) source = rng.normal(size=(100, 2)) target = rng.normal(size=(100, 2)) + 1.0 - return dataloaders.OTDataLoader(16, source_lin=source, target_lin=target) + dataset = dataloaders.OTDataSet(source_lin=source, target_lin=target) + return Torch_loader(dataset, batch_size=16, shuffle=True) @pytest.fixture(scope="module") @@ -36,23 +39,22 @@ def data_loader_gaussian_conditional(): source_1 = rng.normal(size=(100, 2)) target_1 = rng.normal(size=(100, 2)) - 2.0 - dl0 = dataloaders.OTDataLoader( - 16, + ds0 = dataloaders.OTDataSet( source_lin=source_0, target_lin=target_0, source_conditions=np.zeros_like(source_0) * 0.0 ) - dl1 = dataloaders.OTDataLoader( - 16, + ds1 = dataloaders.OTDataSet( 
source_lin=source_1, target_lin=target_1, source_conditions=np.ones_like(source_1) * 1.0 ) + sampler0 = torch.utils.data.RandomSampler(ds0, replacement=True) + sampler1 = torch.utils.data.RandomSampler(ds1, replacement=True) + dl0 = Torch_loader(ds0, batch_size=16, sampler=sampler0) + dl1 = Torch_loader(ds1, batch_size=16, sampler=sampler1) - return dataloaders.ConditionalDataLoader({ - "0": dl0, - "1": dl1 - }, np.array([0.5, 0.5])) + return dataloaders.ConditionalOTDataLoader((dl0, dl1)) @pytest.fixture(scope="module") @@ -63,13 +65,14 @@ def data_loader_gaussian_with_conditions(): target = rng.normal(size=(100, 2)) + 1.0 source_conditions = rng.normal(size=(100, 1)) target_conditions = rng.normal(size=(100, 1)) - 1.0 - return dataloaders.OTDataLoader( - 16, + + dataset = dataloaders.OTDataSet( source_lin=source, target_lin=target, source_conditions=source_conditions, target_conditions=target_conditions ) + return Torch_loader(dataset, batch_size=16, shuffle=True) @pytest.fixture(scope="module") @@ -78,7 +81,8 @@ def genot_data_loader_linear(): rng = np.random.default_rng(seed=0) source = rng.normal(size=(100, 2)) target = rng.normal(size=(100, 2)) + 1.0 - return dataloaders.OTDataLoader(16, source_lin=source, target_lin=target) + dataset = dataloaders.OTDataSet(source_lin=source, target_lin=target) + return Torch_loader(dataset, batch_size=16, shuffle=True) @pytest.fixture(scope="module") @@ -88,8 +92,7 @@ def genot_data_loader_linear_conditional(): source = rng.normal(size=(100, 2)) target = rng.normal(size=(100, 2)) + 1.0 source_conditions = rng.normal(size=(100, 4)) - return dataloaders.OTDataLoader( - 16, + return dataloaders.OTDataSet( source_lin=source, target_lin=target, source_conditions=source_conditions, @@ -102,7 +105,8 @@ def genot_data_loader_quad(): rng = np.random.default_rng(seed=0) source = rng.normal(size=(100, 2)) target = rng.normal(size=(100, 1)) + 1.0 - return dataloaders.OTDataLoader(16, source_quad=source, target_quad=target) + dataset = 
dataloaders.OTDataSet(source_quad=source, target_quad=target) + return Torch_loader(dataset, batch_size=16, shuffle=True) @pytest.fixture(scope="module") @@ -112,12 +116,12 @@ def genot_data_loader_quad_conditional(): source = rng.normal(size=(100, 2)) target = rng.normal(size=(100, 1)) + 1.0 source_conditions = rng.normal(size=(100, 7)) - return dataloaders.OTDataLoader( - 16, + dataset = dataloaders.OTDataSet( source_quad=source, target_quad=target, source_conditions=source_conditions, ) + return Torch_loader(dataset, batch_size=16, shuffle=True) @pytest.fixture(scope="module") @@ -128,13 +132,13 @@ def genot_data_loader_fused(): target_q = rng.normal(size=(100, 1)) + 1.0 source_lin = rng.normal(size=(100, 2)) target_lin = rng.normal(size=(100, 2)) + 1.0 - return dataloaders.OTDataLoader( - 16, + dataset = dataloaders.OTDataSet( source_lin=source_lin, source_quad=source_q, target_lin=target_lin, target_quad=target_q ) + return Torch_loader(dataset, batch_size=16, shuffle=True) @pytest.fixture(scope="module") @@ -146,11 +150,11 @@ def genot_data_loader_fused_conditional(): source_lin = rng.normal(size=(100, 2)) target_lin = rng.normal(size=(100, 2)) + 1.0 source_conditions = rng.normal(size=(100, 7)) - return dataloaders.OTDataLoader( - 16, + dataset = dataloaders.OTDataSet( source_lin=source_lin, source_quad=source_q, target_lin=target_lin, target_quad=target_q, source_conditions=source_conditions, ) + return Torch_loader(dataset, batch_size=16, shuffle=True) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 00e0b2d46..0a030521c 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -45,9 +45,13 @@ def test_genot_linear_unconditional( None if solver_latent_to_data is None else base_solver.OTMatcherLinear(sinkhorn.Sinkhorn()) ) - batch = next(genot_data_loader_linear) - source_lin, target_lin, source_condition = batch["source_lin"], batch[ - "target_lin"], batch["source_conditions"] + batch = 
next(iter(genot_data_loader_linear)) + source_lin, source_conditions, target_lin = jnp.array( + batch["source_lin"] + ), jnp.array(batch["source_conditions"]) if len(batch["source_conditions"] + ) else None, jnp.array( + batch["target_lin"] + ) source_dim = source_lin.shape[1] target_dim = target_lin.shape[1] @@ -83,12 +87,9 @@ def test_genot_linear_unconditional( ) genot(genot_data_loader_linear, genot_data_loader_linear) - batch = next(genot_data_loader_linear) - source_lin, target_lin, source_condition = batch["source_lin"], batch[ - "target_lin"], batch["source_conditions"] - + batch = next(iter(genot_data_loader_linear)) result_forward = genot.transport( - source_lin, condition=source_condition, forward=True + source_lin, condition=source_conditions, forward=True ) assert isinstance(result_forward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_forward)) == 0 @@ -104,12 +105,16 @@ def test_genot_linear_conditional( base_solver.OTMatcherLinear(sinkhorn.Sinkhorn()) ) - batch = next(genot_data_loader_linear_conditional) - source_lin, target_lin, source_condition = batch["source_lin"], batch[ - "target_lin"], batch["source_conditions"] + batch = next(iter(genot_data_loader_linear_conditional)) + source_lin, source_conditions, target_lin = jnp.array( + batch["source_lin"] + ), jnp.array(batch["source_conditions"]) if len(batch["source_conditions"] + ) else None, jnp.array( + batch["target_lin"] + ) source_dim = source_lin.shape[1] target_dim = target_lin.shape[1] - condition_dim = source_condition.shape[1] + condition_dim = source_conditions.shape[1] neural_vf = VelocityField( output_dim=target_dim, @@ -145,7 +150,7 @@ def test_genot_linear_conditional( genot_data_loader_linear_conditional ) result_forward = genot.transport( - source_lin, condition=source_condition, forward=True + source_lin, condition=source_conditions, forward=True ) assert isinstance(result_forward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_forward)) == 0 @@ -167,9 +172,10 @@ def 
test_genot_linear_learn_rescaling( if conditional else genot_data_loader_linear ) - batch = next(data_loader) - source_lin, target_lin, source_condition = batch["source_lin"], batch[ - "target_lin"], batch["source_conditions"] + batch = next(iter(data_loader)) + source_lin, target_lin, source_condition = jnp.array( + batch["source_lin"] + ), jnp.array(batch["target_lin"]), jnp.array(batch["source_conditions"]) source_dim = source_lin.shape[1] target_dim = target_lin.shape[1] @@ -246,10 +252,12 @@ def test_genot_quad_unconditional( base_solver.OTMatcherLinear(sinkhorn.Sinkhorn()) ) - batch = next(genot_data_loader_quad) - source_quad, target_quad, source_condition = batch["source_quad"], batch[ - "target_quad"], batch["source_conditions"] - + batch = next(iter(genot_data_loader_quad)) + (source_quad, source_conditions, target_quad) = ( + jnp.array(batch["source_quad"]), jnp.array(batch["source_conditions"]) + if len(batch["source_conditions"]) else None, + jnp.array(batch["target_quad"]) + ) source_dim = source_quad.shape[1] target_dim = target_quad.shape[1] condition_dim = 0 @@ -286,7 +294,7 @@ def test_genot_quad_unconditional( genot(genot_data_loader_quad, genot_data_loader_quad) result_forward = genot.transport( - source_quad, condition=source_condition, forward=True + source_quad, condition=source_conditions, forward=True ) assert isinstance(result_forward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_forward)) == 0 @@ -302,11 +310,14 @@ def test_genot_fused_unconditional( base_solver.OTMatcherLinear(sinkhorn.Sinkhorn()) ) - batch = next(genot_data_loader_fused) - source_lin, source_quad, target_lin, target_quad, source_condition = batch[ - "source_lin"], batch["source_quad"], batch["target_lin"], batch[ - "target_quad"], batch["source_conditions"] - + batch = next(iter(genot_data_loader_fused)) + (source_lin, source_quad, source_conditions, target_lin, target_quad) = ( + jnp.array(batch["source_lin"]) if len(batch["source_lin"]) else None, + 
jnp.array(batch["source_quad"]), jnp.array(batch["source_conditions"]) + if len(batch["source_conditions"]) else None, + jnp.array(batch["target_lin"]) if len(batch["target_lin"]) else None, + jnp.array(batch["target_quad"]) + ) source_dim = source_lin.shape[1] + source_quad.shape[1] target_dim = target_lin.shape[1] + target_quad.shape[1] condition_dim = 0 @@ -342,7 +353,7 @@ def test_genot_fused_unconditional( result_forward = genot.transport( jnp.concatenate((source_lin, source_quad), axis=1), - condition=source_condition, + condition=source_conditions, forward=True ) assert isinstance(result_forward, jnp.ndarray) @@ -359,13 +370,15 @@ def test_genot_quad_conditional( base_solver.OTMatcherLinear(sinkhorn.Sinkhorn()) ) - batch = next(genot_data_loader_quad_conditional) - source_quad, target_quad, source_condition = batch["source_quad"], batch[ - "target_quad"], batch["source_conditions"] - + batch = next(iter(genot_data_loader_quad_conditional)) + (source_quad, source_conditions, target_quad) = ( + jnp.array(batch["source_quad"]), jnp.array(batch["source_conditions"]) + if len(batch["source_conditions"]) else None, + jnp.array(batch["target_quad"]) + ) source_dim = source_quad.shape[1] target_dim = target_quad.shape[1] - condition_dim = source_condition.shape[1] + condition_dim = source_conditions.shape[1] neural_vf = VelocityField( output_dim=target_dim, condition_dim=source_dim + condition_dim, @@ -400,7 +413,7 @@ def test_genot_quad_conditional( ) result_forward = genot.transport( - source_quad, condition=source_condition, forward=True + source_quad, condition=source_conditions, forward=True ) assert isinstance(result_forward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_forward)) == 0 @@ -418,13 +431,17 @@ def test_genot_fused_conditional( None if solver_latent_to_data is None else base_solver.OTMatcherLinear(sinkhorn.Sinkhorn()) ) - batch = next(genot_data_loader_fused_conditional) - source_lin, source_quad, target_lin, target_quad, source_condition = batch[ 
- "source_lin"], batch["source_quad"], batch["target_lin"], batch[ - "target_quad"], batch["source_conditions"] + batch = next(iter(genot_data_loader_fused_conditional)) + (source_lin, source_quad, source_conditions, target_lin, target_quad) = ( + jnp.array(batch["source_lin"]) if len(batch["source_lin"]) else None, + jnp.array(batch["source_quad"]), jnp.array(batch["source_conditions"]) + if len(batch["source_conditions"]) else None, + jnp.array(batch["target_lin"]) if len(batch["target_lin"]) else None, + jnp.array(batch["target_quad"]) + ) source_dim = source_lin.shape[1] + source_quad.shape[1] target_dim = target_lin.shape[1] + target_quad.shape[1] - condition_dim = source_condition.shape[1] + condition_dim = source_conditions.shape[1] neural_vf = VelocityField( output_dim=target_dim, condition_dim=source_dim + condition_dim, @@ -460,7 +477,7 @@ def test_genot_fused_conditional( result_forward = genot.transport( jnp.concatenate((source_lin, source_quad), axis=1), - condition=source_condition, + condition=source_conditions, forward=True ) assert isinstance(result_forward, jnp.ndarray) diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index 9452c2faa..d660ec33f 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -35,7 +35,7 @@ class TestOTFlowMatching: flows.BrownianNoiseFlow(0.2) ] ) - def test_flow_matching( + def test_flow_matching_unconditional( self, data_loader_gaussian, flow: Type[flows.BaseFlow] ): input_dim = 2 @@ -66,17 +66,20 @@ def test_flow_matching( ) fm(data_loader_gaussian, data_loader_gaussian) - batch = next(data_loader_gaussian) + batch = next(iter(data_loader_gaussian)) + source = jnp.asarray(batch["source_lin"]) + target = jnp.asarray(batch["target_lin"]) + source_conditions = jnp.asarray(batch["source_conditions"]) if len( + batch["source_conditions"] + ) > 0 else None result_forward = fm.transport( - batch["source_lin"], condition=batch["source_conditions"], forward=True + source, 
condition=source_conditions, forward=True ) assert isinstance(result_forward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_forward)) == 0 result_backward = fm.transport( - batch["target_lin"], - condition=batch["source_conditions"], - forward=False + target, condition=source_conditions, forward=False ) assert isinstance(result_backward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_backward)) == 0 @@ -123,17 +126,20 @@ def test_flow_matching_with_conditions( data_loader_gaussian_with_conditions ) - batch = next(data_loader_gaussian_with_conditions) + batch = next(iter(data_loader_gaussian_with_conditions)) + source = jnp.asarray(batch["source_lin"]) + target = jnp.asarray(batch["target_lin"]) + source_conditions = jnp.asarray(batch["source_conditions"]) if len( + batch["source_conditions"] + ) > 0 else None result_forward = fm.transport( - batch["source_lin"], condition=batch["source_conditions"], forward=True + source, condition=source_conditions, forward=True ) assert isinstance(result_forward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_forward)) == 0 result_backward = fm.transport( - batch["target_lin"], - condition=batch["source_conditions"], - forward=False + target, condition=source_conditions, forward=False ) assert isinstance(result_backward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_backward)) == 0 @@ -177,17 +183,20 @@ def test_flow_matching_conditional( ) fm(data_loader_gaussian_conditional, data_loader_gaussian_conditional) - batch = next(data_loader_gaussian_conditional) + batch = next(iter(data_loader_gaussian_conditional)) + source = jnp.asarray(batch["source_lin"]) + target = jnp.asarray(batch["target_lin"]) + source_conditions = jnp.asarray(batch["source_conditions"]) if len( + batch["source_conditions"] + ) > 0 else None result_forward = fm.transport( - batch["source_lin"], condition=batch["source_conditions"], forward=True + source, condition=source_conditions, forward=True ) assert isinstance(result_forward, jnp.ndarray) assert 
jnp.sum(jnp.isnan(result_forward)) == 0 result_backward = fm.transport( - batch["target_lin"], - condition=batch["source_conditions"], - forward=False + target, condition=source_conditions, forward=False ) assert isinstance(result_backward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_backward)) == 0 @@ -201,9 +210,15 @@ def test_flow_matching_learn_rescaling( data_loader_gaussian_conditional if conditional else data_loader_gaussian ) - batch = next(data_loader) - source_dim = batch["source_lin"].shape[1] - condition_dim = batch["source_conditions"].shape[1] if conditional else 0 + batch = next(iter(data_loader)) + source = jnp.asarray(batch["source_lin"]) + target = jnp.asarray(batch["target_lin"]) + source_conditions = jnp.asarray(batch["source_conditions"]) if len( + batch["source_conditions"] + ) > 0 else None + + source_dim = source.shape[1] + condition_dim = source_conditions.shape[1] if conditional else 0 neural_vf = models.VelocityField( output_dim=2, condition_dim=0, @@ -249,13 +264,13 @@ def test_flow_matching_learn_rescaling( fm(data_loader, data_loader) result_eta = fm.unbalancedness_handler.evaluate_eta( - batch["source_lin"], condition=batch["source_conditions"] + source, condition=source_conditions ) assert isinstance(result_eta, jnp.ndarray) assert jnp.sum(jnp.isnan(result_eta)) == 0 result_xi = fm.unbalancedness_handler.evaluate_xi( - batch["target_lin"], condition=batch["source_conditions"] + target, condition=source_conditions ) assert isinstance(result_xi, jnp.ndarray) assert jnp.sum(jnp.isnan(result_xi)) == 0 From e2ebb19ef79514bafad063f931f7f0d47a946d91 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Sun, 11 Feb 2024 17:54:30 +0100 Subject: [PATCH 089/186] finish data loader refactoring --- tests/neural/conftest.py | 93 +++++++++++++++++++++++++++----------- tests/neural/genot_test.py | 1 + 2 files changed, 67 insertions(+), 27 deletions(-) diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index 504de7e52..f33252f07 100644 
--- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -89,14 +89,26 @@ def genot_data_loader_linear(): def genot_data_loader_linear_conditional(): """Returns a data loader for a simple Gaussian mixture.""" rng = np.random.default_rng(seed=0) - source = rng.normal(size=(100, 2)) - target = rng.normal(size=(100, 2)) + 1.0 - source_conditions = rng.normal(size=(100, 4)) - return dataloaders.OTDataSet( - source_lin=source, - target_lin=target, - source_conditions=source_conditions, + source_0 = rng.normal(size=(100, 2)) + target_0 = rng.normal(size=(100, 2)) + 1.0 + source_1 = rng.normal(size=(100, 2)) + target_1 = rng.normal(size=(100, 2)) + 1.0 + ds0 = dataloaders.OTDataSet( + source_lin=source_0, + target_lin=target_0, + source_conditions=np.zeros_like(source_0) * 0.0 + ) + ds1 = dataloaders.OTDataSet( + source_lin=source_1, + target_lin=target_1, + source_conditions=np.ones_like(source_1) * 1.0 ) + sampler0 = torch.utils.data.RandomSampler(ds0, replacement=True) + sampler1 = torch.utils.data.RandomSampler(ds1, replacement=True) + dl0 = Torch_loader(ds0, batch_size=16, sampler=sampler0) + dl1 = Torch_loader(ds1, batch_size=16, sampler=sampler1) + + return dataloaders.ConditionalOTDataLoader((dl0, dl1)) @pytest.fixture(scope="module") @@ -113,15 +125,26 @@ def genot_data_loader_quad(): def genot_data_loader_quad_conditional(): """Returns a data loader for a simple Gaussian mixture.""" rng = np.random.default_rng(seed=0) - source = rng.normal(size=(100, 2)) - target = rng.normal(size=(100, 1)) + 1.0 - source_conditions = rng.normal(size=(100, 7)) - dataset = dataloaders.OTDataSet( - source_quad=source, - target_quad=target, - source_conditions=source_conditions, + source_0 = rng.normal(size=(100, 2)) + target_0 = rng.normal(size=(100, 1)) + 1.0 + source_1 = rng.normal(size=(100, 2)) + target_1 = rng.normal(size=(100, 1)) + 1.0 + ds0 = dataloaders.OTDataSet( + source_quad=source_0, + target_quad=target_0, + source_conditions=np.zeros_like(source_0) * 0.0 ) - 
return Torch_loader(dataset, batch_size=16, shuffle=True) + ds1 = dataloaders.OTDataSet( + source_quad=source_1, + target_quad=target_1, + source_conditions=np.ones_like(source_1) * 1.0 + ) + sampler0 = torch.utils.data.RandomSampler(ds0, replacement=True) + sampler1 = torch.utils.data.RandomSampler(ds1, replacement=True) + dl0 = Torch_loader(ds0, batch_size=16, sampler=sampler0) + dl1 = Torch_loader(ds1, batch_size=16, sampler=sampler1) + + return dataloaders.ConditionalOTDataLoader((dl0, dl1)) @pytest.fixture(scope="module") @@ -145,16 +168,32 @@ def genot_data_loader_fused(): def genot_data_loader_fused_conditional(): """Returns a data loader for a simple Gaussian mixture.""" rng = np.random.default_rng(seed=0) - source_q = rng.normal(size=(100, 2)) - target_q = rng.normal(size=(100, 1)) + 1.0 - source_lin = rng.normal(size=(100, 2)) - target_lin = rng.normal(size=(100, 2)) + 1.0 - source_conditions = rng.normal(size=(100, 7)) - dataset = dataloaders.OTDataSet( - source_lin=source_lin, - source_quad=source_q, - target_lin=target_lin, - target_quad=target_q, - source_conditions=source_conditions, + source_q_0 = rng.normal(size=(100, 2)) + target_q_0 = rng.normal(size=(100, 1)) + 1.0 + source_lin_0 = rng.normal(size=(100, 2)) + target_lin_0 = rng.normal(size=(100, 2)) + 1.0 + + source_q_1 = 2 * rng.normal(size=(100, 2)) + target_q_1 = 2 * rng.normal(size=(100, 1)) + 1.0 + source_lin_1 = 2 * rng.normal(size=(100, 2)) + target_lin_1 = 2 * rng.normal(size=(100, 2)) + 1.0 + + ds0 = dataloaders.OTDataSet( + source_lin=source_lin_0, + target_lin=target_lin_0, + source_quad=source_q_0, + target_quad=target_q_0, + source_conditions=np.zeros_like(source_lin_0) * 0.0 ) - return Torch_loader(dataset, batch_size=16, shuffle=True) + ds1 = dataloaders.OTDataSet( + source_lin=source_lin_1, + target_lin=target_lin_1, + source_quad=source_q_1, + target_quad=target_q_1, + source_conditions=np.ones_like(source_lin_1) * 1.0 + ) + sampler0 = torch.utils.data.RandomSampler(ds0, 
replacement=True) + sampler1 = torch.utils.data.RandomSampler(ds1, replacement=True) + dl0 = Torch_loader(ds0, batch_size=16, sampler=sampler0) + dl1 = Torch_loader(ds1, batch_size=16, sampler=sampler1) + return dataloaders.ConditionalOTDataLoader((dl0, dl1)) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 0a030521c..d44db4476 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -376,6 +376,7 @@ def test_genot_quad_conditional( if len(batch["source_conditions"]) else None, jnp.array(batch["target_quad"]) ) + source_dim = source_quad.shape[1] target_dim = target_quad.shape[1] condition_dim = source_conditions.shape[1] From 8644fd93afbaa1281cb8e623ec0b84a466c259d1 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Mon, 12 Feb 2024 17:53:44 +0100 Subject: [PATCH 090/186] Update linter --- .pre-commit-config.yaml | 6 +++--- docs/tutorials/MetaOT.ipynb | 1 + pyproject.toml | 21 +++++++++++-------- src/ott/__init__.py | 11 +++++++++- src/ott/neural/duality/models.py | 1 - src/ott/neural/duality/neuraldual.py | 12 ++++++++++- src/ott/neural/flows/otfm.py | 12 ++++++++++- src/ott/neural/gaps/map_estimator.py | 11 +++++++++- src/ott/neural/models/base_solver.py | 9 ++++---- src/ott/solvers/linear/sinkhorn_lr.py | 11 +++++++++- src/ott/solvers/quadratic/__init__.py | 7 ++++++- .../quadratic/gromov_wasserstein_lr.py | 11 +++++++++- .../tools/gaussian_mixture/fit_gmm_pair.py | 6 +++++- .../gaussian_mixture/gaussian_mixture.py | 7 ++++++- tests/conftest.py | 3 +-- tests/geometry/lr_kernel_test.py | 4 +++- 16 files changed, 103 insertions(+), 30 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1f84672bb..ec54873a3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -20,12 +20,12 @@ repos: - id: trailing-whitespace - id: check-case-conflict - repo: https://github.com/charliermarsh/ruff-pre-commit - rev: v0.1.6 + rev: v0.2.1 
hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] - repo: https://github.com/pycqa/isort - rev: 5.12.0 + rev: 5.13.2 hooks: - id: isort name: isort @@ -42,7 +42,7 @@ repos: - id: nbqa-black - id: nbqa-isort - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks - rev: v2.11.0 + rev: v2.12.0 hooks: - id: pretty-format-yaml args: [--autofix, --indent, '2'] diff --git a/docs/tutorials/MetaOT.ipynb b/docs/tutorials/MetaOT.ipynb index 1ef687b28..172024733 100644 --- a/docs/tutorials/MetaOT.ipynb +++ b/docs/tutorials/MetaOT.ipynb @@ -63,6 +63,7 @@ "import jax.numpy as jnp\n", "import numpy as np\n", "import torchvision\n", + "\n", "from flax import linen as nn\n", "\n", "import matplotlib.pyplot as plt\n", diff --git a/pyproject.toml b/pyproject.toml index 6128854ab..7306525b1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -103,13 +103,14 @@ include = '\.ipynb$' [tool.isort] profile = "black" +line_length = 80 include_trailing_comma = true multi_line_output = 3 sections = ["FUTURE", "STDLIB", "THIRDPARTY", "TEST", "NUMERIC", "NEURAL", "PLOTTING", "FIRSTPARTY", "LOCALFOLDER"] # also contains what we import in notebooks/tests known_neural = ["flax", "optax", "diffrax", "orbax"] known_numeric = ["numpy", "scipy", "jax", "flax", "optax", "jaxopt", "torch", "ot", "torchvision", "pandas", "sklearn", "tslearn"] -known_test = ["pytest"] +known_test = ["_pytest", "pytest"] known_plotting = ["IPython", "matplotlib", "mpl_toolkits", "seaborn"] [tool.pytest.ini_options] @@ -274,6 +275,10 @@ exclude = [ "docs/_build", "dist" ] +line-length = 80 +target-version = "py38" + +[tool.ruff.lint] ignore = [ # Do not assign a lambda expression, use a def -> lambda expression assignments are convenient "E731", @@ -288,10 +293,8 @@ ignore = [ # Missing docstring in magic method "D105", ] -line-length = 80 select = [ "D", # flake8-docstrings - "I", # isort "E", # pycodestyle "F", # pyflakes "W", # pycodestyle @@ -308,20 +311,20 @@ select = [ "RET", 
# flake8-raise ] unfixable = ["B", "UP", "C4", "BLE", "T20", "RET"] -target-version = "py38" -[tool.ruff.per-file-ignores] + +[tool.ruff.lint.per-file-ignores] # TODO(michalk8): PO004 - remove `self.initialize` "tests/*" = ["D", "PT004", "E402"] "*/__init__.py" = ["F401"] "docs/*" = ["D"] "src/ott/types.py" = ["D102"] -[tool.ruff.pydocstyle] +[tool.ruff.lint.pydocstyle] convention = "google" -[tool.ruff.pyupgrade] +[tool.ruff.lint.pyupgrade] # Preserve types, even if a file imports `from __future__ import annotations`. keep-runtime-typing = true -[tool.ruff.flake8-tidy-imports] +[tool.ruff.lint.flake8-tidy-imports] # Disallow all relative imports. ban-relative-imports = "parents" -[tool.ruff.flake8-quotes] +[tool.ruff.lint.flake8-quotes] inline-quotes = "double" diff --git a/src/ott/__init__.py b/src/ott/__init__.py index 8d2f007c5..dac0eb854 100644 --- a/src/ott/__init__.py +++ b/src/ott/__init__.py @@ -13,7 +13,16 @@ # limitations under the License. import contextlib -from . import datasets, geometry, initializers, math, problems, solvers, tools, utils +from . import ( + datasets, + geometry, + initializers, + math, + problems, + solvers, + tools, + utils, +) with contextlib.suppress(ImportError): # TODO(michalk8): add warning that neural module is not imported diff --git a/src/ott/neural/duality/models.py b/src/ott/neural/duality/models.py index d498a8d4e..b3ce94c35 100644 --- a/src/ott/neural/duality/models.py +++ b/src/ott/neural/duality/models.py @@ -16,7 +16,6 @@ import jax import jax.numpy as jnp -from jax.nn import initializers import flax.linen as nn import optax diff --git a/src/ott/neural/duality/neuraldual.py b/src/ott/neural/duality/neuraldual.py index 09f51dc80..3ea88f74a 100644 --- a/src/ott/neural/duality/neuraldual.py +++ b/src/ott/neural/duality/neuraldual.py @@ -13,7 +13,17 @@ # limitations under the License. 
import abc import warnings -from typing import Any, Callable, Dict, Iterator, List, Literal, Optional, Tuple, Union +from typing import ( + Any, + Callable, + Dict, + Iterator, + List, + Literal, + Optional, + Tuple, + Union, +) import jax import jax.numpy as jnp diff --git a/src/ott/neural/flows/otfm.py b/src/ott/neural/flows/otfm.py index 69bad0e69..42ffa422e 100644 --- a/src/ott/neural/flows/otfm.py +++ b/src/ott/neural/flows/otfm.py @@ -13,7 +13,17 @@ # limitations under the License. import collections import functools -from typing import Any, Callable, Dict, Literal, Mapping, Optional, Tuple, Type, Union +from typing import ( + Any, + Callable, + Dict, + Literal, + Mapping, + Optional, + Tuple, + Type, + Union, +) import jax import jax.numpy as jnp diff --git a/src/ott/neural/gaps/map_estimator.py b/src/ott/neural/gaps/map_estimator.py index be8834458..61c24f0c3 100644 --- a/src/ott/neural/gaps/map_estimator.py +++ b/src/ott/neural/gaps/map_estimator.py @@ -13,7 +13,16 @@ # limitations under the License. import collections import functools -from typing import Any, Callable, Dict, Iterator, Optional, Sequence, Tuple, Union +from typing import ( + Any, + Callable, + Dict, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) import jax import jax.numpy as jnp diff --git a/src/ott/neural/models/base_solver.py b/src/ott/neural/models/base_solver.py index a55370403..b0587d3f0 100644 --- a/src/ott/neural/models/base_solver.py +++ b/src/ott/neural/models/base_solver.py @@ -326,12 +326,11 @@ class UnbalancednessHandler: resample_epsilon: Epsilon for resampling. scale_cost: Scaling of the cost matrix for estimating the rescaling factors. ot_solver: Solver to compute unbalanced marginals. If `ot_solver` is `None`, - the method - :meth:`ott.neural.models.base_solver.UnbalancednessHandler.compute_unbalanced_marginals` - is not available, and hence the unbalanced marginals must be computed by the neural solver. 
+ the method :meth:`ott.neural.models.base_solver.UnbalancednessHandler.compute_unbalanced_marginals` + is not available, and hence the unbalanced marginals must be computed + by the neural solver. kwargs: Additional keyword arguments. - - """ + """ # noqa: E501 # TODO(MUCDK): fix me def __init__( self, diff --git a/src/ott/solvers/linear/sinkhorn_lr.py b/src/ott/solvers/linear/sinkhorn_lr.py index f6e6216fe..da949da0d 100644 --- a/src/ott/solvers/linear/sinkhorn_lr.py +++ b/src/ott/solvers/linear/sinkhorn_lr.py @@ -11,7 +11,16 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, Callable, Literal, Mapping, NamedTuple, Optional, Tuple, Union +from typing import ( + Any, + Callable, + Literal, + Mapping, + NamedTuple, + Optional, + Tuple, + Union, +) import jax import jax.experimental diff --git a/src/ott/solvers/quadratic/__init__.py b/src/ott/solvers/quadratic/__init__.py index 507812971..560ac3ddd 100644 --- a/src/ott/solvers/quadratic/__init__.py +++ b/src/ott/solvers/quadratic/__init__.py @@ -11,5 +11,10 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from . import gromov_wasserstein, gromov_wasserstein_lr, gw_barycenter, lower_bound +from . import ( + gromov_wasserstein, + gromov_wasserstein_lr, + gw_barycenter, + lower_bound, +) from ._solve import solve diff --git a/src/ott/solvers/quadratic/gromov_wasserstein_lr.py b/src/ott/solvers/quadratic/gromov_wasserstein_lr.py index df2237477..cb12911bf 100644 --- a/src/ott/solvers/quadratic/gromov_wasserstein_lr.py +++ b/src/ott/solvers/quadratic/gromov_wasserstein_lr.py @@ -12,7 +12,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
"""A Jax implementation of the unbalanced low-rank GW algorithm.""" -from typing import Any, Callable, Literal, Mapping, NamedTuple, Optional, Tuple, Union +from typing import ( + Any, + Callable, + Literal, + Mapping, + NamedTuple, + Optional, + Tuple, + Union, +) import jax import jax.experimental diff --git a/src/ott/tools/gaussian_mixture/fit_gmm_pair.py b/src/ott/tools/gaussian_mixture/fit_gmm_pair.py index 0c3c78ba3..7ecde263c 100644 --- a/src/ott/tools/gaussian_mixture/fit_gmm_pair.py +++ b/src/ott/tools/gaussian_mixture/fit_gmm_pair.py @@ -84,7 +84,11 @@ import jax import jax.numpy as jnp -from ott.tools.gaussian_mixture import fit_gmm, gaussian_mixture, gaussian_mixture_pair +from ott.tools.gaussian_mixture import ( + fit_gmm, + gaussian_mixture, + gaussian_mixture_pair, +) __all__ = ["get_fit_model_em_fn"] diff --git a/src/ott/tools/gaussian_mixture/gaussian_mixture.py b/src/ott/tools/gaussian_mixture/gaussian_mixture.py index c3f04c8fa..27a568989 100644 --- a/src/ott/tools/gaussian_mixture/gaussian_mixture.py +++ b/src/ott/tools/gaussian_mixture/gaussian_mixture.py @@ -16,7 +16,12 @@ import jax import jax.numpy as jnp -from ott.tools.gaussian_mixture import gaussian, linalg, probabilities, scale_tril +from ott.tools.gaussian_mixture import ( + gaussian, + linalg, + probabilities, + scale_tril, +) __all__ = ["GaussianMixture"] diff --git a/tests/conftest.py b/tests/conftest.py index da7e6a3dc..8fe7166aa 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -15,9 +15,8 @@ import itertools from typing import Any, Mapping, Optional, Sequence -from _pytest.python import Metafunc - import pytest +from _pytest.python import Metafunc import jax import jax.experimental diff --git a/tests/geometry/lr_kernel_test.py b/tests/geometry/lr_kernel_test.py index 1f0a42e7d..6db247179 100644 --- a/tests/geometry/lr_kernel_test.py +++ b/tests/geometry/lr_kernel_test.py @@ -1,9 +1,11 @@ from typing import Literal, Optional +import pytest + import jax import jax.numpy as 
jnp import numpy as np -import pytest + from ott.geometry import costs, low_rank, pointcloud from ott.solvers import linear From 460bf901f29676e13568a63f833475a77c760f52 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Wed, 14 Feb 2024 09:05:26 +0100 Subject: [PATCH 091/186] fix bug in _resample_data` --- src/ott/neural/models/base_solver.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ott/neural/models/base_solver.py b/src/ott/neural/models/base_solver.py index b0587d3f0..733fb6477 100644 --- a/src/ott/neural/models/base_solver.py +++ b/src/ott/neural/models/base_solver.py @@ -146,7 +146,7 @@ def _resample_data( ) -> Tuple[jnp.ndarray, ...]: """Resample a batch according to coupling `tmat`.""" tmat_flattened = tmat.flatten() - indices = jax.random.choice(rng, len(tmat_flattened), shape=[tmat.shape[0]]) + indices = jax.random.choice(rng, len(tmat_flattened), p=tmat_flattened, shape=[tmat.shape[0]]) indices_source = indices // tmat.shape[1] indices_target = indices % tmat.shape[1] return tuple( From ce42c1a1f80edb39174f3490e19e8de10304e236 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Fri, 16 Feb 2024 01:10:10 +0100 Subject: [PATCH 092/186] incorporate more changes --- src/ott/neural/__init__.py | 2 +- .../neural/{flows => flow_models}/__init__.py | 0 .../neural/{flows => flow_models}/flows.py | 32 +-- .../neural/{flows => flow_models}/genot.py | 182 +++++++++--------- .../neural/{flows => flow_models}/layers.py | 0 .../neural/{flows => flow_models}/models.py | 2 +- src/ott/neural/{flows => flow_models}/otfm.py | 16 +- .../neural/{flows => flow_models}/samplers.py | 0 src/ott/neural/models/base_solver.py | 142 ++++++++------ tests/neural/genot_test.py | 6 +- tests/neural/otfm_test.py | 2 +- 11 files changed, 208 insertions(+), 176 deletions(-) rename src/ott/neural/{flows => flow_models}/__init__.py (100%) rename src/ott/neural/{flows => flow_models}/flows.py (79%) rename src/ott/neural/{flows => flow_models}/genot.py (76%) rename 
src/ott/neural/{flows => flow_models}/layers.py (100%) rename src/ott/neural/{flows => flow_models}/models.py (99%) rename src/ott/neural/{flows => flow_models}/otfm.py (95%) rename src/ott/neural/{flows => flow_models}/samplers.py (100%) diff --git a/src/ott/neural/__init__.py b/src/ott/neural/__init__.py index 2a61ca021..678919a8c 100644 --- a/src/ott/neural/__init__.py +++ b/src/ott/neural/__init__.py @@ -11,4 +11,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from . import data, duality, flows, gaps, models +from . import data, duality, flow_models, gaps, models diff --git a/src/ott/neural/flows/__init__.py b/src/ott/neural/flow_models/__init__.py similarity index 100% rename from src/ott/neural/flows/__init__.py rename to src/ott/neural/flow_models/__init__.py diff --git a/src/ott/neural/flows/flows.py b/src/ott/neural/flow_models/flows.py similarity index 79% rename from src/ott/neural/flows/flows.py rename to src/ott/neural/flow_models/flows.py index 65f697d89..fd1009cef 100644 --- a/src/ott/neural/flows/flows.py +++ b/src/ott/neural/flow_models/flows.py @@ -44,9 +44,9 @@ def compute_mu_t( at time :math:`t`. Args: - t: Time :math:`t`. - src: Sample from the source distribution. - tgt: Sample from the target distribution. + t: Time :math:`t` of shape `(batch_size, 1)`. + src: Sample from the source distribution of shape `(batch_size, ...)`. + tgt: Sample from the target distribution of shape `(batch_size, ...)`. """ @abc.abstractmethod @@ -54,7 +54,7 @@ def compute_sigma_t(self, t: jnp.ndarray) -> jnp.ndarray: """Compute the standard deviation of the probablity path at time :math:`t`. Args: - t: Time :math:`t`. + t: Time :math:`t` of shape `(batch_size, 1)`. """ @abc.abstractmethod @@ -67,9 +67,9 @@ def compute_ut( :math:`x_1` at time :math:`t`. Args: - t: Time :math:`t`. - src: Sample from the source distribution. 
- tgt: Sample from the target distribution. + t: Time :math:`t` of shape `(batch_size, 1)`.. + src: Sample from the source distribution of shape `(batch_size, ...)`. + tgt: Sample from the target distribution of shape `(batch_size, ...)`. Returns: Conditional vector field evaluated at time :math:`t`. @@ -85,9 +85,9 @@ def compute_xt( Args: rng: Random number generator. - t: Time :math:`t`. - src: Sample from the source distribution. - tgt: Sample from the target distribution. + t: Time :math:`t` of shape `(batch_size, 1)`.. + src: Sample from the source distribution of shape `(batch_size, ...)`. + tgt: Sample from the target distribution of shape `(batch_size, ...)`. Returns: Samples from the probability path between :math:`x_0` and :math:`x_1` @@ -116,9 +116,9 @@ def compute_ut( :math:`x_1` at time :math:`t`. Args: - t: Time :math:`t`. - src: Sample from the source distribution. - tgt: Sample from the target distribution. + t: Time :math:`t` of shape `(batch_size, 1)`. + src: Sample from the source distribution of shape `(batch_size, ...)`. + tgt: Sample from the target distribution of shape `(batch_size, ...)`.. Returns: Conditional vector field evaluated at time :math:`t`. @@ -134,7 +134,7 @@ def compute_sigma_t(self, t: jnp.ndarray) -> jnp.ndarray: r"""Compute noise of the flow at time :math:`t`. Args: - t: Time :math:`t`. + t: Time :math:`t` of shape `(batch_size, 1)`.. Returns: Constant, time-independent standard deviation :math:`\sigma`. @@ -147,7 +147,7 @@ class BrownianNoiseFlow(StraightFlow): Sampler for sampling noise implicitly defined by a Schroedinger Bridge problem with parameter :math:`\sigma` such that - :math:`\sigma_t = \sigma * \sqrt(t * (1-t))`. + :math:`\sigma_t = \sigma * \sqrt(t * (1-t))` (:cite:`tong:23`). Returns: Samples from the probability path between :math:`x_0` and :math:`x_1` @@ -158,7 +158,7 @@ def compute_sigma_t(self, t: jnp.ndarray) -> jnp.ndarray: """Compute the standard deviation of the probablity path at time :math:`t`. 
Args: - t: Time :math:`t`. + t: Time :math:`t` of shape `(batch_size, 1)`.. Returns: Standard deviation of the probablity path at time :math:`t`. diff --git a/src/ott/neural/flows/genot.py b/src/ott/neural/flow_models/genot.py similarity index 76% rename from src/ott/neural/flows/genot.py rename to src/ott/neural/flow_models/genot.py index 0ab1b4a87..c900d1268 100644 --- a/src/ott/neural/flows/genot.py +++ b/src/ott/neural/flow_models/genot.py @@ -23,7 +23,7 @@ from flax.training import train_state from ott import utils -from ott.neural.flows import flows, samplers +from ott.neural.flow_models import flows, samplers from ott.neural.models import base_solver __all__ = ["GENOTBase", "GENOTLin", "GENOTQuad"] @@ -164,7 +164,7 @@ def step_fn( def loss_fn( params: jnp.ndarray, time: jnp.ndarray, source: jnp.ndarray, target: jnp.ndarray, latent: jnp.ndarray, - source_conditions: Optional[jnp.ndarray], rng: jax.random.PRNGKeyArray + source_conditions: Optional[jnp.ndarray], rng: jax.Array ): x_t = self.flow.compute_xt(rng, time, latent, target) apply_fn = functools.partial( @@ -263,17 +263,17 @@ def learn_rescaling(self) -> bool: def _reshape_samples(self, arrays: Tuple[jnp.ndarray, ...], batch_size: int) -> Tuple[jnp.ndarray, ...]: - return tuple( - jnp.reshape(arr, (batch_size * self.k_samples_per_x, - -1)) if arr is not None else None for arr in arrays + return jax.tree_util.tree_map( + lambda x: jnp.reshape(x, (batch_size * self.k_samples_per_x, -1)) + if x is not None else None, arrays ) class GENOTLin(GENOTBase): """Implementation of GENOT-L (:cite:`klein:23`). - GENOT-L (Generative Entropic Neural Optimal Transport, linear) solves the - entropic (linear) OT problem. + GENOT-L (Generative Entropic Neural Optimal Transport, linear) is a + neural solver for entropic (linear) OT problems. 
""" def __call__(self, train_loader, valid_loader): @@ -314,9 +314,9 @@ def __call__(self, train_loader, valid_loader): ) (source, source_conditions - ), (target,) = self.ot_matcher._sample_conditional_indices_from_tmap( + ), (target,) = self.ot_matcher.sample_conditional_indices_from_tmap( rng=rng_resample, - tmat=tmat, + conditional_distributions=tmat, k_samples_per_x=self.k_samples_per_x, source_arrays=(source, source_conditions), target_arrays=(target,), @@ -333,7 +333,7 @@ def __call__(self, train_loader, valid_loader): rng_latent_data_match, self.k_samples_per_x ) (source, source_conditions - ), (target,) = jax.vmap(self.ot_matcher._resample_data, 0, 0)( + ), (target,) = jax.vmap(self.ot_matcher.sample_joint, 0, 0)( rng_latent_data_match, tmats_latent_data, (source, source_conditions), (target,) ) @@ -368,9 +368,9 @@ class GENOTQuad(GENOTBase): """Implementation of GENOT-Q and GENOT-F (:cite:`klein:23`). GENOT-Q (Generative Entropic Neural Optimal Transport, quadratic) and - GENOT-F (Generative Entropic Neural Optimal Transport, fused) solve the - entropic Gromov-Wasserstein and the entropic Fused Gromov-Wasserstein problem, - respectively. + GENOT-F (Generative Entropic Neural Optimal Transport, fused) are neural + solver for entropic Gromov-Wasserstein and entropic Fused Gromov-Wasserstein + problems, respectively. """ def __call__(self, train_loader, valid_loader): @@ -381,86 +381,94 @@ def __call__(self, train_loader, valid_loader): valid_loader: Data loader for the validation data. 
""" batch: Dict[str, jnp.array] = {} - for iteration in range(self.iterations): - batch = next(iter(train_loader)) - - ( - self.rng, rng_time, rng_resample, rng_noise, rng_latent_data_match, - rng_step_fn - ) = jax.random.split(self.rng, 6) - (source_lin, source_quad, source_conditions, target_lin, target_quad) = ( - jnp.array(batch["source_lin"]) if len(batch["source_lin"]) else None, - jnp.array(batch["source_quad"]), - jnp.array(batch["source_conditions"]) - if len(batch["source_conditions"]) else None, - jnp.array(batch["target_lin"]) if len(batch["target_lin"]) else None, - jnp.array(batch["target_quad"]) - ) - batch_size = len(source_quad) - n_samples = batch_size * self.k_samples_per_x - time = self.time_sampler(rng_time, n_samples) - latent = self.latent_noise_fn( - rng_noise, shape=(self.k_samples_per_x, batch_size) - ) - - tmat = self.ot_matcher.match_fn( - source_lin, source_quad, target_lin, target_quad - ) - - if self.ot_matcher.fused_penalty > 0.0: - source = jnp.concatenate((source_lin, source_quad), axis=1) - target = jnp.concatenate((target_lin, target_quad), axis=1) - else: - source = source_quad - target = target_quad - - (source, source_conditions), (target,) = ( - self.ot_matcher._sample_conditional_indices_from_tmap( - rng=rng_resample, - tmat=tmat, - k_samples_per_x=self.k_samples_per_x, - source_arrays=(source, source_conditions), - target_arrays=(target,), - source_is_balanced=(self.unbalancedness_handler.tau_a == 1.0) - ) - ) + iter = -1 + while True: + for batch in train_loader: + iter += 1 + if iter >= self.iterations: + stop = True + break - if self.matcher_latent_to_data is not None: - tmats_latent_data = jnp.array( - jax.vmap(self.matcher_latent_to_data.match_fn, 0, - 0)(x=latent, y=target) + ( + self.rng, rng_time, rng_resample, rng_noise, rng_latent_data_match, + rng_step_fn + ) = jax.random.split(self.rng, 6) + (source_lin, source_quad, source_conditions, target_lin, + target_quad) = ( + jnp.array(batch["source_lin"]) if 
len(batch["source_lin"]) else + None, jnp.array(batch["source_quad"]), + jnp.array(batch["source_conditions"]) + if len(batch["source_conditions"]) else None, + jnp.array(batch["target_lin"]) if len(batch["target_lin"]) else + None, jnp.array(batch["target_quad"]) + ) + batch_size = len(source_quad) + n_samples = batch_size * self.k_samples_per_x + time = self.time_sampler(rng_time, n_samples) + latent = self.latent_noise_fn( + rng_noise, shape=(self.k_samples_per_x, batch_size) ) - rng_latent_data_match = jax.random.split( - rng_latent_data_match, self.k_samples_per_x + tmat = self.ot_matcher.match_fn( + source_quad, target_quad, source_lin, target_lin ) - (source, source_conditions - ), (target,) = jax.vmap(self.ot_matcher._resample_data, 0, 0)( - rng_latent_data_match, tmats_latent_data, - (source, source_conditions), (target,) + if self.ot_matcher.fused_penalty > 0.0: + source = jnp.concatenate((source_lin, source_quad), axis=1) + target = jnp.concatenate((target_lin, target_quad), axis=1) + else: + source = source_quad + target = target_quad + + (source, source_conditions), (target,) = ( + self.ot_matcher.sample_conditional_indices_from_tmap( + rng=rng_resample, + conditional_distributions=tmat, + k_samples_per_x=self.k_samples_per_x, + source_arrays=(source, source_conditions), + target_arrays=(target,), + source_is_balanced=(self.unbalancedness_handler.tau_a == 1.0) + ) ) - source, source_conditions, target, latent = self._reshape_samples( - (source, source_conditions, target, latent), batch_size - ) + if self.matcher_latent_to_data is not None: + tmats_latent_data = jnp.array( + jax.vmap(self.matcher_latent_to_data.match_fn, 0, + 0)(x=latent, y=target) + ) - self.state_velocity_field, loss = self.step_fn( - rng_step_fn, self.state_velocity_field, time, source, target, latent, - source_conditions - ) - if self.learn_rescaling: - ( - self.state_eta, self.state_xi, eta_predictions, xi_predictions, - loss_a, loss_b - ) = self.unbalancedness_handler.step_fn( - 
source=source, - target=target, - condition=source_conditions, - a=tmat.sum(axis=1), - b=tmat.sum(axis=0), - state_eta=self.unbalancedness_handler.state_eta, - state_xi=self.unbalancedness_handler.state_xi, + rng_latent_data_match = jax.random.split( + rng_latent_data_match, self.k_samples_per_x + ) + + (source, source_conditions + ), (target,) = jax.vmap(self.ot_matcher.sample_joint, 0, 0)( + rng_latent_data_match, tmats_latent_data, + (source, source_conditions), (target,) + ) + + source, source_conditions, target, latent = self._reshape_samples( + (source, source_conditions, target, latent), batch_size + ) + + self.state_velocity_field, loss = self.step_fn( + rng_step_fn, self.state_velocity_field, time, source, target, + latent, source_conditions ) - if iteration % self.valid_freq == 0: - self._valid_step(valid_loader, iteration) + if self.learn_rescaling: + ( + self.state_eta, self.state_xi, eta_predictions, xi_predictions, + loss_a, loss_b + ) = self.unbalancedness_handler.step_fn( + source=source, + target=target, + condition=source_conditions, + a=tmat.sum(axis=1), + b=tmat.sum(axis=0), + state_eta=self.unbalancedness_handler.state_eta, + state_xi=self.unbalancedness_handler.state_xi, + ) + if iter % self.valid_freq == 0: + self._valid_step(valid_loader, iter) + if stop: + break diff --git a/src/ott/neural/flows/layers.py b/src/ott/neural/flow_models/layers.py similarity index 100% rename from src/ott/neural/flows/layers.py rename to src/ott/neural/flow_models/layers.py diff --git a/src/ott/neural/flows/models.py b/src/ott/neural/flow_models/models.py similarity index 99% rename from src/ott/neural/flows/models.py rename to src/ott/neural/flow_models/models.py index bf365e772..ebb29aa99 100644 --- a/src/ott/neural/flows/models.py +++ b/src/ott/neural/flow_models/models.py @@ -20,7 +20,7 @@ import optax from flax.training import train_state -import ott.neural.flows.layers as flow_layers +import ott.neural.flow_models.layers as flow_layers from 
ott.neural.models import layers __all__ = ["VelocityField"] diff --git a/src/ott/neural/flows/otfm.py b/src/ott/neural/flow_models/otfm.py similarity index 95% rename from src/ott/neural/flows/otfm.py rename to src/ott/neural/flow_models/otfm.py index 42ffa422e..dca7bea60 100644 --- a/src/ott/neural/flows/otfm.py +++ b/src/ott/neural/flow_models/otfm.py @@ -34,7 +34,7 @@ from ott import utils from ott.geometry import costs -from ott.neural.flows import flows +from ott.neural.flow_models import flows from ott.neural.models import base_solver __all__ = ["OTFlowMatching"] @@ -172,16 +172,14 @@ def __call__(self, train_loader, valid_loader): rng_resample, rng_step_fn, self.rng = jax.random.split(self.rng, 3) source, source_conditions, target = jnp.array( batch["source_lin"] - ), jnp.array(batch["source_conditions"] - ) if batch["source_conditions"] else None, jnp.array( - batch["target_lin"] - ) + ), jnp.array(batch["source_conditions"]) if len( + batch["source_conditions"] + ) > 0 else None, jnp.array(batch["target_lin"]) if self.ot_matcher is not None: tmat = self.ot_matcher.match_fn(source, target) - (source, - source_conditions), (target,) = self.ot_matcher._resample_data( - rng_resample, tmat, (source, source_conditions), (target,) - ) + (source, source_conditions), (target,) = self.ot_matcher.sample_joint( + rng_resample, tmat, (source, source_conditions), (target,) + ) self.state_velocity_field, loss = self.step_fn( rng_step_fn, self.state_velocity_field, source, target, source_conditions diff --git a/src/ott/neural/flows/samplers.py b/src/ott/neural/flow_models/samplers.py similarity index 100% rename from src/ott/neural/flows/samplers.py rename to src/ott/neural/flow_models/samplers.py diff --git a/src/ott/neural/models/base_solver.py b/src/ott/neural/models/base_solver.py index 733fb6477..c0da4db6a 100644 --- a/src/ott/neural/models/base_solver.py +++ b/src/ott/neural/models/base_solver.py @@ -15,6 +15,7 @@ import jax import jax.numpy as jnp +from jax import 
tree_util import optax from flax.training import train_state @@ -39,8 +40,6 @@ def _get_sinkhorn_match_fn( "max_cost", "median"]] = "mean", tau_a: float = 1.0, tau_b: float = 1.0, - *, - filter_input: bool = False, ) -> Callable: @jax.jit @@ -52,19 +51,7 @@ def match_pairs(x: jnp.ndarray, y: jnp.ndarray) -> jnp.ndarray: linear_problem.LinearProblem(geom, tau_a=tau_a, tau_b=tau_b) ) - @jax.jit - def match_pairs_filtered( - x_lin: jnp.ndarray, x_quad: jnp.ndarray, y_lin: jnp.ndarray, - y_quad: jnp.ndarray - ) -> jnp.ndarray: - geom = pointcloud.PointCloud( - x_lin, y_lin, epsilon=epsilon, scale_cost=scale_cost, cost_fn=cost_fn - ) - return ot_solver( - linear_problem.LinearProblem(geom, tau_a=tau_a, tau_b=tau_b) - ) - - return match_pairs_filtered if filter_input else match_pairs + return match_pairs def _get_gromov_match_fn( @@ -104,10 +91,10 @@ def _get_gromov_match_fn( @jax.jit def match_pairs( - x_lin: Optional[jnp.ndarray], x_quad: Tuple[jnp.ndarray, jnp.ndarray], - y_lin: Optional[jnp.ndarray], y_quad: Tuple[jnp.ndarray, jnp.ndarray], + x_lin: Optional[jnp.ndarray], + y_lin: Optional[jnp.ndarray], ) -> jnp.ndarray: geom_xx = pointcloud.PointCloud( x=x_quad, y=x_quad, cost_fn=cost_fn_xx, scale_cost=scale_cost_xx @@ -137,36 +124,67 @@ def match_pairs( class BaseOTMatcher: """Base class for mini-batch neural OT matching classes.""" - def _resample_data( + def sample_joint( self, rng: jax.Array, - tmat: jnp.ndarray, - source_arrays: Tuple[jnp.ndarray, ...], - target_arrays: Tuple[jnp.ndarray, ...], + joint_dist: jnp.ndarray, + source_arrays: Tuple[Optional[jnp.ndarray], ...], + target_arrays: Tuple[Optional[jnp.ndarray], ...], ) -> Tuple[jnp.ndarray, ...]: - """Resample a batch according to coupling `tmat`.""" - tmat_flattened = tmat.flatten() - indices = jax.random.choice(rng, len(tmat_flattened), p=tmat_flattened, shape=[tmat.shape[0]]) - indices_source = indices // tmat.shape[1] - indices_target = indices % tmat.shape[1] - return tuple( - b[indices_source] if b 
is not None else None for b in source_arrays - ), tuple( - b[indices_target] if b is not None else None for b in target_arrays + """Resample from arrays according to discrete joint distribution. + + Args: + rng: Random number generator. + joint_dist: Joint distribution between source and target to sample from. + source_arrays: Arrays corresponding to source distriubution to sample + from. + target_arrays: Arrays corresponding to target arrays to sample from. + + Returns: + Resampled source and target arrays. + """ + _, n_tgt = joint_dist.shape + tmat_flattened = joint_dist.flatten() + indices = jax.random.choice( + rng, len(tmat_flattened), p=tmat_flattened, shape=[joint_dist.shape[0]] + ) + indices_source = indices // n_tgt + indices_target = indices % n_tgt + return tree_util.tree_map( + lambda b: b[indices_source] if b is not None else b, source_arrays + ), tree_util.tree_map( + lambda b: b[indices_target] if b is not None else b, target_arrays ) - def _sample_conditional_indices_from_tmap( + def sample_conditional_indices_from_tmap( self, rng: jax.Array, - tmat: jnp.ndarray, + conditional_distributions: jnp.ndarray, *, k_samples_per_x: Union[int, jnp.ndarray], - source_arrays: Tuple[jnp.ndarray, ...], - target_arrays: Tuple[jnp.ndarray, ...], + source_arrays: Tuple[Optional[jnp.ndarray], ...], + target_arrays: Tuple[Optional[jnp.ndarray], ...], source_is_balanced: bool, - ) -> Tuple[jnp.array, jnp.array]: - batch_size = tmat.shape[0] - left_marginals = tmat.sum(axis=1) + ) -> Tuple[jnp.ndarray, ...]: + """Sample from arrays according to discrete conditional distributions. + + Args: + rng: Random number generator. + conditional_distributions: Conditional distributions to sample from. + k_samples_per_x: Expectation of number of samples to draw from each + conditional distribution. + source_arrays: Arrays corresponding to source distriubution to sample + from. + target_arrays: Arrays corresponding to target arrays to sample from. 
+ source_is_balanced: Whether the source distribution is balanced. + If :obj:`False`, the number of samples drawn from each conditional + distribution `k_samples_per_x` is proportional to the left marginals. + + Returns: + Resampled source and target arrays. + """ + n_src, n_tgt = conditional_distributions.shape + left_marginals = conditional_distributions.sum(axis=1) if not source_is_balanced: rng, rng_2 = jax.random.split(rng, 2) indices = jax.random.choice( @@ -176,12 +194,11 @@ def _sample_conditional_indices_from_tmap( shape=(len(left_marginals),) ) else: - indices = jnp.arange(batch_size) - tmat_adapted = tmat[indices] + indices = jnp.arange(n_src) + tmat_adapted = conditional_distributions[indices] indices_per_row = jax.vmap( - lambda row: jax.random.choice( - key=rng, a=jnp.arange(batch_size), p=row, shape=(k_samples_per_x,) - ), + lambda row: jax.random. + choice(key=rng, a=jnp.arange(n_tgt), p=row, shape=(k_samples_per_x,)), in_axes=0, out_axes=0, )( @@ -190,16 +207,16 @@ def _sample_conditional_indices_from_tmap( indices_source = jnp.repeat(indices, k_samples_per_x) indices_target = jnp.reshape( - indices_per_row % tmat.shape[1], (batch_size * k_samples_per_x,) + indices_per_row % n_tgt, (n_src * k_samples_per_x,) ) - return tuple( - jnp.reshape(b[indices_source], (k_samples_per_x, batch_size, - -1)) if b is not None else None - for b in source_arrays - ), tuple( - jnp.reshape(b[indices_target], (k_samples_per_x, batch_size, - -1)) if b is not None else None - for b in target_arrays + return tree_util.tree_map( + lambda b: jnp. + reshape(b[indices_source], (k_samples_per_x, n_src, *b.shape[1:])) + if b is not None else None, source_arrays + ), tree_util.tree_map( + lambda b: jnp. 
+ reshape(b[indices_target], (k_samples_per_x, n_src, *b.shape[1:])) + if b is not None else b, target_arrays ) @@ -409,17 +426,26 @@ def compute_unbalanced_marginals_quad(*args, **kwargs): return compute_unbalanced_marginals_quad @jax.jit - def _resample_unbalanced( + def resample_unbalanced( self, rng: jax.Array, - batch: Tuple[jnp.ndarray, ...], - marginals: jnp.ndarray, + arrays: Tuple[jnp.ndarray, ...], + p: jnp.ndarray, ) -> Tuple[jnp.ndarray, ...]: - """Resample a batch based on marginals.""" - indices = jax.random.choice( - rng, a=len(marginals), p=jnp.squeeze(marginals), shape=[len(marginals)] + """Resample a batch based on marginals. + + Args: + rng: Random number generator. + arrays: Arrays to resample from. + p: Probabilities according to which `arrays` are resampled. + + Returns: + Resampled arrays. + """ + indices = jax.random.choice(rng, a=len(p), p=jnp.squeeze(p), shape=[len(p)]) + return tree_util.tree_map( + lambda b: b[indices] if b is not None else b, arrays ) - return tuple(b[indices] if b is not None else None for b in batch) def setup(self, source_dim: int, target_dim: int, cond_dim: int): """Setup the model. 
diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index d44db4476..9480eb3cd 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -22,9 +22,9 @@ import optax from ott.geometry import costs -from ott.neural.flows.genot import GENOTLin, GENOTQuad -from ott.neural.flows.models import VelocityField -from ott.neural.flows.samplers import uniform_sampler +from ott.neural.flow_models.genot import GENOTLin, GENOTQuad +from ott.neural.flow_models.models import VelocityField +from ott.neural.flow_models.samplers import uniform_sampler from ott.neural.models import base_solver from ott.neural.models.nets import RescalingMLP from ott.solvers.linear import sinkhorn diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index d660ec33f..d66ea1611 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -21,7 +21,7 @@ import optax -from ott.neural.flows import flows, models, otfm, samplers +from ott.neural.flow_models import flows, models, otfm, samplers from ott.neural.models import base_solver, nets from ott.solvers.linear import sinkhorn From 1e21afb26016a1f250c0b4a8edf445e8c4694404 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Fri, 16 Feb 2024 01:34:50 +0100 Subject: [PATCH 093/186] add docs --- docs/neural/data.rst | 21 ++++++++++++++++ docs/neural/duality.rst | 37 ++++++++++++++++++++++++++++ docs/neural/flow_models.rst | 48 +++++++++++++++++++++++++++++++++++++ docs/neural/gap_models.rst | 26 ++++++++++++++++++++ docs/neural/index.rst | 31 ++++-------------------- docs/neural/models.rst | 0 6 files changed, 137 insertions(+), 26 deletions(-) create mode 100644 docs/neural/data.rst create mode 100644 docs/neural/duality.rst create mode 100644 docs/neural/flow_models.rst create mode 100644 docs/neural/gap_models.rst create mode 100644 docs/neural/models.rst diff --git a/docs/neural/data.rst b/docs/neural/data.rst new file mode 100644 index 000000000..8e13e7631 --- /dev/null +++ b/docs/neural/data.rst 
@@ -0,0 +1,21 @@ +ott.neural.data +=============== +.. module:: ott.neural.data +.. currentmodule:: ott.neural.data + +The :mod:`ott.problems.data` contains data sets and data loaders needed +for solving (conditional) neural optimal transport problems. + +Datasets +-------- +.. autosummary:: + :toctree: _autosummary + + dataloaders.OTDataset + +Dataloaders +----------- +.. autosummary:: + :toctree: _autosummary + + dataloaders.ConditionalOTDataLoader diff --git a/docs/neural/duality.rst b/docs/neural/duality.rst new file mode 100644 index 000000000..ea3f67bdf --- /dev/null +++ b/docs/neural/duality.rst @@ -0,0 +1,37 @@ +ott.neural.duality +================== +.. module:: ott.neural.duality +.. currentmodule:: ott.neural.duality + +This module implements various solvers to estimate optimal transport between +two probability measures, through samples, parameterized as neural networks. +These solvers build uponn dual formulation of the optimal transport problem. + +Solvers +------- +.. autosummary:: + :toctree: _autosummary + + neuraldual.W2NeuralDual + neuraldual.BaseW2NeuralDual + +Conjugate Solvers +----------------- +.. autosummary:: + :toctree: _autosummary + + conjugate.FenchelConjugateLBFGS + conjugate.FenchelConjugateSolver + conjugate.ConjugateResults + +Models +------ +.. autosummary:: + :toctree: _autosummary + + neuraldual.W2NeuralTrainState + neuraldual.BaseW2NeuralDual + neuraldual.W2NeuralDual + models.ICNN + models.PotentialMLP + models.MetaInitializer diff --git a/docs/neural/flow_models.rst b/docs/neural/flow_models.rst new file mode 100644 index 000000000..5d9d1f594 --- /dev/null +++ b/docs/neural/flow_models.rst @@ -0,0 +1,48 @@ +ott.neural.flow_models +====================== +.. module:: ott.neural.flow_models +.. currentmodule:: ott.neural.flow_models + +This module implements various solvers building upon flow matching +:cite:`lipman:22` to match distributions. + +Flows +----- +.. 
autosummary:: + :toctree: _autosummary + + flows.BaseFlow + flows.StraightFlow + flows.ConstantNoiseFlow + flows.BrownianNoiseFlow + +Optimal Transport Flow Matching +------------------------------- +.. autosummary:: + :toctree: _autosummary + + otfm.OTFlowMatching + +GENOT +----- +.. autosummary:: + :toctree: _autosummary + + genot.GENOTBase + genot.GENOTLin + genot.GENOTQuad + +Models +------ +.. autosummary:: + :toctree: _autosummary + + models.VelocityField + +Utils +----- +.. autosummary:: + :toctree: _autosummary + + layers.CyclicalTimeEncoder + samplers.uniform_sampler diff --git a/docs/neural/gap_models.rst b/docs/neural/gap_models.rst new file mode 100644 index 000000000..bacc93c71 --- /dev/null +++ b/docs/neural/gap_models.rst @@ -0,0 +1,26 @@ +ott.neural.models +================= +.. module:: ott.neural.models +.. currentmodule:: ott.neural.models + +This module implements models, network architectures and helper +functions which apply to various neural optimal transport solvers. + +Utils +----- +.. autosummary:: + :toctree: _autosummary + + base_solver.BaseOTMatcher + base_solver.OTMatcherLinear + base_solver.OTMatcherQuad + base_solver.UnbalancednessHandler + + +Neural networks +--------------- +.. autosummary:: + :toctree: _autosummary + + layers.MLPBlock + nets.RescalingMLP diff --git a/docs/neural/index.rst b/docs/neural/index.rst index d0315edae..06d9fd97b 100644 --- a/docs/neural/index.rst +++ b/docs/neural/index.rst @@ -13,29 +13,8 @@ and solvers to estimate such neural networks. .. toctree:: :maxdepth: 2 - solvers - -Models ------- -.. autosummary:: - :toctree: _autosummary - - models.ICNN - models.MLP - models.MetaInitializer - -Losses ------- -.. autosummary:: - :toctree: _autosummary - - losses.monge_gap - losses.monge_gap_from_samples - -Layers ------- -.. 
autosummary:: - :toctree: _autosummary - - layers.PositiveDense - layers.PosDefPotentials + data + duality + flow_models + gaps + models diff --git a/docs/neural/models.rst b/docs/neural/models.rst new file mode 100644 index 000000000..e69de29bb From 1afb922ca24a4bb91e1f95bde65c11d01fe90b30 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Fri, 16 Feb 2024 12:43:52 +0100 Subject: [PATCH 094/186] incorporate more changes --- docs/neural/data.rst | 2 +- docs/neural/gap_models.rst | 26 ------- docs/neural/gaps.rst | 15 ++++ docs/neural/models.rst | 26 +++++++ docs/neural/solvers.rst | 28 ------- pyproject.toml | 1 + src/ott/neural/duality/neuraldual.py | 8 +- src/ott/neural/flow_models/genot.py | 56 +++++++------- src/ott/neural/models/base_solver.py | 105 ++++++++++++--------------- tests/neural/genot_test.py | 64 ++++++++++++---- tests/neural/otfm_test.py | 22 +++--- 11 files changed, 185 insertions(+), 168 deletions(-) delete mode 100644 docs/neural/gap_models.rst create mode 100644 docs/neural/gaps.rst delete mode 100644 docs/neural/solvers.rst diff --git a/docs/neural/data.rst b/docs/neural/data.rst index 8e13e7631..970499ff5 100644 --- a/docs/neural/data.rst +++ b/docs/neural/data.rst @@ -11,7 +11,7 @@ Datasets .. autosummary:: :toctree: _autosummary - dataloaders.OTDataset + dataloaders.OTDataSet Dataloaders ----------- diff --git a/docs/neural/gap_models.rst b/docs/neural/gap_models.rst deleted file mode 100644 index bacc93c71..000000000 --- a/docs/neural/gap_models.rst +++ /dev/null @@ -1,26 +0,0 @@ -ott.neural.models -================= -.. module:: ott.neural.models -.. currentmodule:: ott.neural.models - -This module implements models, network architectures and helper -functions which apply to various neural optimal transport solvers. - -Utils ------ -.. autosummary:: - :toctree: _autosummary - - base_solver.BaseOTMatcher - base_solver.OTMatcherLinear - base_solver.OTMatcherQuad - base_solver.UnbalancednessHandler - - -Neural networks ---------------- -.. 
autosummary:: - :toctree: _autosummary - - layers.MLPBlock - nets.RescalingMLP diff --git a/docs/neural/gaps.rst b/docs/neural/gaps.rst new file mode 100644 index 000000000..abf621e24 --- /dev/null +++ b/docs/neural/gaps.rst @@ -0,0 +1,15 @@ +ott.neural.gaps +=============== +.. module:: ott.neural.gaps +.. currentmodule:: ott.neural.gaps + +This module implements gap models. + +Monge gap +--------- +.. autosummary:: + :toctree: _autosummary + + map_estimator.MapEstimator + monge_gap.monge_gap + monge_gap.monge_gap_from_samples diff --git a/docs/neural/models.rst b/docs/neural/models.rst index e69de29bb..bacc93c71 100644 --- a/docs/neural/models.rst +++ b/docs/neural/models.rst @@ -0,0 +1,26 @@ +ott.neural.models +================= +.. module:: ott.neural.models +.. currentmodule:: ott.neural.models + +This module implements models, network architectures and helper +functions which apply to various neural optimal transport solvers. + +Utils +----- +.. autosummary:: + :toctree: _autosummary + + base_solver.BaseOTMatcher + base_solver.OTMatcherLinear + base_solver.OTMatcherQuad + base_solver.UnbalancednessHandler + + +Neural networks +--------------- +.. autosummary:: + :toctree: _autosummary + + layers.MLPBlock + nets.RescalingMLP diff --git a/docs/neural/solvers.rst b/docs/neural/solvers.rst deleted file mode 100644 index c405d89ba..000000000 --- a/docs/neural/solvers.rst +++ /dev/null @@ -1,28 +0,0 @@ -ott.neural.solvers -================== -.. module:: ott.neural.solvers -.. currentmodule:: ott.neural.solvers - -This module implements various solvers to estimate optimal transport between -two probability measures, through samples, parameterized as neural networks. -These neural networks are described in :mod:`ott.neural.models`, borrowing -lower-level components from :mod:`ott.neural.layers` using -`flax `__. - -Solvers -------- -.. 
autosummary:: - :toctree: _autosummary - - map_estimator.MapEstimator - neuraldual.W2NeuralDual - neuraldual.BaseW2NeuralDual - -Conjugate Solvers ------------------ -.. autosummary:: - :toctree: _autosummary - - conjugate.FenchelConjugateLBFGS - conjugate.FenchelConjugateSolver - conjugate.ConjugateResults diff --git a/pyproject.toml b/pyproject.toml index 7306525b1..1c71241f1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -79,6 +79,7 @@ test = [ "tslearn>=0.5; python_version < '3.12'", "lineax; python_version >= '3.9'", "matplotlib", + "torch" ] docs = [ "sphinx>=4.0", diff --git a/src/ott/neural/duality/neuraldual.py b/src/ott/neural/duality/neuraldual.py index 3ea88f74a..c00acb76c 100644 --- a/src/ott/neural/duality/neuraldual.py +++ b/src/ott/neural/duality/neuraldual.py @@ -53,7 +53,7 @@ class W2NeuralTrainState(train_state.TrainState): This extends :class:`~flax.training.train_state.TrainState` to include the potential methods from the - :class:`~ott.neural.solvers.neuraldual.BaseW2NeuralDual` used during training. + :class:`~ott.neural.duality.neuraldual.BaseW2NeuralDual` used during training. Args: potential_value_fn: the potential's value function @@ -186,10 +186,10 @@ class W2NeuralDual: transport map from :math:`\beta` to :math:`\alpha`. This solver estimates the conjugate :math:`f^\star` with a neural approximation :math:`g` that is fine-tuned - with :class:`~ott.neural.solvers.conjugate.FenchelConjugateSolver`, + with :class:`~ott.neural.duality.conjugate.FenchelConjugateSolver`, which is a combination further described in :cite:`amos:23`. - The :class:`~ott.neural.solvers.neuraldual.BaseW2NeuralDual` potentials for + The :class:`~ott.neural.duality.neuraldual.BaseW2NeuralDual` potentials for ``neural_f`` and ``neural_g`` can 1. both provide the values of the potentials :math:`f` and :math:`g`, or @@ -198,7 +198,7 @@ class W2NeuralDual: via the Fenchel conjugate as discussed in :cite:`amos:23`. 
The potential's value or gradient mapping is specified via - :attr:`~ott.neural.solvers.neuraldual.BaseW2NeuralDual.is_potential`. + :attr:`~ott.neural.duality.neuraldual.BaseW2NeuralDual.is_potential`. Args: dim_data: input dimensionality of data required for network init diff --git a/src/ott/neural/flow_models/genot.py b/src/ott/neural/flow_models/genot.py index c900d1268..ff9b12c82 100644 --- a/src/ott/neural/flow_models/genot.py +++ b/src/ott/neural/flow_models/genot.py @@ -84,6 +84,7 @@ def __init__( velocity_field: Callable[[ jnp.ndarray, jnp.ndarray, Optional[jnp.ndarray], Optional[jnp.ndarray] ], jnp.ndarray], + *, input_dim: int, output_dim: int, cond_dim: int, @@ -194,6 +195,8 @@ def transport( condition: Optional[jnp.ndarray] = None, rng: Optional[jax.Array] = None, forward: bool = True, + t_0: float = 0.0, + t_1: float = 1.0, **kwargs: Any, ) -> Union[jnp.array, diffrax.Solution, Optional[jnp.ndarray]]: """Transport data with the learnt plan. @@ -207,6 +210,8 @@ def transport( condition: Condition of the input data. rng: random seed for sampling from the latent distribution. forward: If `True` integrates forward, otherwise backwards. + t_0: Starting time of integration of neural ODE. + t_1: End time of integration of neural ODE. kwargs: Keyword arguments for the ODE solver. 
Returns: @@ -217,36 +222,37 @@ def transport( rng = utils.default_prng_key(rng) if not forward: raise NotImplementedError - assert len(source) == len(condition) if condition is not None else True - + if condition is not None: + assert len(source) == len(condition), (len(source), len(condition)) latent_batch = self.latent_noise_fn(rng, shape=(len(source),)) - cond_input = source if condition is None else jnp.concatenate([ - source, condition - ], - axis=-1) - t0, t1 = (0.0, 1.0) + cond_input = source if condition is None else ( + jnp.concatenate([source, condition], axis=-1) + ) @jax.jit def solve_ode(input: jnp.ndarray, cond: jnp.ndarray) -> jnp.ndarray: - return diffrax.diffeqsolve( - diffrax.ODETerm( - lambda t, x, args: self.state_velocity_field. - apply_fn({"params": self.state_velocity_field.params}, - t=t, - x=x, - condition=cond) - ), - kwargs.pop("solver", diffrax.Tsit5()), - t0=t0, - t1=t1, + ode_term = diffrax.ODETerm( + lambda t, x, args: self.state_velocity_field. + apply_fn({"params": self.state_velocity_field.params}, + t=t, + x=x, + condition=cond) + ), + solver = kwargs.pop("solver", diffrax.Tsit5()) + stepsize_controller = kwargs.pop( + "stepsize_controller", diffrax.PIDController(rtol=1e-5, atol=1e-5) + ) + sol = diffrax.diffeqsolve( + ode_term, + solver, + t0=t_0, + t1=t_1, dt0=kwargs.pop("dt0", None), y0=input, - stepsize_controller=kwargs.pop( - "stepsize_controller", - diffrax.PIDController(rtol=1e-5, atol=1e-5) - ), + stepsize_controller=stepsize_controller, **kwargs, - ).ys[0] + ) + return sol.ys[0] return jax.vmap(solve_ode)(latent_batch, cond_input) @@ -264,8 +270,8 @@ def learn_rescaling(self) -> bool: def _reshape_samples(self, arrays: Tuple[jnp.ndarray, ...], batch_size: int) -> Tuple[jnp.ndarray, ...]: return jax.tree_util.tree_map( - lambda x: jnp.reshape(x, (batch_size * self.k_samples_per_x, -1)) - if x is not None else None, arrays + lambda x: jnp.reshape(x, (batch_size * self.k_samples_per_x, -1)), + arrays ) diff --git 
a/src/ott/neural/models/base_solver.py b/src/ott/neural/models/base_solver.py index c0da4db6a..042e9fd0c 100644 --- a/src/ott/neural/models/base_solver.py +++ b/src/ott/neural/models/base_solver.py @@ -25,7 +25,17 @@ from ott.problems.quadratic import quadratic_problem from ott.solvers import was_solver from ott.solvers.linear import sinkhorn -from ott.solvers.quadratic import gromov_wasserstein +from ott.solvers.quadratic import gromov_wasserstein, gromov_wasserstein_lr + +Scale_cost_lin_t = Union[bool, int, float, Literal["mean", "max_cost", + "median"]] +Scale_cost_quad_t = Union[Union[bool, int, float, + Literal["mean", "max_norm", "max_bound", + "max_cost", "median"]], + Dict[str, + Union[bool, int, float, + Literal["mean", "max_norm", "max_bound", + "max_cost", "median"]]]], __all__ = [ "BaseOTMatcher", "OTMatcherLinear", "OTMatcherQuad", "UnbalancednessHandler" @@ -57,12 +67,7 @@ def match_pairs(x: jnp.ndarray, y: jnp.ndarray) -> jnp.ndarray: def _get_gromov_match_fn( ot_solver: Any, cost_fn: Union[Any, Mapping[str, Any]], - scale_cost: Union[Union[bool, int, float, - Literal["mean", "max_norm", "max_bound", "max_cost", - "median"]], - Dict[str, Union[bool, int, float, - Literal["mean", "max_norm", "max_bound", - "max_cost", "median"]]]], + scale_cost: Scale_cost_quad_t, tau_a: float, tau_b: float, fused_penalty: float, @@ -150,18 +155,17 @@ def sample_joint( ) indices_source = indices // n_tgt indices_target = indices % n_tgt - return tree_util.tree_map( - lambda b: b[indices_source] if b is not None else b, source_arrays - ), tree_util.tree_map( - lambda b: b[indices_target] if b is not None else b, target_arrays - ) + return tree_util.tree_map(lambda b: b[indices_source], + source_arrays), tree_util.tree_map( + lambda b: b[indices_target], target_arrays + ) def sample_conditional_indices_from_tmap( self, rng: jax.Array, conditional_distributions: jnp.ndarray, *, - k_samples_per_x: Union[int, jnp.ndarray], + k_samples_per_x: int, source_arrays: 
Tuple[Optional[jnp.ndarray], ...], target_arrays: Tuple[Optional[jnp.ndarray], ...], source_is_balanced: bool, @@ -198,7 +202,7 @@ def sample_conditional_indices_from_tmap( tmat_adapted = conditional_distributions[indices] indices_per_row = jax.vmap( lambda row: jax.random. - choice(key=rng, a=jnp.arange(n_tgt), p=row, shape=(k_samples_per_x,)), + choice(key=rng, a=n_tgt, p=row, shape=(k_samples_per_x,)), in_axes=0, out_axes=0, )( @@ -211,12 +215,12 @@ def sample_conditional_indices_from_tmap( ) return tree_util.tree_map( lambda b: jnp. - reshape(b[indices_source], (k_samples_per_x, n_src, *b.shape[1:])) - if b is not None else None, source_arrays + reshape(b[indices_source], + (k_samples_per_x, n_src, *b.shape[1:])), source_arrays ), tree_util.tree_map( lambda b: jnp. - reshape(b[indices_target], (k_samples_per_x, n_src, *b.shape[1:])) - if b is not None else b, target_arrays + reshape(b[indices_target], + (k_samples_per_x, n_src, *b.shape[1:])), target_arrays ) @@ -232,12 +236,12 @@ class OTMatcherLinear(BaseOTMatcher): def __init__( self, - ot_solver: was_solver.WassersteinSolver, + ot_solver: sinkhorn.Sinkhorn, epsilon: float = 1e-2, cost_fn: Optional[costs.CostFn] = None, scale_cost: Union[bool, int, float, Literal["mean", "max_norm", "max_bound", "max_cost", - "median"]] = "mean", + "median"]] = 1.0, tau_a: float = 1.0, tau_b: float = 1.0, ) -> None: @@ -283,11 +287,10 @@ class OTMatcherQuad(BaseOTMatcher): def __init__( self, - ot_solver: was_solver.WassersteinSolver, + ot_solver: Union[gromov_wasserstein.GromovWasserstein, + gromov_wasserstein_lr.LRGromovWasserstein], cost_fn: Optional[costs.CostFn] = None, - scale_cost: Union[bool, int, float, - Literal["mean", "max_norm", "max_bound", "max_cost", - "median"]] = "mean", + scale_cost: Scale_cost_quad_t = 1.0, tau_a: float = 1.0, tau_b: float = 1.0, fused_penalty: float = 0.0, @@ -347,7 +350,8 @@ class UnbalancednessHandler: is not available, and hence the unbalanced marginals must be computed by the neural 
solver. kwargs: Additional keyword arguments. - """ # noqa: E501 # TODO(MUCDK): fix me + + """ # noqa: E501 def __init__( self, @@ -364,8 +368,7 @@ def __init__( opt_eta: Optional[optax.GradientTransformation] = None, opt_xi: Optional[optax.GradientTransformation] = None, resample_epsilon: float = 1e-2, - scale_cost: Union[bool, int, float, Literal["mean", "max_cost", - "median"]] = "mean", + scale_cost: Union[Scale_cost_lin_t, Scale_cost_quad_t] = 1.0, ot_solver: Optional[was_solver.WassersteinSolver] = None, **kwargs: Mapping[str, Any], ): @@ -443,9 +446,7 @@ def resample_unbalanced( Resampled arrays. """ indices = jax.random.choice(rng, a=len(p), p=jnp.squeeze(p), shape=[len(p)]) - return tree_util.tree_map( - lambda b: b[indices] if b is not None else b, arrays - ) + return tree_util.tree_map(lambda b: b[indices], arrays) def setup(self, source_dim: int, target_dim: int, cond_dim: int): """Setup the model. @@ -479,37 +480,19 @@ def setup(self, source_dim: int, target_dim: int, cond_dim: int): def _get_rescaling_step_fn(self) -> Callable: # type:ignore[type-arg] - def loss_a_fn( - params_eta: Optional[jnp.ndarray], - apply_fn_eta: Callable[[Dict[str, jnp.ndarray], jnp.ndarray], - jnp.ndarray], + def loss_marginal_fn( + params: jnp.ndarray, + apply_fn: Callable[[Dict[str, jnp.ndarray], jnp.ndarray], + Optional[jnp.ndarray]], x: jnp.ndarray, condition: Optional[jnp.ndarray], - a: jnp.ndarray, + true_marginals: jnp.ndarray, expectation_reweighting: float, ) -> Tuple[float, jnp.ndarray]: - eta_predictions = apply_fn_eta({"params": params_eta}, x, condition) - return ( - optax.l2_loss(eta_predictions[:, 0], a).mean() + - optax.l2_loss(jnp.mean(eta_predictions) - expectation_reweighting), - eta_predictions, - ) - - def loss_b_fn( - params_xi: Optional[jnp.ndarray], - apply_fn_xi: Callable[[Dict[str, jnp.ndarray], jnp.ndarray], - jnp.ndarray], - x: jnp.ndarray, - condition: Optional[jnp.ndarray], - b: jnp.ndarray, - expectation_reweighting: float, - ) -> Tuple[float, 
jnp.ndarray]: - xi_predictions = apply_fn_xi({"params": params_xi}, x, condition) - return ( - optax.l2_loss(xi_predictions[:, 0], b).mean() + - optax.l2_loss(jnp.mean(xi_predictions) - expectation_reweighting), - xi_predictions, - ) + predictions = apply_fn({"params": params}, x, condition) + pred_loss = optax.l2_loss(jnp.squeeze(predictions), true_marginals).mean() + exp_loss = optax.l2_loss(jnp.mean(predictions) - expectation_reweighting) + return (pred_loss + exp_loss, predictions) @jax.jit def step_fn( @@ -524,7 +507,9 @@ def step_fn( is_training: bool = True, ): if state_eta is not None: - grad_a_fn = jax.value_and_grad(loss_a_fn, argnums=0, has_aux=True) + grad_a_fn = jax.value_and_grad( + loss_marginal_fn, argnums=0, has_aux=True + ) (loss_a, eta_predictions), grads_eta = grad_a_fn( state_eta.params, state_eta.apply_fn, @@ -540,7 +525,9 @@ def step_fn( else: new_state_eta = eta_predictions = loss_a = None if state_xi is not None: - grad_b_fn = jax.value_and_grad(loss_b_fn, argnums=0, has_aux=True) + grad_b_fn = jax.value_and_grad( + loss_marginal_fn, argnums=0, has_aux=True + ) (loss_b, xi_predictions), grads_xi = grad_b_fn( state_xi.params, state_xi.apply_fn, diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 9480eb3cd..e924edd9a 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -27,8 +27,8 @@ from ott.neural.flow_models.samplers import uniform_sampler from ott.neural.models import base_solver from ott.neural.models.nets import RescalingMLP -from ott.solvers.linear import sinkhorn -from ott.solvers.quadratic import gromov_wasserstein +from ott.solvers.linear import sinkhorn, sinkhorn_lr +from ott.solvers.quadratic import gromov_wasserstein, gromov_wasserstein_lr class TestGENOTLin: @@ -36,10 +36,14 @@ class TestGENOTLin: @pytest.mark.parametrize("scale_cost", ["mean", 2.0]) @pytest.mark.parametrize("k_samples_per_x", [1, 3]) @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) + 
@pytest.mark.parametrize("solver", ["sinkhorn", "lr_sinkhorn"]) def test_genot_linear_unconditional( - self, genot_data_loader_linear: Iterator, - scale_cost: Union[float, Literal["mean"]], k_samples_per_x: int, - solver_latent_to_data: Optional[str] + self, + genot_data_loader_linear: Iterator, + scale_cost: Union[float, Literal["mean"]], + k_samples_per_x: int, + solver_latent_to_data: Optional[str], + solver: Literal["sinkhorn", "lr_sinkhorn"], ): matcher_latent_to_data = ( None if solver_latent_to_data is None else @@ -62,7 +66,8 @@ def test_genot_linear_unconditional( condition_dim=source_dim + condition_dim, latent_embed_dim=5, ) - ot_solver = sinkhorn.Sinkhorn() + ot_solver = sinkhorn.Sinkhorn( + ) if solver == "sinkhorn" else sinkhorn_lr.LRSinkhorn(rank=3) ot_matcher = base_solver.OTMatcherLinear( ot_solver, cost_fn=costs.SqEuclidean(), scale_cost=scale_cost ) @@ -96,9 +101,11 @@ def test_genot_linear_unconditional( @pytest.mark.parametrize("k_samples_per_x", [1, 2]) @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) + @pytest.mark.parametrize("solver", ["sinkhorn", "lr_sinkhorn"]) def test_genot_linear_conditional( self, genot_data_loader_linear_conditional: Iterator, - k_samples_per_x: int, solver_latent_to_data: Optional[str] + k_samples_per_x: int, solver_latent_to_data: Optional[str], + solver: Literal["sinkhorn", "lr_sinkhorn"] ): matcher_latent_to_data = ( None if solver_latent_to_data is None else @@ -121,7 +128,8 @@ def test_genot_linear_conditional( condition_dim=source_dim + condition_dim, latent_embed_dim=5, ) - ot_solver = sinkhorn.Sinkhorn() + ot_solver = sinkhorn.Sinkhorn( + ) if solver == "sinkhorn" else sinkhorn_lr.LRSinkhorn(rank=3) ot_matcher = base_solver.OTMatcherLinear( ot_solver, cost_fn=costs.SqEuclidean() ) @@ -243,9 +251,11 @@ class TestGENOTQuad: @pytest.mark.parametrize("k_samples_per_x", [1, 2]) @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) + @pytest.mark.parametrize("solver", 
["gromov", "gromov_lr"]) def test_genot_quad_unconditional( self, genot_data_loader_quad: Iterator, k_samples_per_x: int, - solver_latent_to_data: Optional[str] + solver_latent_to_data: Optional[str], solver: Literal["gromov", + "gromov_lr"] ): matcher_latent_to_data = ( None if solver_latent_to_data is None else @@ -266,7 +276,11 @@ def test_genot_quad_unconditional( condition_dim=source_dim + condition_dim, latent_embed_dim=5, ) - ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) + ot_solver = gromov_wasserstein.GromovWasserstein( + epsilon=1e-2 + ) if solver == "gromov" else gromov_wasserstein_lr.LRGromovWasserstein( + rank=3, epsilon=1e-2 + ) ot_matcher = base_solver.OTMatcherQuad( ot_solver, cost_fn=costs.SqEuclidean() ) @@ -301,9 +315,11 @@ def test_genot_quad_unconditional( @pytest.mark.parametrize("k_samples_per_x", [1, 2]) @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) + @pytest.mark.parametrize("solver", ["gromov", "gromov_lr"]) def test_genot_fused_unconditional( self, genot_data_loader_fused: Iterator, k_samples_per_x: int, - solver_latent_to_data: Optional[str] + solver_latent_to_data: Optional[str], solver: Literal["gromov", + "gromov_lr"] ): matcher_latent_to_data = ( None if solver_latent_to_data is None else @@ -326,7 +342,11 @@ def test_genot_fused_unconditional( condition_dim=source_dim + condition_dim, latent_embed_dim=5, ) - ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) + ot_solver = gromov_wasserstein.GromovWasserstein( + epsilon=1e-2 + ) if solver == "gromov" else gromov_wasserstein_lr.LRGromovWasserstein( + rank=3, epsilon=1e-2 + ) ot_matcher = base_solver.OTMatcherQuad( ot_solver, cost_fn=costs.SqEuclidean(), fused_penalty=0.5 ) @@ -361,9 +381,11 @@ def test_genot_fused_unconditional( @pytest.mark.parametrize("k_samples_per_x", [1, 2]) @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) + @pytest.mark.parametrize("solver", ["gromov", "gromov_lr"]) def 
test_genot_quad_conditional( self, genot_data_loader_quad_conditional: Iterator, k_samples_per_x: int, - solver_latent_to_data: Optional[str] + solver_latent_to_data: Optional[str], solver: Literal["gromov", + "gromov_lr"] ): matcher_latent_to_data = ( None if solver_latent_to_data is None else @@ -385,7 +407,11 @@ def test_genot_quad_conditional( condition_dim=source_dim + condition_dim, latent_embed_dim=5, ) - ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) + ot_solver = gromov_wasserstein.GromovWasserstein( + epsilon=1e-2 + ) if solver == "gromov" else gromov_wasserstein_lr.LRGromovWasserstein( + rank=3, epsilon=1e-2 + ) ot_matcher = base_solver.OTMatcherQuad( ot_solver, cost_fn=costs.SqEuclidean() ) @@ -421,9 +447,11 @@ def test_genot_quad_conditional( @pytest.mark.parametrize("k_samples_per_x", [1, 2]) @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) + @pytest.mark.parametrize("solver", ["gromov", "gromov_lr"]) def test_genot_fused_conditional( self, genot_data_loader_fused_conditional: Iterator, k_samples_per_x: int, - solver_latent_to_data: Optional[str] + solver_latent_to_data: Optional[str], solver: Literal["gromov", + "gromov_lr"] ): solver_latent_to_data = ( None if solver_latent_to_data is None else sinkhorn.Sinkhorn() @@ -448,7 +476,11 @@ def test_genot_fused_conditional( condition_dim=source_dim + condition_dim, latent_embed_dim=5, ) - ot_solver = gromov_wasserstein.GromovWasserstein(epsilon=1e-2) + ot_solver = gromov_wasserstein.GromovWasserstein( + epsilon=1e-2 + ) if solver == "gromov" else gromov_wasserstein_lr.LRGromovWasserstein( + rank=3, epsilon=1e-2 + ) ot_matcher = base_solver.OTMatcherQuad( ot_solver, cost_fn=costs.SqEuclidean(), fused_penalty=0.5 ) diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index d66ea1611..4e68315ff 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # 
limitations under the License. import functools -from typing import Iterator, Type +from typing import Iterator, Literal, Type import pytest @@ -87,8 +87,8 @@ def test_flow_matching_unconditional( @pytest.mark.parametrize( "flow", [ flows.ConstantNoiseFlow(0.0), - flows.ConstantNoiseFlow(1.0), - flows.BrownianNoiseFlow(0.2) + flows.ConstantNoiseFlow(1.1), + flows.BrownianNoiseFlow(2.2) ] ) def test_flow_matching_with_conditions( @@ -145,14 +145,17 @@ def test_flow_matching_with_conditions( assert jnp.sum(jnp.isnan(result_backward)) == 0 @pytest.mark.parametrize( - "flow", [ + "flow", + [ flows.ConstantNoiseFlow(0.0), - flows.ConstantNoiseFlow(1.0), - flows.BrownianNoiseFlow(0.2) - ] + flows.ConstantNoiseFlow(13.0), + flows.BrownianNoiseFlow(0.12) + ], ) + @pytest.mark.parametrize("solver", ["sinkhorn", "lr_sinkhorn"]) def test_flow_matching_conditional( - self, data_loader_gaussian_conditional, flow: Type[flows.BaseFlow] + self, data_loader_gaussian_conditional, flow: Type[flows.BaseFlow], + solver: Literal["sinkhorn", "lr_sinkhorn"] ): dim = 2 condition_dim = 0 @@ -161,7 +164,8 @@ def test_flow_matching_conditional( condition_dim=condition_dim, latent_embed_dim=5, ) - ot_solver = sinkhorn.Sinkhorn() + ot_solver = sinkhorn.Sinkhorn( + ) if solver == "sinkhorn" else sinkhorn.LRSinkhorn() ot_matcher = base_solver.OTMatcherLinear(ot_solver) time_sampler = samplers.uniform_sampler optimizer = optax.adam(learning_rate=1e-3) From dc436f4daff37315ff7ecd313e4f5fb696061ce3 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Fri, 16 Feb 2024 12:48:05 +0100 Subject: [PATCH 095/186] problem with custom type --- src/ott/neural/models/base_solver.py | 22 ++++++++++------------ 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/src/ott/neural/models/base_solver.py b/src/ott/neural/models/base_solver.py index 042e9fd0c..1742a2a2b 100644 --- a/src/ott/neural/models/base_solver.py +++ b/src/ott/neural/models/base_solver.py @@ -27,15 +27,13 @@ from ott.solvers.linear 
import sinkhorn from ott.solvers.quadratic import gromov_wasserstein, gromov_wasserstein_lr -Scale_cost_lin_t = Union[bool, int, float, Literal["mean", "max_cost", - "median"]] -Scale_cost_quad_t = Union[Union[bool, int, float, - Literal["mean", "max_norm", "max_bound", - "max_cost", "median"]], - Dict[str, - Union[bool, int, float, - Literal["mean", "max_norm", "max_bound", - "max_cost", "median"]]]], +ScaleCostLin_t = Union[bool, int, float, Literal["mean", "max_cost", "median"]] +ScaleCostQuad_t = Union[Union[bool, int, float, + Literal["mean", "max_norm", "max_bound", + "max_cost", "median"]], + Dict[str, Union[bool, int, float, + Literal["mean", "max_norm", "max_bound", + "max_cost", "median"]]]], __all__ = [ "BaseOTMatcher", "OTMatcherLinear", "OTMatcherQuad", "UnbalancednessHandler" @@ -67,7 +65,7 @@ def match_pairs(x: jnp.ndarray, y: jnp.ndarray) -> jnp.ndarray: def _get_gromov_match_fn( ot_solver: Any, cost_fn: Union[Any, Mapping[str, Any]], - scale_cost: Scale_cost_quad_t, + scale_cost: ScaleCostQuad_t, tau_a: float, tau_b: float, fused_penalty: float, @@ -290,7 +288,7 @@ def __init__( ot_solver: Union[gromov_wasserstein.GromovWasserstein, gromov_wasserstein_lr.LRGromovWasserstein], cost_fn: Optional[costs.CostFn] = None, - scale_cost: Scale_cost_quad_t = 1.0, + scale_cost: ScaleCostQuad_t = 1.0, tau_a: float = 1.0, tau_b: float = 1.0, fused_penalty: float = 0.0, @@ -368,7 +366,7 @@ def __init__( opt_eta: Optional[optax.GradientTransformation] = None, opt_xi: Optional[optax.GradientTransformation] = None, resample_epsilon: float = 1e-2, - scale_cost: Union[Scale_cost_lin_t, Scale_cost_quad_t] = 1.0, + scale_cost: Union[ScaleCostLin_t, ScaleCostQuad_t] = 1.0, ot_solver: Optional[was_solver.WassersteinSolver] = None, **kwargs: Mapping[str, Any], ): From 8bfe1a34e930e5eb18d9ab35bc1be5fc39a2ee7f Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Fri, 16 Feb 2024 13:29:16 +0100 Subject: [PATCH 096/186] fix scale cost bug --- 
src/ott/neural/models/base_solver.py | 18 +++++------------- 1 file changed, 5 insertions(+), 13 deletions(-) diff --git a/src/ott/neural/models/base_solver.py b/src/ott/neural/models/base_solver.py index 1742a2a2b..5bfa8caa0 100644 --- a/src/ott/neural/models/base_solver.py +++ b/src/ott/neural/models/base_solver.py @@ -27,13 +27,8 @@ from ott.solvers.linear import sinkhorn from ott.solvers.quadratic import gromov_wasserstein, gromov_wasserstein_lr -ScaleCostLin_t = Union[bool, int, float, Literal["mean", "max_cost", "median"]] -ScaleCostQuad_t = Union[Union[bool, int, float, - Literal["mean", "max_norm", "max_bound", - "max_cost", "median"]], - Dict[str, Union[bool, int, float, - Literal["mean", "max_norm", "max_bound", - "max_cost", "median"]]]], +ScaleCost_t = Union[int, float, Literal["mean", "max_cost", "median"]] +ScaleCostQuad_t = Union[ScaleCost_t, Dict[str, ScaleCost_t]] __all__ = [ "BaseOTMatcher", "OTMatcherLinear", "OTMatcherQuad", "UnbalancednessHandler" @@ -44,8 +39,7 @@ def _get_sinkhorn_match_fn( ot_solver: Any, epsilon: float = 1e-2, cost_fn: Optional[costs.CostFn] = None, - scale_cost: Union[bool, int, float, Literal["mean", "max_norm", "max_bound", - "max_cost", "median"]] = "mean", + scale_cost: ScaleCost_t = 1.0, tau_a: float = 1.0, tau_b: float = 1.0, ) -> Callable: @@ -237,9 +231,7 @@ def __init__( ot_solver: sinkhorn.Sinkhorn, epsilon: float = 1e-2, cost_fn: Optional[costs.CostFn] = None, - scale_cost: Union[bool, int, float, - Literal["mean", "max_norm", "max_bound", "max_cost", - "median"]] = 1.0, + scale_cost: ScaleCost_t = 1.0, tau_a: float = 1.0, tau_b: float = 1.0, ) -> None: @@ -366,7 +358,7 @@ def __init__( opt_eta: Optional[optax.GradientTransformation] = None, opt_xi: Optional[optax.GradientTransformation] = None, resample_epsilon: float = 1e-2, - scale_cost: Union[ScaleCostLin_t, ScaleCostQuad_t] = 1.0, + scale_cost: Union[ScaleCost_t, ScaleCostQuad_t] = 1.0, ot_solver: Optional[was_solver.WassersteinSolver] = None, **kwargs: 
Mapping[str, Any], ): From 2a1f23addb2567cccce007df6be8c8efe8111713 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Fri, 16 Feb 2024 14:14:35 +0100 Subject: [PATCH 097/186] fix bugs --- src/ott/neural/flow_models/genot.py | 2 ++ src/ott/neural/models/base_solver.py | 4 ++-- tests/neural/otfm_test.py | 4 ++-- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/src/ott/neural/flow_models/genot.py b/src/ott/neural/flow_models/genot.py index ff9b12c82..101c32978 100644 --- a/src/ott/neural/flow_models/genot.py +++ b/src/ott/neural/flow_models/genot.py @@ -290,6 +290,7 @@ def __call__(self, train_loader, valid_loader): valid_loader: Data loader for the validation data. """ iter = -1 + stop = False while True: for batch in train_loader: iter += 1 @@ -388,6 +389,7 @@ def __call__(self, train_loader, valid_loader): """ batch: Dict[str, jnp.array] = {} iter = -1 + stop = False while True: for batch in train_loader: iter += 1 diff --git a/src/ott/neural/models/base_solver.py b/src/ott/neural/models/base_solver.py index 5bfa8caa0..e9be6baa8 100644 --- a/src/ott/neural/models/base_solver.py +++ b/src/ott/neural/models/base_solver.py @@ -88,8 +88,8 @@ def _get_gromov_match_fn( @jax.jit def match_pairs( - x_quad: Tuple[jnp.ndarray, jnp.ndarray], - y_quad: Tuple[jnp.ndarray, jnp.ndarray], + x_quad: jnp.ndarray, + y_quad: jnp.ndarray, x_lin: Optional[jnp.ndarray], y_lin: Optional[jnp.ndarray], ) -> jnp.ndarray: diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index 4e68315ff..6f8d14879 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -23,7 +23,7 @@ from ott.neural.flow_models import flows, models, otfm, samplers from ott.neural.models import base_solver, nets -from ott.solvers.linear import sinkhorn +from ott.solvers.linear import sinkhorn, sinkhorn_lr class TestOTFlowMatching: @@ -165,7 +165,7 @@ def test_flow_matching_conditional( latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn( - ) if solver == "sinkhorn" else 
sinkhorn.LRSinkhorn() + ) if solver == "sinkhorn" else sinkhorn_lr.LRSinkhorn() ot_matcher = base_solver.OTMatcherLinear(ot_solver) time_sampler = samplers.uniform_sampler optimizer = optax.adam(learning_rate=1e-3) From a46405c96f90d2aa5740f1c4e9cbbade00019520 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Sun, 18 Feb 2024 18:48:12 +0100 Subject: [PATCH 098/186] fux bug in unbalancedness/rescalingMlp --- src/ott/neural/models/nets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ott/neural/models/nets.py b/src/ott/neural/models/nets.py index 0acb7daae..cad4e84c2 100644 --- a/src/ott/neural/models/nets.py +++ b/src/ott/neural/models/nets.py @@ -89,7 +89,7 @@ def __call__( out_layer = layers.MLPBlock( dim=self.hidden_dim, - out_dim=self.hidden_dim, + out_dim=1, num_layers=self.num_layers_per_block, act_fn=self.act_fn ) From 7afcac456a47a62330a4fc6b1a550d6c4dd360d3 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Sun, 18 Feb 2024 18:56:40 +0100 Subject: [PATCH 099/186] unify unbalancedness step in GENOT --- src/ott/neural/flow_models/genot.py | 52 +++++++++++++++++------------ 1 file changed, 30 insertions(+), 22 deletions(-) diff --git a/src/ott/neural/flow_models/genot.py b/src/ott/neural/flow_models/genot.py index 101c32978..4c5db7fa2 100644 --- a/src/ott/neural/flow_models/genot.py +++ b/src/ott/neural/flow_models/genot.py @@ -274,6 +274,25 @@ def _reshape_samples(self, arrays: Tuple[jnp.ndarray, ...], arrays ) + def _learn_rescaling( + self, source: jnp.ndarray, target: jnp.ndarray, + source_conditions: Optional[jnp.ndarray], tmat: jnp.ndarray + ) -> Tuple[jnp.ndarray, jnp.ndarray, float, float]: + + ( + self.state_eta, self.state_xi, eta_predictions, xi_predictions, loss_a, + loss_b + ) = self.unbalancedness_handler.step_fn( + source=source, + target=target, + condition=source_conditions, + a=tmat.sum(axis=1), + b=tmat.sum(axis=0), + state_eta=self.unbalancedness_handler.state_eta, + state_xi=self.unbalancedness_handler.state_xi, + 
) + return eta_predictions, xi_predictions, float(loss_a), float(loss_b) + class GENOTLin(GENOTBase): """Implementation of GENOT-L (:cite:`klein:23`). @@ -304,7 +323,7 @@ def __call__(self, train_loader, valid_loader): source, source_conditions, target = jnp.array( batch["source_lin"] ), jnp.array(batch["source_conditions"] - ) if len(batch["source_conditions"]) else None, jnp.array( + ) if "source_conditions" in batch else None, jnp.array( batch["target_lin"] ) @@ -353,18 +372,13 @@ def __call__(self, train_loader, valid_loader): latent, source_conditions ) if self.learn_rescaling: - ( - self.state_eta, self.state_xi, eta_predictions, xi_predictions, - loss_a, loss_b - ) = self.unbalancedness_handler.step_fn( + eta_preds, xi_preds, loss_a, loss_b = self._learn_rescaling( source=source, target=target, condition=source_conditions, - a=tmat.sum(axis=1), - b=tmat.sum(axis=0), - state_eta=self.unbalancedness_handler.state_eta, - state_xi=self.unbalancedness_handler.state_xi, + tmat=tmat ) + if iter % self.valid_freq == 0: self._valid_step(valid_loader, iter) if stop: @@ -403,12 +417,12 @@ def __call__(self, train_loader, valid_loader): ) = jax.random.split(self.rng, 6) (source_lin, source_quad, source_conditions, target_lin, target_quad) = ( - jnp.array(batch["source_lin"]) if len(batch["source_lin"]) else - None, jnp.array(batch["source_quad"]), + jnp.array(batch["source_lin"]) if "source_lin" in batch else None, + jnp.array(batch["source_quad"]), jnp.array(batch["source_conditions"]) - if len(batch["source_conditions"]) else None, - jnp.array(batch["target_lin"]) if len(batch["target_lin"]) else - None, jnp.array(batch["target_quad"]) + if "source_conditions" in batch else None, + jnp.array(batch["target_lin"]) if "target_lin" in batch else None, + jnp.array(batch["target_quad"]) ) batch_size = len(source_quad) n_samples = batch_size * self.k_samples_per_x @@ -464,17 +478,11 @@ def __call__(self, train_loader, valid_loader): latent, source_conditions ) if 
self.learn_rescaling: - ( - self.state_eta, self.state_xi, eta_predictions, xi_predictions, - loss_a, loss_b - ) = self.unbalancedness_handler.step_fn( + eta_preds, xi_preds, loss_a, loss_b = self._learn_rescaling( source=source, target=target, condition=source_conditions, - a=tmat.sum(axis=1), - b=tmat.sum(axis=0), - state_eta=self.unbalancedness_handler.state_eta, - state_xi=self.unbalancedness_handler.state_xi, + tmat=tmat ) if iter % self.valid_freq == 0: self._valid_step(valid_loader, iter) From 4fc8fe625c876209f24f9f281036fd4ff75b5b34 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Sun, 18 Feb 2024 19:20:43 +0100 Subject: [PATCH 100/186] change OTDataSet and OTFlowMatching to 4 data loaderes --- src/ott/neural/data/dataloaders.py | 83 ++++++++++------------------- src/ott/neural/flow_models/genot.py | 5 ++ src/ott/neural/flow_models/otfm.py | 24 +++++---- tests/neural/conftest.py | 59 ++++++++++---------- tests/neural/otfm_test.py | 20 ++++--- 5 files changed, 90 insertions(+), 101 deletions(-) diff --git a/src/ott/neural/data/dataloaders.py b/src/ott/neural/data/dataloaders.py index e063deefd..8083a744c 100644 --- a/src/ott/neural/data/dataloaders.py +++ b/src/ott/neural/data/dataloaders.py @@ -14,6 +14,7 @@ from typing import Any, List, Mapping, Optional import numpy as np +from jax import tree_util __all__ = ["OTDataSet", "ConditionalOTDataLoader"] @@ -22,72 +23,44 @@ class OTDataSet: """Data set for OT problems. Args: - source_lin: Linear part of the source measure. - source_quad: Quadratic part of the source measure. - target_lin: Linear part of the target measure. - target_quad: Quadratic part of the target measure. - source_conditions: Conditions of the source measure. - target_conditions: Conditions of the target measure. + lin: Linear part of the measure. + quad: Quadratic part of the measure. + conditions: Conditions of the source measure. 
""" def __init__( self, - source_lin: Optional[np.ndarray] = None, - source_quad: Optional[np.ndarray] = None, - target_lin: Optional[np.ndarray] = None, - target_quad: Optional[np.ndarray] = None, - source_conditions: Optional[np.ndarray] = None, - target_conditions: Optional[np.ndarray] = None, + lin: Optional[np.ndarray] = None, + quad: Optional[np.ndarray] = None, + conditions: Optional[np.ndarray] = None, ): - if source_lin is not None: - if source_quad is not None: - assert len(source_lin) == len(source_quad) - self.n_source = len(source_lin) + if lin is not None: + if quad is not None: + assert len(lin) == len(quad) + self.n_samples = len(lin) else: - self.n_source = len(source_lin) + self.n_samples = len(lin) else: - self.n_source = len(source_quad) - if source_conditions is not None: - assert len(source_conditions) == self.n_source - if target_lin is not None: - if target_quad is not None: - assert len(target_lin) == len(target_quad) - self.n_target = len(target_lin) - else: - self.n_target = len(target_lin) - else: - self.n_target = len(target_quad) - if target_conditions is not None: - assert len(target_conditions) == self.n_target - - self.source_lin = source_lin - self.target_lin = target_lin - self.source_quad = source_quad - self.target_quad = target_quad - self.source_conditions = source_conditions - self.target_conditions = target_conditions + self.n_samples = len(quad) + if conditions is not None: + assert len(conditions) == self.n_samples + + self.lin = lin + self.quad = quad + self.conditions = conditions + self._tree = {} + if lin is not None: + self._tree["lin"] = lin + if quad is not None: + self._tree["quad"] = quad + if conditions is not None: + self._tree["conditions"] = conditions def __getitem__(self, idx: np.ndarray) -> Mapping[str, np.ndarray]: - return { - "source_lin": - self.source_lin[idx] if self.source_lin is not None else [], - "source_quad": - self.source_quad[idx] if self.source_quad is not None else [], - "target_lin": - 
self.target_lin[idx] if self.target_lin is not None else [], - "target_quad": - self.target_quad[idx] if self.target_quad is not None else [], - "source_conditions": - self.source_conditions[idx] - if self.source_conditions is not None else [], - "target_conditions": - self.target_conditions[idx] - if self.target_conditions is not None else [], - } + return tree_util.tree_map(lambda x: x[idx], self._tree) def __len__(self): - return len(self.source_lin - ) if self.source_lin is not None else len(self.source_quad) + return self.n_samples class ConditionalOTDataLoader: diff --git a/src/ott/neural/flow_models/genot.py b/src/ott/neural/flow_models/genot.py index 4c5db7fa2..d466f015d 100644 --- a/src/ott/neural/flow_models/genot.py +++ b/src/ott/neural/flow_models/genot.py @@ -339,6 +339,11 @@ def __call__(self, train_loader, valid_loader): target, ) + jax.debug.print("source.shape {x}", x=source.shape) + jax.debug.print( + "source_conditions.shape {x}", x=source_conditions.shape + ) + jax.debug.print("target.shape {x}", x=target.shape) (source, source_conditions ), (target,) = self.ot_matcher.sample_conditional_indices_from_tmap( rng=rng_resample, diff --git a/src/ott/neural/flow_models/otfm.py b/src/ott/neural/flow_models/otfm.py index dca7bea60..6c519f4da 100644 --- a/src/ott/neural/flow_models/otfm.py +++ b/src/ott/neural/flow_models/otfm.py @@ -153,28 +153,30 @@ def loss_fn( return step_fn - def __call__(self, train_loader, valid_loader): + def __call__( + self, train_loader_source, train_loader_target, valid_loader_source, + valid_loader_target + ): """Train :class:`OTFlowMatching`. Args; train_loader: Dataloader for the training data. valid_loader: Dataloader for the validation data. 
""" - batch: Mapping[str, jnp.ndarray] = {} - iter = -1 while True: - for batch in train_loader: + for batch_source, batch_target in zip( + train_loader_source, train_loader_target + ): iter += 1 if iter >= self.iterations: stop = True break rng_resample, rng_step_fn, self.rng = jax.random.split(self.rng, 3) - source, source_conditions, target = jnp.array( - batch["source_lin"] - ), jnp.array(batch["source_conditions"]) if len( - batch["source_conditions"] - ) > 0 else None, jnp.array(batch["target_lin"]) + source, source_conditions = jnp.array(batch_source["lin"]), jnp.array( + batch_source["conditions"] + ) if "conditions" in batch_source else None + target = jnp.array(batch_target["lin"]) if self.ot_matcher is not None: tmat = self.ot_matcher.match_fn(source, target) (source, source_conditions), (target,) = self.ot_matcher.sample_joint( @@ -200,7 +202,7 @@ def __call__(self, train_loader, valid_loader): state_xi=self.unbalancedness_handler.state_xi, ) if iter % self.valid_freq == 0: - self._valid_step(valid_loader, iter) + self._valid_step(valid_loader_source, valid_loader_target, iter) if stop: break @@ -258,7 +260,7 @@ def solve_ode(input: jnp.ndarray, cond: jnp.ndarray) -> jnp.ndarray: return jax.vmap(solve_ode)(data, condition) - def _valid_step(self, valid_loader, iter): + def _valid_step(self, valid_loader_source, valid_loader_target, iter): pass @property diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index f33252f07..e40f93c16 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -11,6 +11,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+from typing import Tuple + import pytest import numpy as np @@ -21,13 +23,16 @@ @pytest.fixture(scope="module") -def data_loader_gaussian(): +def data_loaders_gaussian() -> Tuple[Torch_loader, Torch_loader]: """Returns a data loader for a simple Gaussian mixture.""" rng = np.random.default_rng(seed=0) source = rng.normal(size=(100, 2)) target = rng.normal(size=(100, 2)) + 1.0 - dataset = dataloaders.OTDataSet(source_lin=source, target_lin=target) - return Torch_loader(dataset, batch_size=16, shuffle=True) + src_dataset = dataloaders.OTDataSet(lin=source) + tgt_dataset = dataloaders.OTDataSet(lin=target) + loader_src = Torch_loader(src_dataset, batch_size=16, shuffle=True) + loader_tgt = Torch_loader(tgt_dataset, batch_size=16, shuffle=True) + return loader_src, loader_tgt @pytest.fixture(scope="module") @@ -40,14 +45,14 @@ def data_loader_gaussian_conditional(): source_1 = rng.normal(size=(100, 2)) target_1 = rng.normal(size=(100, 2)) - 2.0 ds0 = dataloaders.OTDataSet( - source_lin=source_0, + lin=source_0, target_lin=target_0, - source_conditions=np.zeros_like(source_0) * 0.0 + conditions=np.zeros_like(source_0) * 0.0 ) ds1 = dataloaders.OTDataSet( - source_lin=source_1, + lin=source_1, target_lin=target_1, - source_conditions=np.ones_like(source_1) * 1.0 + conditions=np.ones_like(source_1) * 1.0 ) sampler0 = torch.utils.data.RandomSampler(ds0, replacement=True) sampler1 = torch.utils.data.RandomSampler(ds1, replacement=True) @@ -67,9 +72,9 @@ def data_loader_gaussian_with_conditions(): target_conditions = rng.normal(size=(100, 1)) - 1.0 dataset = dataloaders.OTDataSet( - source_lin=source, + lin=source, target_lin=target, - source_conditions=source_conditions, + conditions=source_conditions, target_conditions=target_conditions ) return Torch_loader(dataset, batch_size=16, shuffle=True) @@ -81,7 +86,7 @@ def genot_data_loader_linear(): rng = np.random.default_rng(seed=0) source = rng.normal(size=(100, 2)) target = rng.normal(size=(100, 2)) + 1.0 - dataset = 
dataloaders.OTDataSet(source_lin=source, target_lin=target) + dataset = dataloaders.OTDataSet(lin=source, target_lin=target) return Torch_loader(dataset, batch_size=16, shuffle=True) @@ -94,14 +99,14 @@ def genot_data_loader_linear_conditional(): source_1 = rng.normal(size=(100, 2)) target_1 = rng.normal(size=(100, 2)) + 1.0 ds0 = dataloaders.OTDataSet( - source_lin=source_0, + lin=source_0, target_lin=target_0, - source_conditions=np.zeros_like(source_0) * 0.0 + conditions=np.zeros_like(source_0) * 0.0 ) ds1 = dataloaders.OTDataSet( - source_lin=source_1, + lin=source_1, target_lin=target_1, - source_conditions=np.ones_like(source_1) * 1.0 + conditions=np.ones_like(source_1) * 1.0 ) sampler0 = torch.utils.data.RandomSampler(ds0, replacement=True) sampler1 = torch.utils.data.RandomSampler(ds1, replacement=True) @@ -117,7 +122,7 @@ def genot_data_loader_quad(): rng = np.random.default_rng(seed=0) source = rng.normal(size=(100, 2)) target = rng.normal(size=(100, 1)) + 1.0 - dataset = dataloaders.OTDataSet(source_quad=source, target_quad=target) + dataset = dataloaders.OTDataSet(quad=source, target_quad=target) return Torch_loader(dataset, batch_size=16, shuffle=True) @@ -130,14 +135,14 @@ def genot_data_loader_quad_conditional(): source_1 = rng.normal(size=(100, 2)) target_1 = rng.normal(size=(100, 1)) + 1.0 ds0 = dataloaders.OTDataSet( - source_quad=source_0, + quad=source_0, target_quad=target_0, - source_conditions=np.zeros_like(source_0) * 0.0 + conditions=np.zeros_like(source_0) * 0.0 ) ds1 = dataloaders.OTDataSet( - source_quad=source_1, + quad=source_1, target_quad=target_1, - source_conditions=np.ones_like(source_1) * 1.0 + conditions=np.ones_like(source_1) * 1.0 ) sampler0 = torch.utils.data.RandomSampler(ds0, replacement=True) sampler1 = torch.utils.data.RandomSampler(ds1, replacement=True) @@ -156,8 +161,8 @@ def genot_data_loader_fused(): source_lin = rng.normal(size=(100, 2)) target_lin = rng.normal(size=(100, 2)) + 1.0 dataset = dataloaders.OTDataSet( - 
source_lin=source_lin, - source_quad=source_q, + lin=source_lin, + quad=source_q, target_lin=target_lin, target_quad=target_q ) @@ -179,18 +184,18 @@ def genot_data_loader_fused_conditional(): target_lin_1 = 2 * rng.normal(size=(100, 2)) + 1.0 ds0 = dataloaders.OTDataSet( - source_lin=source_lin_0, + lin=source_lin_0, target_lin=target_lin_0, - source_quad=source_q_0, + quad=source_q_0, target_quad=target_q_0, - source_conditions=np.zeros_like(source_lin_0) * 0.0 + conditions=np.zeros_like(source_lin_0) * 0.0 ) ds1 = dataloaders.OTDataSet( - source_lin=source_lin_1, + lin=source_lin_1, target_lin=target_lin_1, - source_quad=source_q_1, + quad=source_q_1, target_quad=target_q_1, - source_conditions=np.ones_like(source_lin_1) * 1.0 + conditions=np.ones_like(source_lin_1) * 1.0 ) sampler0 = torch.utils.data.RandomSampler(ds0, replacement=True) sampler1 = torch.utils.data.RandomSampler(ds1, replacement=True) diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index 6f8d14879..e57fce89c 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -36,7 +36,7 @@ class TestOTFlowMatching: ] ) def test_flow_matching_unconditional( - self, data_loader_gaussian, flow: Type[flows.BaseFlow] + self, data_loaders_gaussian, flow: Type[flows.BaseFlow] ): input_dim = 2 condition_dim = 0 @@ -64,14 +64,18 @@ def test_flow_matching_unconditional( optimizer=optimizer, unbalancedness_handler=unbalancedness_handler ) - fm(data_loader_gaussian, data_loader_gaussian) + fm( + data_loaders_gaussian[0], data_loaders_gaussian[1], + data_loaders_gaussian[0], data_loaders_gaussian[1] + ) - batch = next(iter(data_loader_gaussian)) - source = jnp.asarray(batch["source_lin"]) - target = jnp.asarray(batch["target_lin"]) - source_conditions = jnp.asarray(batch["source_conditions"]) if len( - batch["source_conditions"] - ) > 0 else None + batch_src = next(iter(data_loaders_gaussian[0])) + source = jnp.asarray(batch_src["lin"]) + batch_tgt = 
next(iter(data_loaders_gaussian[1])) + target = jnp.asarray(batch_tgt["lin"]) + source_conditions = jnp.asarray( + batch_src["conditions"] + ) if "conditions" in batch_src else None result_forward = fm.transport( source, condition=source_conditions, forward=True ) From 43d37f7eda3d4d08e820fa6ece1955593f6a2256 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Mon, 19 Feb 2024 13:26:39 +0100 Subject: [PATCH 101/186] Fix bug in the `ConditionalOTDataset` --- docs/neural/data.rst | 10 +--- src/ott/neural/data/__init__.py | 2 +- src/ott/neural/data/dataloaders.py | 91 ------------------------------ src/ott/neural/data/datasets.py | 87 ++++++++++++++++++++++++++++ tests/neural/conftest.py | 70 +++++++++++------------ 5 files changed, 125 insertions(+), 135 deletions(-) delete mode 100644 src/ott/neural/data/dataloaders.py create mode 100644 src/ott/neural/data/datasets.py diff --git a/docs/neural/data.rst b/docs/neural/data.rst index 970499ff5..95f05f93f 100644 --- a/docs/neural/data.rst +++ b/docs/neural/data.rst @@ -11,11 +11,5 @@ Datasets .. autosummary:: :toctree: _autosummary - dataloaders.OTDataSet - -Dataloaders ------------ -.. autosummary:: - :toctree: _autosummary - - dataloaders.ConditionalOTDataLoader + datasets.OTDataset + datasets.ConditionalOTDataset diff --git a/src/ott/neural/data/__init__.py b/src/ott/neural/data/__init__.py index 51f8dd2af..785604b21 100644 --- a/src/ott/neural/data/__init__.py +++ b/src/ott/neural/data/__init__.py @@ -11,4 +11,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from . import dataloaders +from . 
import datasets diff --git a/src/ott/neural/data/dataloaders.py b/src/ott/neural/data/dataloaders.py deleted file mode 100644 index 8083a744c..000000000 --- a/src/ott/neural/data/dataloaders.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright OTT-JAX -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from typing import Any, List, Mapping, Optional - -import numpy as np -from jax import tree_util - -__all__ = ["OTDataSet", "ConditionalOTDataLoader"] - - -class OTDataSet: - """Data set for OT problems. - - Args: - lin: Linear part of the measure. - quad: Quadratic part of the measure. - conditions: Conditions of the source measure. 
- """ - - def __init__( - self, - lin: Optional[np.ndarray] = None, - quad: Optional[np.ndarray] = None, - conditions: Optional[np.ndarray] = None, - ): - if lin is not None: - if quad is not None: - assert len(lin) == len(quad) - self.n_samples = len(lin) - else: - self.n_samples = len(lin) - else: - self.n_samples = len(quad) - if conditions is not None: - assert len(conditions) == self.n_samples - - self.lin = lin - self.quad = quad - self.conditions = conditions - self._tree = {} - if lin is not None: - self._tree["lin"] = lin - if quad is not None: - self._tree["quad"] = quad - if conditions is not None: - self._tree["conditions"] = conditions - - def __getitem__(self, idx: np.ndarray) -> Mapping[str, np.ndarray]: - return tree_util.tree_map(lambda x: x[idx], self._tree) - - def __len__(self): - return self.n_samples - - -class ConditionalOTDataLoader: - """Data loader for OT problems with conditions. - - This data loader wraps several data loaders and samples from them. - - Args: - dataloaders: List of data loaders. - seed: Random seed. - """ - - def __init__( - self, - dataloaders: List[Any], - seed: int = 0 # dataloader should subclass torch dataloader - ): - super().__init__() - self.dataloaders = dataloaders - self.conditions = list(dataloaders) - self.rng = np.random.default_rng(seed=seed) - - def __next__(self) -> Mapping[str, np.ndarray]: - idx = self.rng.choice(len(self.conditions)) - return next(iter(self.dataloaders[idx])) - - def __iter__(self) -> "ConditionalOTDataLoader": - return self diff --git a/src/ott/neural/data/datasets.py b/src/ott/neural/data/datasets.py new file mode 100644 index 000000000..990c27a2a --- /dev/null +++ b/src/ott/neural/data/datasets.py @@ -0,0 +1,87 @@ +# Copyright OTT-JAX +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import Dict, List, Optional + +import jax.tree_util as jtu +import numpy as np + +__all__ = ["OTDataset", "ConditionalOTDataset"] + + +class OTDataset: + """Dataset for Optimal transport problems. + + Args: + lin: Linear part of the measure. + quad: Quadratic part of the measure. + conditions: Conditions of the source measure. + """ + + def __init__( + self, + lin: Optional[np.ndarray] = None, + quad: Optional[np.ndarray] = None, + conditions: Optional[np.ndarray] = None, + ): + self.data = {} + if lin is not None: + self.data["lin"] = lin + if quad is not None: + self.data["quad"] = quad + if conditions is not None: + self.data["conditions"] = conditions + self._check_sizes() + + def _check_sizes(self) -> None: + sizes = {k: len(v) for k, v in self.data.items()} + if not len(set(sizes.values())) == 1: + raise ValueError(f"Not all arrays have the same size: {sizes}.") + + def __getitem__(self, idx: np.ndarray) -> Dict[str, np.ndarray]: + return jtu.tree_map(lambda x: x[idx], self.data)["lin"] + + def __len__(self) -> int: + for v in self.data.values(): + return len(v) + return 0 + + +# TODO(michalk8): rename +class ConditionalOTDataset: + """Dataset for OT problems with conditions. + + This data loader wraps several data loaders and samples from them. + + Args: + datasets: Datasets to sample from. + seed: Random seed. 
+ """ + + def __init__( + self, + # TODO(michalk8): allow for dict with weights + datasets: List[OTDataset], + seed: Optional[int] = None, + ): + self.datasets = tuple(datasets) + self._rng = np.random.default_rng(seed=seed) + self._iterators = () + + def __next__(self) -> Dict[str, np.ndarray]: + idx = self._rng.choice(len(self._iterators)) + return next(self._iterators[idx]) + + def __iter__(self) -> "ConditionalOTDataset": + self._iterators = tuple(iter(ds) for ds in self.datasets) + return self diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index e40f93c16..f5c48e924 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -17,21 +17,21 @@ import numpy as np import torch -from torch.utils.data import DataLoader as Torch_loader +from torch.utils.data import DataLoader -from ott.neural.data import dataloaders +from ott.neural.data import datasets @pytest.fixture(scope="module") -def data_loaders_gaussian() -> Tuple[Torch_loader, Torch_loader]: +def data_loaders_gaussian() -> Tuple[DataLoader, DataLoader]: """Returns a data loader for a simple Gaussian mixture.""" rng = np.random.default_rng(seed=0) source = rng.normal(size=(100, 2)) target = rng.normal(size=(100, 2)) + 1.0 - src_dataset = dataloaders.OTDataSet(lin=source) - tgt_dataset = dataloaders.OTDataSet(lin=target) - loader_src = Torch_loader(src_dataset, batch_size=16, shuffle=True) - loader_tgt = Torch_loader(tgt_dataset, batch_size=16, shuffle=True) + src_dataset = datasets.OTDataset(lin=source) + tgt_dataset = datasets.OTDataset(lin=target) + loader_src = DataLoader(src_dataset, batch_size=16, shuffle=True) + loader_tgt = DataLoader(tgt_dataset, batch_size=16, shuffle=True) return loader_src, loader_tgt @@ -44,22 +44,22 @@ def data_loader_gaussian_conditional(): source_1 = rng.normal(size=(100, 2)) target_1 = rng.normal(size=(100, 2)) - 2.0 - ds0 = dataloaders.OTDataSet( + ds0 = datasets.OTDataset( lin=source_0, target_lin=target_0, conditions=np.zeros_like(source_0) * 
0.0 ) - ds1 = dataloaders.OTDataSet( + ds1 = datasets.OTDataset( lin=source_1, target_lin=target_1, conditions=np.ones_like(source_1) * 1.0 ) sampler0 = torch.utils.data.RandomSampler(ds0, replacement=True) sampler1 = torch.utils.data.RandomSampler(ds1, replacement=True) - dl0 = Torch_loader(ds0, batch_size=16, sampler=sampler0) - dl1 = Torch_loader(ds1, batch_size=16, sampler=sampler1) + dl0 = DataLoader(ds0, batch_size=16, sampler=sampler0) + dl1 = DataLoader(ds1, batch_size=16, sampler=sampler1) - return dataloaders.ConditionalOTDataLoader((dl0, dl1)) + return datasets.ConditionalOTDataset((dl0, dl1)) @pytest.fixture(scope="module") @@ -71,13 +71,13 @@ def data_loader_gaussian_with_conditions(): source_conditions = rng.normal(size=(100, 1)) target_conditions = rng.normal(size=(100, 1)) - 1.0 - dataset = dataloaders.OTDataSet( + dataset = datasets.OTDataset( lin=source, target_lin=target, conditions=source_conditions, target_conditions=target_conditions ) - return Torch_loader(dataset, batch_size=16, shuffle=True) + return DataLoader(dataset, batch_size=16, shuffle=True) @pytest.fixture(scope="module") @@ -86,8 +86,8 @@ def genot_data_loader_linear(): rng = np.random.default_rng(seed=0) source = rng.normal(size=(100, 2)) target = rng.normal(size=(100, 2)) + 1.0 - dataset = dataloaders.OTDataSet(lin=source, target_lin=target) - return Torch_loader(dataset, batch_size=16, shuffle=True) + dataset = datasets.OTDataset(lin=source, target_lin=target) + return DataLoader(dataset, batch_size=16, shuffle=True) @pytest.fixture(scope="module") @@ -98,22 +98,22 @@ def genot_data_loader_linear_conditional(): target_0 = rng.normal(size=(100, 2)) + 1.0 source_1 = rng.normal(size=(100, 2)) target_1 = rng.normal(size=(100, 2)) + 1.0 - ds0 = dataloaders.OTDataSet( + ds0 = datasets.OTDataset( lin=source_0, target_lin=target_0, conditions=np.zeros_like(source_0) * 0.0 ) - ds1 = dataloaders.OTDataSet( + ds1 = datasets.OTDataset( lin=source_1, target_lin=target_1, 
conditions=np.ones_like(source_1) * 1.0 ) sampler0 = torch.utils.data.RandomSampler(ds0, replacement=True) sampler1 = torch.utils.data.RandomSampler(ds1, replacement=True) - dl0 = Torch_loader(ds0, batch_size=16, sampler=sampler0) - dl1 = Torch_loader(ds1, batch_size=16, sampler=sampler1) + dl0 = DataLoader(ds0, batch_size=16, sampler=sampler0) + dl1 = DataLoader(ds1, batch_size=16, sampler=sampler1) - return dataloaders.ConditionalOTDataLoader((dl0, dl1)) + return datasets.ConditionalOTDataset((dl0, dl1)) @pytest.fixture(scope="module") @@ -122,8 +122,8 @@ def genot_data_loader_quad(): rng = np.random.default_rng(seed=0) source = rng.normal(size=(100, 2)) target = rng.normal(size=(100, 1)) + 1.0 - dataset = dataloaders.OTDataSet(quad=source, target_quad=target) - return Torch_loader(dataset, batch_size=16, shuffle=True) + dataset = datasets.OTDataset(quad=source, target_quad=target) + return DataLoader(dataset, batch_size=16, shuffle=True) @pytest.fixture(scope="module") @@ -134,22 +134,22 @@ def genot_data_loader_quad_conditional(): target_0 = rng.normal(size=(100, 1)) + 1.0 source_1 = rng.normal(size=(100, 2)) target_1 = rng.normal(size=(100, 1)) + 1.0 - ds0 = dataloaders.OTDataSet( + ds0 = datasets.OTDataset( quad=source_0, target_quad=target_0, conditions=np.zeros_like(source_0) * 0.0 ) - ds1 = dataloaders.OTDataSet( + ds1 = datasets.OTDataset( quad=source_1, target_quad=target_1, conditions=np.ones_like(source_1) * 1.0 ) sampler0 = torch.utils.data.RandomSampler(ds0, replacement=True) sampler1 = torch.utils.data.RandomSampler(ds1, replacement=True) - dl0 = Torch_loader(ds0, batch_size=16, sampler=sampler0) - dl1 = Torch_loader(ds1, batch_size=16, sampler=sampler1) + dl0 = DataLoader(ds0, batch_size=16, sampler=sampler0) + dl1 = DataLoader(ds1, batch_size=16, sampler=sampler1) - return dataloaders.ConditionalOTDataLoader((dl0, dl1)) + return datasets.ConditionalOTDataset((dl0, dl1)) @pytest.fixture(scope="module") @@ -160,13 +160,13 @@ def 
genot_data_loader_fused(): target_q = rng.normal(size=(100, 1)) + 1.0 source_lin = rng.normal(size=(100, 2)) target_lin = rng.normal(size=(100, 2)) + 1.0 - dataset = dataloaders.OTDataSet( + dataset = datasets.OTDataset( lin=source_lin, quad=source_q, target_lin=target_lin, target_quad=target_q ) - return Torch_loader(dataset, batch_size=16, shuffle=True) + return DataLoader(dataset, batch_size=16, shuffle=True) @pytest.fixture(scope="module") @@ -183,14 +183,14 @@ def genot_data_loader_fused_conditional(): source_lin_1 = 2 * rng.normal(size=(100, 2)) target_lin_1 = 2 * rng.normal(size=(100, 2)) + 1.0 - ds0 = dataloaders.OTDataSet( + ds0 = datasets.OTDataset( lin=source_lin_0, target_lin=target_lin_0, quad=source_q_0, target_quad=target_q_0, conditions=np.zeros_like(source_lin_0) * 0.0 ) - ds1 = dataloaders.OTDataSet( + ds1 = datasets.OTDataset( lin=source_lin_1, target_lin=target_lin_1, quad=source_q_1, @@ -199,6 +199,6 @@ def genot_data_loader_fused_conditional(): ) sampler0 = torch.utils.data.RandomSampler(ds0, replacement=True) sampler1 = torch.utils.data.RandomSampler(ds1, replacement=True) - dl0 = Torch_loader(ds0, batch_size=16, sampler=sampler0) - dl1 = Torch_loader(ds1, batch_size=16, sampler=sampler1) - return dataloaders.ConditionalOTDataLoader((dl0, dl1)) + dl0 = DataLoader(ds0, batch_size=16, sampler=sampler0) + dl1 = DataLoader(ds1, batch_size=16, sampler=sampler1) + return datasets.ConditionalOTDataset((dl0, dl1)) From 86f6e7a49b687eb2d1fa0c1a03d7781e569aa3fb Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Mon, 19 Feb 2024 13:38:14 +0100 Subject: [PATCH 102/186] Polish docs in the `flows.py` --- src/ott/neural/flow_models/flows.py | 38 ++++++++--------------------- 1 file changed, 10 insertions(+), 28 deletions(-) diff --git a/src/ott/neural/flow_models/flows.py b/src/ott/neural/flow_models/flows.py index fd1009cef..b2e4970bf 100644 --- a/src/ott/neural/flow_models/flows.py +++ 
b/src/ott/neural/flow_models/flows.py @@ -38,9 +38,9 @@ def __init__(self, sigma: float): def compute_mu_t( self, t: jnp.ndarray, src: jnp.ndarray, tgt: jnp.ndarray ) -> jnp.ndarray: - """Compute the mean of the probablitiy path. + """Compute the mean of the probability path. - Compute the mean of the probablitiy path between :math:`x_0` and :math:`x_1` + Compute the mean of the probability path between :math:`x_0` and :math:`x_1` at time :math:`t`. Args: @@ -51,10 +51,13 @@ def compute_mu_t( @abc.abstractmethod def compute_sigma_t(self, t: jnp.ndarray) -> jnp.ndarray: - """Compute the standard deviation of the probablity path at time :math:`t`. + """Compute the standard deviation of the probability path at time :math:`t`. Args: t: Time :math:`t` of shape `(batch_size, 1)`. + + Returns: + Standard deviation of the probability path at time :math:`t`. """ @abc.abstractmethod @@ -67,7 +70,7 @@ def compute_ut( :math:`x_1` at time :math:`t`. Args: - t: Time :math:`t` of shape `(batch_size, 1)`.. + t: Time :math:`t` of shape `(batch_size, 1)`. src: Sample from the source distribution of shape `(batch_size, ...)`. tgt: Sample from the target distribution of shape `(batch_size, ...)`. @@ -107,22 +110,9 @@ def compute_mu_t( # noqa: D102 ) -> jnp.ndarray: return (1.0 - t) * src + t * tgt - def compute_ut( + def compute_ut( # noqa: D102 self, t: jnp.ndarray, src: jnp.ndarray, tgt: jnp.ndarray ) -> jnp.ndarray: - """Evaluate the conditional vector field. - - Evaluate the conditional vector field defined between :math:`x_0` and - :math:`x_1` at time :math:`t`. - - Args: - t: Time :math:`t` of shape `(batch_size, 1)`. - src: Sample from the source distribution of shape `(batch_size, ...)`. - tgt: Sample from the target distribution of shape `(batch_size, ...)`.. - - Returns: - Conditional vector field evaluated at time :math:`t`. 
- """ del t return tgt - src @@ -134,7 +124,7 @@ def compute_sigma_t(self, t: jnp.ndarray) -> jnp.ndarray: r"""Compute noise of the flow at time :math:`t`. Args: - t: Time :math:`t` of shape `(batch_size, 1)`.. + t: Time :math:`t` of shape `(batch_size, 1)`. Returns: Constant, time-independent standard deviation :math:`\sigma`. @@ -154,13 +144,5 @@ class BrownianNoiseFlow(StraightFlow): at time :math:`t`. """ - def compute_sigma_t(self, t: jnp.ndarray) -> jnp.ndarray: - """Compute the standard deviation of the probablity path at time :math:`t`. - - Args: - t: Time :math:`t` of shape `(batch_size, 1)`.. - - Returns: - Standard deviation of the probablity path at time :math:`t`. - """ + def compute_sigma_t(self, t: jnp.ndarray) -> jnp.ndarray: # noqa: D102 return self.sigma * jnp.sqrt(t * (1.0 - t)) From ae37132e494766fcd58d1372e4e55b533ebf8355 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Mon, 19 Feb 2024 14:47:20 +0100 Subject: [PATCH 103/186] Update `OTFM` --- src/ott/neural/flow_models/otfm.py | 232 +++++++++---------------- src/ott/neural/flow_models/samplers.py | 3 +- 2 files changed, 87 insertions(+), 148 deletions(-) diff --git a/src/ott/neural/flow_models/otfm.py b/src/ott/neural/flow_models/otfm.py index 6c519f4da..3e6d821a1 100644 --- a/src/ott/neural/flow_models/otfm.py +++ b/src/ott/neural/flow_models/otfm.py @@ -11,106 +11,66 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-import collections import functools -from typing import ( - Any, - Callable, - Dict, - Literal, - Mapping, - Optional, - Tuple, - Type, - Union, -) +from typing import Any, Callable, Dict, Optional, Tuple import jax import jax.numpy as jnp +import jax.tree_util as jtu import diffrax import optax from flax.training import train_state from ott import utils -from ott.geometry import costs -from ott.neural.flow_models import flows +from ott.neural.flow_models import flows, models from ott.neural.models import base_solver __all__ = ["OTFlowMatching"] class OTFlowMatching: - """(Optimal transport) flow matching class. + """(Optimal transport) flow matching :cite:`lipman:22`. - Flow matching as introduced in :cite:`lipman:22`, with extension to OT-FM - (:cite`tong:23`, :cite:`pooladian:23`). + Includes an extension to OT-FM :cite`tong:23`, :cite:`pooladian:23`. Args: - velocity_field: Neural vector field parameterized by a neural network. input_dim: Dimension of the input data. - cond_dim: Dimension of the conditioning variable. - iterations: Number of iterations. - valid_freq: Frequency of validation. + velocity_field: Neural vector field parameterized by a neural network. flow: Flow between source and target distribution. time_sampler: Sampler for the time. - optimizer: Optimizer for `velocity_field`. - callback_fn: Callback function. - num_eval_samples: Number of samples to evaluate on during evaluation. + optimizer: Optimizer for the ``velocity_field``. + ot_matcher: TODO. + unbalancedness_handler: TODO. rng: Random number generator. 
""" + # TODO(michalk8): in the future, `input_dim`, `optimizer` and `rng` will be + # in a separate function def __init__( self, - velocity_field: Callable[[ - jnp.ndarray, jnp.ndarray, Optional[jnp.ndarray], Optional[jnp.ndarray] - ], jnp.ndarray], input_dim: int, - cond_dim: int, - iterations: int, - flow: Type[flows.BaseFlow], + velocity_field: models.VelocityField, + flow: flows.BaseFlow, time_sampler: Callable[[jax.Array, int], jnp.ndarray], optimizer: optax.GradientTransformation, - ot_matcher: Optional[base_solver.OTMatcherLinear], - unbalancedness_handler: base_solver.UnbalancednessHandler, - epsilon: float = 1e-2, - cost_fn: Optional[Type[costs.CostFn]] = None, - scale_cost: Union[bool, int, float, - Literal["mean", "max_norm", "max_bound", "max_cost", - "median"]] = "mean", - callback_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray, jnp.ndarray], - Any]] = None, - logging_freq: int = 100, - valid_freq: int = 5000, - num_eval_samples: int = 1000, + ot_matcher: Optional[base_solver.OTMatcherLinear] = None, + unbalancedness_handler: Optional[base_solver.UnbalancednessHandler + ] = None, rng: Optional[jax.Array] = None, ): - rng = utils.default_prng_key(rng) - self.unbalancedness_handler = unbalancedness_handler - self.iterations = iterations - self.valid_freq = valid_freq - self.velocity_field = velocity_field self.input_dim = input_dim - self.ot_matcher = ot_matcher + self.velocity_field = velocity_field self.flow = flow self.time_sampler = time_sampler + self.unbalancedness_handler = unbalancedness_handler + self.ot_matcher = ot_matcher self.optimizer = optimizer - self.epsilon = epsilon - self.cost_fn = cost_fn - self.scale_cost = scale_cost - self.callback_fn = callback_fn - self.rng = rng - self.logging_freq = logging_freq - self.num_eval_samples = num_eval_samples - self._training_logs: Mapping[str, Any] = collections.defaultdict(list) - - self.setup() - - def setup(self) -> None: - """Setup :class:`OTFlowMatching`.""" + + rng = 
utils.default_prng_key(rng) self.state_velocity_field = ( self.velocity_field.create_train_state( - self.rng, self.optimizer, self.input_dim + rng, self.optimizer, self.input_dim ) ) @@ -153,41 +113,46 @@ def loss_fn( return step_fn - def __call__( - self, train_loader_source, train_loader_target, valid_loader_source, - valid_loader_target - ): - """Train :class:`OTFlowMatching`. + # TODO(michalk8): refactor in the future PR to just do one step + def __call__( # noqa: D102 + self, + n_iters: int, + train_source, + train_target, + valid_source, + valid_target, + valid_freq: int = 5000, + rng: Optional[jax.Array] = None, + ) -> Dict[str, Any]: + rng = utils.default_prng_key(rng) + training_logs = {"loss": []} + + for it in range(n_iters): + for batch_source, batch_target in zip(train_source, train_target): + rng, rng_resample, rng_step_fn = jax.random.split(rng, 3) + + batch_source = jtu.tree_map(jnp.asarray, batch_source) + batch_target = jtu.tree_map(jnp.asarray, batch_target) + + source = batch_source["lin"] + source_conditions = batch_source.get("conditions", None) + target = batch_target["lin"] - Args; - train_loader: Dataloader for the training data. - valid_loader: Dataloader for the validation data. 
- """ - iter = -1 - while True: - for batch_source, batch_target in zip( - train_loader_source, train_loader_target - ): - iter += 1 - if iter >= self.iterations: - stop = True - break - rng_resample, rng_step_fn, self.rng = jax.random.split(self.rng, 3) - source, source_conditions = jnp.array(batch_source["lin"]), jnp.array( - batch_source["conditions"] - ) if "conditions" in batch_source else None - target = jnp.array(batch_target["lin"]) if self.ot_matcher is not None: tmat = self.ot_matcher.match_fn(source, target) (source, source_conditions), (target,) = self.ot_matcher.sample_joint( rng_resample, tmat, (source, source_conditions), (target,) ) + else: + tmat = None + self.state_velocity_field, loss = self.step_fn( rng_step_fn, self.state_velocity_field, source, target, source_conditions ) - self._training_logs["loss"].append(loss) - if self.learn_rescaling: + training_logs["loss"].append(loss) + + if self.unbalancedness_handler is not None and tmat is not None: ( self.unbalancedness_handler.state_eta, self.unbalancedness_handler.state_xi, eta_predictions, @@ -201,23 +166,24 @@ def __call__( state_eta=self.unbalancedness_handler.state_eta, state_xi=self.unbalancedness_handler.state_xi, ) - if iter % self.valid_freq == 0: - self._valid_step(valid_loader_source, valid_loader_target, iter) - if stop: - break + + if it % valid_freq == 0: + self._valid_step(valid_source, valid_target, it) + + return training_logs def transport( self, data: jnp.array, condition: Optional[jnp.ndarray] = None, forward: bool = True, - t_0: float = 0.0, - t_1: float = 1.0, + t0: float = 0.0, + t1: float = 1.0, **kwargs: Any, - ) -> diffrax.Solution: + ) -> jnp.ndarray: """Transport data with the learnt map. - This method pushes-forward the `source` by + This method pushes-forward the ``data`` by solving the neural ODE parameterized by the :attr:`~ott.neural.flows.OTFlowMatching.velocity_field`. @@ -225,72 +191,44 @@ def transport( data: Initial condition of the ODE. 
condition: Condition of the input data. forward: If `True` integrates forward, otherwise backwards. - t_0: Starting point of integration. - t_1: End point of integration. + t0: Starting point of integration. + t1: End point of integration. kwargs: Keyword arguments for the ODE solver. Returns: The push-forward or pull-back distribution defined by the learnt transport plan. - """ - t0, t1 = (t_0, t_1) if forward else (t_1, t_0) - @jax.jit def solve_ode(input: jnp.ndarray, cond: jnp.ndarray) -> jnp.ndarray: - return diffrax.diffeqsolve( - diffrax.ODETerm( - lambda t, x, args: self.state_velocity_field. - apply_fn({"params": self.state_velocity_field.params}, - t=t, - x=x, - condition=cond) - ), - kwargs.pop("solver", diffrax.Tsit5()), + ode_term = diffrax.ODETerm( + lambda t, x, args: self.state_velocity_field. + apply_fn({"params": self.state_velocity_field.params}, + t=t, + x=x, + condition=cond) + ) + + result = diffrax.diffeqsolve( + ode_term, + solver, t0=t0, t1=t1, dt0=kwargs.pop("dt0", None), y0=input, - stepsize_controller=kwargs.pop( - "stepsize_controller", - diffrax.PIDController(rtol=1e-5, atol=1e-5) - ), + stepsize_controller=stepsize_controller, **kwargs, - ).ys[0] - - return jax.vmap(solve_ode)(data, condition) - - def _valid_step(self, valid_loader_source, valid_loader_target, iter): - pass + ) + return result.ys[0] - @property - def learn_rescaling(self) -> bool: - """Whether to learn at least one rescaling factor.""" - return ( - self.unbalancedness_handler.rescaling_a is not None or - self.unbalancedness_handler.rescaling_b is not None + if not forward: + t0, t1 = t1, t0 + solver = kwargs.pop("solver", diffrax.Tsit5()), + stepsize_controller = kwargs.pop( + "stepsize_controller", diffrax.PIDController(rtol=1e-5, atol=1e-5) ) - def save(self, path: str): - """Save the model. + return jax.jit(jax.vmap(solve_ode))(data, condition) - Args: - path: Where to save the model to. 
- """ - raise NotImplementedError - - def load(self, path: str) -> "OTFlowMatching": - """Load a model. - - Args: - path: Where to load the model from. - - Returns: - An instance of :class:`ott.neural.solvers.OTFlowMatching`. - """ - raise NotImplementedError - - @property - def training_logs(self) -> Dict[str, Any]: - """Logs of the training.""" - raise NotImplementedError + def _valid_step(self, it: int, valid_source, valid_target) -> None: + pass diff --git a/src/ott/neural/flow_models/samplers.py b/src/ott/neural/flow_models/samplers.py index 34a28c2d2..9bd85d8b0 100644 --- a/src/ott/neural/flow_models/samplers.py +++ b/src/ott/neural/flow_models/samplers.py @@ -42,10 +42,11 @@ def uniform_sampler( used. Returns: - An array with `num_samples` samples of the time `math`:t:. + An array with `num_samples` samples of the time :math:`t`. """ if offset is None: return jax.random.uniform(rng, (num_samples, 1), minval=low, maxval=high) + t = jax.random.uniform(rng, (1, 1), minval=low, maxval=high) mod_term = ((high - low) - offset) return (t + jnp.arange(num_samples)[:, None] / num_samples) % mod_term From de323d2ecda7b08f1d612d983b93c54417cd7015 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Mon, 19 Feb 2024 16:25:48 +0100 Subject: [PATCH 104/186] Fix small bugs in `OTFM` --- docs/neural/duality.rst | 2 +- src/ott/neural/data/datasets.py | 2 +- src/ott/neural/flow_models/otfm.py | 65 +++++++++++++----------------- 3 files changed, 31 insertions(+), 38 deletions(-) diff --git a/docs/neural/duality.rst b/docs/neural/duality.rst index ea3f67bdf..25dc89daa 100644 --- a/docs/neural/duality.rst +++ b/docs/neural/duality.rst @@ -5,7 +5,7 @@ ott.neural.duality This module implements various solvers to estimate optimal transport between two probability measures, through samples, parameterized as neural networks. -These solvers build uponn dual formulation of the optimal transport problem. 
+These solvers build upon dual formulation of the optimal transport problem. Solvers ------- diff --git a/src/ott/neural/data/datasets.py b/src/ott/neural/data/datasets.py index 990c27a2a..5a12ed2c0 100644 --- a/src/ott/neural/data/datasets.py +++ b/src/ott/neural/data/datasets.py @@ -49,7 +49,7 @@ def _check_sizes(self) -> None: raise ValueError(f"Not all arrays have the same size: {sizes}.") def __getitem__(self, idx: np.ndarray) -> Dict[str, np.ndarray]: - return jtu.tree_map(lambda x: x[idx], self.data)["lin"] + return jtu.tree_map(lambda x: x[idx], self.data) def __len__(self) -> int: for v in self.data.values(): diff --git a/src/ott/neural/flow_models/otfm.py b/src/ott/neural/flow_models/otfm.py index 3e6d821a1..53788fc37 100644 --- a/src/ott/neural/flow_models/otfm.py +++ b/src/ott/neural/flow_models/otfm.py @@ -59,21 +59,19 @@ def __init__( ] = None, rng: Optional[jax.Array] = None, ): + rng = utils.default_prng_key(rng) + self.input_dim = input_dim - self.velocity_field = velocity_field + self.vf = velocity_field self.flow = flow self.time_sampler = time_sampler self.unbalancedness_handler = unbalancedness_handler self.ot_matcher = ot_matcher self.optimizer = optimizer - rng = utils.default_prng_key(rng) - self.state_velocity_field = ( - self.velocity_field.create_train_state( - rng, self.optimizer, self.input_dim - ) + self.vf_state = self.vf.create_train_state( + rng, self.optimizer, self.input_dim ) - self.step_fn = self._get_step_fn() def _get_step_fn(self) -> Callable: @@ -146,11 +144,10 @@ def __call__( # noqa: D102 else: tmat = None - self.state_velocity_field, loss = self.step_fn( - rng_step_fn, self.state_velocity_field, source, target, - source_conditions + self.vf_state, loss = self.step_fn( + rng_step_fn, self.vf_state, source, target, source_conditions ) - training_logs["loss"].append(loss) + training_logs["loss"].append(float(loss)) if self.unbalancedness_handler is not None and tmat is not None: ( @@ -174,23 +171,20 @@ def __call__( # noqa: 
D102 def transport( self, - data: jnp.array, + x: jnp.ndarray, condition: Optional[jnp.ndarray] = None, - forward: bool = True, t0: float = 0.0, t1: float = 1.0, **kwargs: Any, ) -> jnp.ndarray: """Transport data with the learnt map. - This method pushes-forward the ``data`` by - solving the neural ODE parameterized by the - :attr:`~ott.neural.flows.OTFlowMatching.velocity_field`. + This method pushes-forward the data by solving the neural ODE + parameterized by the velocity field. Args: - data: Initial condition of the ODE. - condition: Condition of the input data. - forward: If `True` integrates forward, otherwise backwards. + x: Initial condition of the ODE of shape `(batch_size, ...)`. + condition: Condition of the input data of shape `(batch_size, ...)`. t0: Starting point of integration. t1: End point of integration. kwargs: Keyword arguments for the ODE solver. @@ -200,35 +194,34 @@ def transport( transport plan. """ - def solve_ode(input: jnp.ndarray, cond: jnp.ndarray) -> jnp.ndarray: - ode_term = diffrax.ODETerm( - lambda t, x, args: self.state_velocity_field. 
- apply_fn({"params": self.state_velocity_field.params}, - t=t, - x=x, - condition=cond) - ) + def vf( + t: jnp.ndarray, x: jnp.ndarray, cond: Optional[jnp.ndarray] + ) -> jnp.ndarray: + return self.vf_state.apply_fn({"params": self.vf_state.params}, + t=t, + x=x, + condition=cond) + def solve_ode(x: jnp.ndarray, cond: Optional[jnp.ndarray]) -> jnp.ndarray: + ode_term = diffrax.ODETerm(vf) result = diffrax.diffeqsolve( ode_term, - solver, t0=t0, t1=t1, - dt0=kwargs.pop("dt0", None), - y0=input, - stepsize_controller=stepsize_controller, + y0=x, + args=cond, **kwargs, ) return result.ys[0] - if not forward: - t0, t1 = t1, t0 - solver = kwargs.pop("solver", diffrax.Tsit5()), - stepsize_controller = kwargs.pop( + kwargs.setdefault("dt0", None) + kwargs.setdefault("solver", diffrax.Tsit5()) + kwargs.setdefault( "stepsize_controller", diffrax.PIDController(rtol=1e-5, atol=1e-5) ) - return jax.jit(jax.vmap(solve_ode))(data, condition) + in_axes = [0, None if condition is None else 0] + return jax.jit(jax.vmap(solve_ode, in_axes))(x, condition) def _valid_step(self, it: int, valid_source, valid_target) -> None: pass From 4408cc236edd60adacfabdf8fe5e106fea3b9840 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Mon, 19 Feb 2024 16:36:21 +0100 Subject: [PATCH 105/186] Polish layers --- src/ott/neural/flow_models/layers.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/src/ott/neural/flow_models/layers.py b/src/ott/neural/flow_models/layers.py index d18980c38..6f04b4e54 100644 --- a/src/ott/neural/flow_models/layers.py +++ b/src/ott/neural/flow_models/layers.py @@ -11,7 +11,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - import jax.numpy as jnp import flax.linen as nn @@ -22,9 +21,8 @@ class CyclicalTimeEncoder(nn.Module): r"""A cyclical time encoder. 
- Encodes time :math:`t` as - :math:`cos(\tilde{t})` and :math:`sin(\tilde{t})` - where :math:`\tilde{t} = [2\\pi t, 2\\pi 2 t,\\ldots, 2\\pi n_frequencies t]` + Encodes time :math:`t` as :math:`cos(\tilde{t})` and :math:`sin(\tilde{t})` + where :math:`\tilde{t} = [2\pi t, 2\pi 2 t,\ldots, 2\pi n_frequencies t]`. Args: n_frequencies: Frequency of cyclical encoding. @@ -39,8 +37,8 @@ def __call__(self, t: jnp.ndarray) -> jnp.ndarray: # noqa: D102 t: Time of shape ``[n, 1]``. Returns: - Encoded time of shape ``[n, 2 * n_frequencies]`` + Encoded time of shape ``[n, 2 * n_frequencies]``. """ freq = 2 * jnp.arange(self.n_frequencies) * jnp.pi t = freq * t - return jnp.concatenate((jnp.cos(t), jnp.sin(t)), axis=-1) + return jnp.concatenate([jnp.cos(t), jnp.sin(t)], axis=-1) From 451f069df5507d67c15dc013f1b5752e65b1f8db Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Mon, 19 Feb 2024 17:40:18 +0100 Subject: [PATCH 106/186] Fix typo in citation --- src/ott/neural/flow_models/flows.py | 2 +- src/ott/neural/flow_models/layers.py | 10 +++---- src/ott/neural/flow_models/models.py | 39 +++++++++++++--------------- src/ott/neural/flow_models/otfm.py | 4 +-- 4 files changed, 26 insertions(+), 29 deletions(-) diff --git a/src/ott/neural/flow_models/flows.py b/src/ott/neural/flow_models/flows.py index b2e4970bf..d434e91f1 100644 --- a/src/ott/neural/flow_models/flows.py +++ b/src/ott/neural/flow_models/flows.py @@ -88,7 +88,7 @@ def compute_xt( Args: rng: Random number generator. - t: Time :math:`t` of shape `(batch_size, 1)`.. + t: Time :math:`t` of shape `(batch_size, 1)`. src: Sample from the source distribution of shape `(batch_size, ...)`. tgt: Sample from the target distribution of shape `(batch_size, ...)`. 
diff --git a/src/ott/neural/flow_models/layers.py b/src/ott/neural/flow_models/layers.py index 6f04b4e54..9ec703c4c 100644 --- a/src/ott/neural/flow_models/layers.py +++ b/src/ott/neural/flow_models/layers.py @@ -22,12 +22,12 @@ class CyclicalTimeEncoder(nn.Module): r"""A cyclical time encoder. Encodes time :math:`t` as :math:`cos(\tilde{t})` and :math:`sin(\tilde{t})` - where :math:`\tilde{t} = [2\pi t, 2\pi 2 t,\ldots, 2\pi n_frequencies t]`. + where :math:`\hat{t} = [2\pi t, 2\pi 2 t,\dots, 2\pi n_f t]`. Args: - n_frequencies: Frequency of cyclical encoding. + n_freqs: Frequency :math:`n_f` of the cyclical encoding. """ - n_frequencies: int = 128 + n_freqs: int = 128 @nn.compact def __call__(self, t: jnp.ndarray) -> jnp.ndarray: # noqa: D102 @@ -37,8 +37,8 @@ def __call__(self, t: jnp.ndarray) -> jnp.ndarray: # noqa: D102 t: Time of shape ``[n, 1]``. Returns: - Encoded time of shape ``[n, 2 * n_frequencies]``. + Encoded time of shape ``[n, 2 * n_freqs]``. """ - freq = 2 * jnp.arange(self.n_frequencies) * jnp.pi + freq = 2 * jnp.arange(self.n_freqs) * jnp.pi t = freq * t return jnp.concatenate([jnp.cos(t), jnp.sin(t)], axis=-1) diff --git a/src/ott/neural/flow_models/models.py b/src/ott/neural/flow_models/models.py index ebb29aa99..c71fff2c2 100644 --- a/src/ott/neural/flow_models/models.py +++ b/src/ott/neural/flow_models/models.py @@ -29,20 +29,18 @@ class VelocityField(nn.Module): r"""Parameterized neural vector field. - The `VelocityField` learns a map - :math:`v: \\mathbb{R}\times \\mathbb{R}^d\rightarrow \\mathbb{R}^d` solving - the ODE :math:`\frac{dx}{dt} = v(t, x)`. Given a source distribution at time - :math:`t=0`, the `VelocityField` can be used to transport the source - distribution given at :math:`t_0` to a target distribution given at - :math:`t_1` by integrating :math:`v(t, x)` from :math:`t=t_0` to - :math:`t=t_1`. 
+ The `VelocityField` learns a map :math:`v: \mathbb{R}\times \mathbb{R}^d + \rightarrow \mathbb{R}^d` solving the ODE :math:`\frac{dx}{dt} = v(t, x)`. + Given a source distribution at time :math:`t_0`, the velocity field can be + used to transport the source distribution given at :math:`t_0` to + a target distribution given at :math:`t_1` by integrating :math:`v(t, x)` + from :math:`t=t_0` to :math:`t=t_1`. Each of the input, condition, and time embeddings are passed through a block consisting of ``num_layers_per_block`` layers of dimension ``latent_embed_dim``, ``condition_embed_dim``, and ``time_embed_dim``, - respectively. - The output of each block is concatenated and passed through a final block of - dimension ``joint_hidden_dim``. + respectively. The output of each block is concatenated and passed through + a final block of dimension ``joint_hidden_dim``. Args: output_dim: Dimensionality of the neural vector field. @@ -57,8 +55,7 @@ class VelocityField(nn.Module): t_embed_dim``. num_layers_per_block: Number of layers per block. act_fn: Activation function. - n_frequencies: Number of frequencies to use for the time embedding. - + n_freqs: Number of frequencies to use for the time embedding. """ output_dim: int latent_embed_dim: int @@ -68,7 +65,7 @@ class VelocityField(nn.Module): joint_hidden_dim: Optional[int] = None num_layers_per_block: int = 3 act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.silu - n_frequencies: int = 128 + n_freqs: int = 128 def __post_init__(self) -> None: if self.condition_embed_dim is None: @@ -81,8 +78,8 @@ def __post_init__(self) -> None: ) if self.joint_hidden_dim is not None: assert (self.joint_hidden_dim >= concat_embed_dim), ( - "joint_hidden_dim must be greater than or equal to the sum of " - "all embedded dimensions. " + "joint_hidden_dim must be greater than or equal to the sum of" + " all embedded dimensions." 
) self.joint_hidden_dim = self.latent_embed_dim else: @@ -99,14 +96,14 @@ def __call__( """Forward pass through the neural vector field. Args: - t: Time of shape (batch_size, 1). - x: Data of shape (batch_size, output_dim). + t: Time of shape `(batch_size, 1)`. + x: Data of shape `(batch_size, output_dim)`. condition: Conditioning vector. Returns: Output of the neural vector field. """ - t = flow_layers.CyclicalTimeEncoder(n_frequencies=self.n_frequencies)(t) + t = flow_layers.CyclicalTimeEncoder(self.n_freqs)(t) t_layer = layers.MLPBlock( dim=self.t_embed_dim, out_dim=self.t_embed_dim, @@ -131,9 +128,9 @@ def __call__( act_fn=self.act_fn ) condition = condition_layer(condition) - concatenated = jnp.concatenate((t, x, condition), axis=-1) + concatenated = jnp.concatenate([t, x, condition], axis=-1) else: - concatenated = jnp.concatenate((t, x), axis=-1) + concatenated = jnp.concatenate([t, x], axis=-1) out_layer = layers.MLPBlock( dim=self.joint_hidden_dim, @@ -158,7 +155,7 @@ def create_train_state( input_dim: Dimensionality of the input. Returns: - Training state. + The training state. """ params = self.init( rng, jnp.ones((1, 1)), jnp.ones((1, input_dim)), diff --git a/src/ott/neural/flow_models/otfm.py b/src/ott/neural/flow_models/otfm.py index 53788fc37..3cd4d2cee 100644 --- a/src/ott/neural/flow_models/otfm.py +++ b/src/ott/neural/flow_models/otfm.py @@ -32,14 +32,14 @@ class OTFlowMatching: """(Optimal transport) flow matching :cite:`lipman:22`. - Includes an extension to OT-FM :cite`tong:23`, :cite:`pooladian:23`. + With an extension to OT-FM :cite:`tong:23`, :cite:`pooladian:23`. Args: input_dim: Dimension of the input data. velocity_field: Neural vector field parameterized by a neural network. flow: Flow between source and target distribution. time_sampler: Sampler for the time. - optimizer: Optimizer for the ``velocity_field``. + optimizer: Optimizer for the velocity field's parameters. ot_matcher: TODO. unbalancedness_handler: TODO. 
rng: Random number generator. From 5e10d3a5598425a37d6cbe664e8d46f11e7be0a4 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Mon, 19 Feb 2024 18:04:12 +0100 Subject: [PATCH 107/186] More polish for the docs --- docs/neural/flow_models.rst | 4 ++-- src/ott/neural/flow_models/flows.py | 34 ++++++++++++++++++++-------- src/ott/neural/flow_models/layers.py | 2 +- 3 files changed, 28 insertions(+), 12 deletions(-) diff --git a/docs/neural/flow_models.rst b/docs/neural/flow_models.rst index 5d9d1f594..5f9799292 100644 --- a/docs/neural/flow_models.rst +++ b/docs/neural/flow_models.rst @@ -16,8 +16,8 @@ Flows flows.ConstantNoiseFlow flows.BrownianNoiseFlow -Optimal Transport Flow Matching -------------------------------- +OT Flow Matching +---------------- .. autosummary:: :toctree: _autosummary diff --git a/src/ott/neural/flow_models/flows.py b/src/ott/neural/flow_models/flows.py index d434e91f1..150e2086e 100644 --- a/src/ott/neural/flow_models/flows.py +++ b/src/ott/neural/flow_models/flows.py @@ -28,7 +28,7 @@ class BaseFlow(abc.ABC): """Base class for all flows. Args: - sigma: Constant noise used for computing time-dependent noise schedule. + sigma: Noise used for computing time-dependent noise schedule. """ def __init__(self, sigma: float): @@ -103,7 +103,11 @@ def compute_xt( class StraightFlow(BaseFlow, abc.ABC): - """Base class for flows with straight paths.""" + """Base class for flows with straight paths. + + Args: + sigma: Noise used for computing time-dependent noise schedule. + """ def compute_mu_t( # noqa: D102 self, t: jnp.ndarray, src: jnp.ndarray, tgt: jnp.ndarray @@ -118,7 +122,11 @@ def compute_ut( # noqa: D102 class ConstantNoiseFlow(StraightFlow): - r"""Flow with straight paths and constant flow noise :math:`\sigma`.""" + r"""Flow with straight paths and constant flow noise :math:`\sigma`. + + Args: + sigma: Constant noise used for computing time-independent noise schedule. 
+ """ def compute_sigma_t(self, t: jnp.ndarray) -> jnp.ndarray: r"""Compute noise of the flow at time :math:`t`. @@ -135,14 +143,22 @@ def compute_sigma_t(self, t: jnp.ndarray) -> jnp.ndarray: class BrownianNoiseFlow(StraightFlow): r"""Brownian Bridge Flow. - Sampler for sampling noise implicitly defined by a Schroedinger Bridge + Sampler for sampling noise implicitly defined by a Schrödinger Bridge problem with parameter :math:`\sigma` such that - :math:`\sigma_t = \sigma * \sqrt(t * (1-t))` (:cite:`tong:23`). + :math:`\sigma_t = \sigma \cdot \sqrt{t \cdot (1 - t)}` :cite:`tong:23`. - Returns: - Samples from the probability path between :math:`x_0` and :math:`x_1` - at time :math:`t`. + Args: + sigma: Noise used for computing time-dependent noise schedule. """ - def compute_sigma_t(self, t: jnp.ndarray) -> jnp.ndarray: # noqa: D102 + def compute_sigma_t(self, t: jnp.ndarray) -> jnp.ndarray: + r"""Compute noise of the flow at time :math:`t`. + + Args: + t: Time :math:`t` of shape `(batch_size, 1)`. + + Returns: + Samples from the probability path between :math:`x_0` and :math:`x_1` + at time :math:`t`. + """ return self.sigma * jnp.sqrt(t * (1.0 - t)) diff --git a/src/ott/neural/flow_models/layers.py b/src/ott/neural/flow_models/layers.py index 9ec703c4c..2f87f6cfc 100644 --- a/src/ott/neural/flow_models/layers.py +++ b/src/ott/neural/flow_models/layers.py @@ -21,7 +21,7 @@ class CyclicalTimeEncoder(nn.Module): r"""A cyclical time encoder. - Encodes time :math:`t` as :math:`cos(\tilde{t})` and :math:`sin(\tilde{t})` + Encodes time :math:`t` as :math:`cos(\hat{t})` and :math:`sin(\hat{t})` where :math:`\hat{t} = [2\pi t, 2\pi 2 t,\dots, 2\pi n_f t]`. 
Args: From 5edc66d4f595aca1047360990e611597b16b9174 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Wed, 6 Mar 2024 09:47:12 +0100 Subject: [PATCH 108/186] remove print statements and unbalancednesshandler --- docs/neural/models.rst | 1 - src/ott/neural/flow_models/genot.py | 34 +--- src/ott/neural/flow_models/otfm.py | 19 -- src/ott/neural/models/base_solver.py | 280 +-------------------------- tests/neural/genot_test.py | 59 +----- tests/neural/otfm_test.py | 46 +---- 6 files changed, 13 insertions(+), 426 deletions(-) diff --git a/docs/neural/models.rst b/docs/neural/models.rst index bacc93c71..af6d4e33a 100644 --- a/docs/neural/models.rst +++ b/docs/neural/models.rst @@ -14,7 +14,6 @@ Utils base_solver.BaseOTMatcher base_solver.OTMatcherLinear base_solver.OTMatcherQuad - base_solver.UnbalancednessHandler Neural networks diff --git a/src/ott/neural/flow_models/genot.py b/src/ott/neural/flow_models/genot.py index d466f015d..ba2c0f6a0 100644 --- a/src/ott/neural/flow_models/genot.py +++ b/src/ott/neural/flow_models/genot.py @@ -66,7 +66,6 @@ class GENOTBase: optimizer: Optimizer for `velocity_field`. flow: Flow between latent distribution and target distribution. time_sampler: Sampler for the time. - unbalancedness_handler: Handler for unbalancedness. k_samples_per_x: Number of samples drawn from the conditional distribution of an input sample, see algorithm TODO. 
solver_latent_to_data: Linear OT solver to match the latent distribution @@ -91,7 +90,6 @@ def __init__( iterations: int, valid_freq: int, ot_matcher: base_solver.BaseOTMatcher, - unbalancedness_handler: base_solver.UnbalancednessHandler, optimizer: optax.GradientTransformation, flow: Type[flows.BaseFlow] = flows.ConstantNoiseFlow(0.0), # noqa: B008 time_sampler: Callable[[jax.Array, int], @@ -125,9 +123,6 @@ def __init__( self.cond_dim = cond_dim self.k_samples_per_x = k_samples_per_x - # unbalancedness - self.unbalancedness_handler = unbalancedness_handler - # OT data-data matching parameters self.fused_penalty = fused_penalty @@ -262,10 +257,7 @@ def _valid_step(self, valid_loader, iter): @property def learn_rescaling(self) -> bool: """Whether to learn at least one rescaling factor.""" - return ( - self.unbalancedness_handler.rescaling_a is not None or - self.unbalancedness_handler.rescaling_b is not None - ) + return False def _reshape_samples(self, arrays: Tuple[jnp.ndarray, ...], batch_size: int) -> Tuple[jnp.ndarray, ...]: @@ -278,20 +270,7 @@ def _learn_rescaling( self, source: jnp.ndarray, target: jnp.ndarray, source_conditions: Optional[jnp.ndarray], tmat: jnp.ndarray ) -> Tuple[jnp.ndarray, jnp.ndarray, float, float]: - - ( - self.state_eta, self.state_xi, eta_predictions, xi_predictions, loss_a, - loss_b - ) = self.unbalancedness_handler.step_fn( - source=source, - target=target, - condition=source_conditions, - a=tmat.sum(axis=1), - b=tmat.sum(axis=0), - state_eta=self.unbalancedness_handler.state_eta, - state_xi=self.unbalancedness_handler.state_xi, - ) - return eta_predictions, xi_predictions, float(loss_a), float(loss_b) + raise NotImplementedError class GENOTLin(GENOTBase): @@ -339,11 +318,6 @@ def __call__(self, train_loader, valid_loader): target, ) - jax.debug.print("source.shape {x}", x=source.shape) - jax.debug.print( - "source_conditions.shape {x}", x=source_conditions.shape - ) - jax.debug.print("target.shape {x}", x=target.shape) (source, 
source_conditions ), (target,) = self.ot_matcher.sample_conditional_indices_from_tmap( rng=rng_resample, @@ -351,7 +325,7 @@ def __call__(self, train_loader, valid_loader): k_samples_per_x=self.k_samples_per_x, source_arrays=(source, source_conditions), target_arrays=(target,), - source_is_balanced=(self.unbalancedness_handler.tau_a == 1.0) + source_is_balanced=(self.ot_matcher.tau_a == 1.0) ) if self.matcher_latent_to_data is not None: @@ -454,7 +428,7 @@ def __call__(self, train_loader, valid_loader): k_samples_per_x=self.k_samples_per_x, source_arrays=(source, source_conditions), target_arrays=(target,), - source_is_balanced=(self.unbalancedness_handler.tau_a == 1.0) + source_is_balanced=(self.ot_matcher.tau_a == 1.0) ) ) diff --git a/src/ott/neural/flow_models/otfm.py b/src/ott/neural/flow_models/otfm.py index 3cd4d2cee..e426a6f5c 100644 --- a/src/ott/neural/flow_models/otfm.py +++ b/src/ott/neural/flow_models/otfm.py @@ -41,7 +41,6 @@ class OTFlowMatching: time_sampler: Sampler for the time. optimizer: Optimizer for the velocity field's parameters. ot_matcher: TODO. - unbalancedness_handler: TODO. rng: Random number generator. 
""" @@ -55,8 +54,6 @@ def __init__( time_sampler: Callable[[jax.Array, int], jnp.ndarray], optimizer: optax.GradientTransformation, ot_matcher: Optional[base_solver.OTMatcherLinear] = None, - unbalancedness_handler: Optional[base_solver.UnbalancednessHandler - ] = None, rng: Optional[jax.Array] = None, ): rng = utils.default_prng_key(rng) @@ -65,7 +62,6 @@ def __init__( self.vf = velocity_field self.flow = flow self.time_sampler = time_sampler - self.unbalancedness_handler = unbalancedness_handler self.ot_matcher = ot_matcher self.optimizer = optimizer @@ -149,21 +145,6 @@ def __call__( # noqa: D102 ) training_logs["loss"].append(float(loss)) - if self.unbalancedness_handler is not None and tmat is not None: - ( - self.unbalancedness_handler.state_eta, - self.unbalancedness_handler.state_xi, eta_predictions, - xi_predictions, loss_a, loss_b - ) = self.unbalancedness_handler.step_fn( - source=source, - target=target, - condition=source_conditions, - a=tmat.sum(axis=1), - b=tmat.sum(axis=0), - state_eta=self.unbalancedness_handler.state_eta, - state_xi=self.unbalancedness_handler.state_xi, - ) - if it % valid_freq == 0: self._valid_step(valid_source, valid_target, it) diff --git a/src/ott/neural/models/base_solver.py b/src/ott/neural/models/base_solver.py index e9be6baa8..5ddfd5ef5 100644 --- a/src/ott/neural/models/base_solver.py +++ b/src/ott/neural/models/base_solver.py @@ -17,13 +17,9 @@ import jax.numpy as jnp from jax import tree_util -import optax -from flax.training import train_state - from ott.geometry import costs, pointcloud from ott.problems.linear import linear_problem from ott.problems.quadratic import quadratic_problem -from ott.solvers import was_solver from ott.solvers.linear import sinkhorn from ott.solvers.quadratic import gromov_wasserstein, gromov_wasserstein_lr @@ -31,7 +27,9 @@ ScaleCostQuad_t = Union[ScaleCost_t, Dict[str, ScaleCost_t]] __all__ = [ - "BaseOTMatcher", "OTMatcherLinear", "OTMatcherQuad", "UnbalancednessHandler" + 
"BaseOTMatcher", + "OTMatcherLinear", + "OTMatcherQuad", ] @@ -308,275 +306,3 @@ def match_pairs(*args, **kwargs): return fn(*args, **kwargs).matrix return match_pairs - - -class UnbalancednessHandler: - """Class to incorporate unbalancedness into neural OT models. - - This class implements the concepts introduced in :cite:`eyring:23` - in the Monge Map scenario and :cite:`klein:23` for the entropic OT case - for linear and quadratic cases. - - Args: - rng: Random number generator. - source_dim: Dimension of the source domain. - target_dim: Dimension of the target domain. - cond_dim: Dimension of the conditioning variable. - If :obj:`None`, no conditioning is used. - tau_a: Unbalancedness parameter for the source distribution. - Only used if `ot_solver` is not :obj:`None`. - tau_b: Unbalancedness parameter for the target distribution. - Only used if `ot_solver` is not :obj:`None`. - rescaling_a: Rescaling function for the source distribution. - If :obj:`None`, the left rescaling factor is not learnt. - rescaling_b: Rescaling function for the target distribution. - If :obj:`None`, the right rescaling factor is not learnt. - opt_eta: Optimizer for the left rescaling function. - opt_xi: Optimzier for the right rescaling function. - resample_epsilon: Epsilon for resampling. - scale_cost: Scaling of the cost matrix for estimating the rescaling factors. - ot_solver: Solver to compute unbalanced marginals. If `ot_solver` is `None`, - the method :meth:`ott.neural.models.base_solver.UnbalancednessHandler.compute_unbalanced_marginals` - is not available, and hence the unbalanced marginals must be computed - by the neural solver. - kwargs: Additional keyword arguments. 
- - """ # noqa: E501 - - def __init__( - self, - rng: jax.Array, - source_dim: int, - target_dim: int, - cond_dim: Optional[int], - tau_a: float = 1.0, - tau_b: float = 1.0, - rescaling_a: Optional[Callable[[jnp.ndarray, Optional[jnp.ndarray]], - jnp.ndarray]] = None, - rescaling_b: Optional[Callable[[jnp.ndarray, Optional[jnp.ndarray]], - jnp.ndarray]] = None, - opt_eta: Optional[optax.GradientTransformation] = None, - opt_xi: Optional[optax.GradientTransformation] = None, - resample_epsilon: float = 1e-2, - scale_cost: Union[ScaleCost_t, ScaleCostQuad_t] = 1.0, - ot_solver: Optional[was_solver.WassersteinSolver] = None, - **kwargs: Mapping[str, Any], - ): - self.rng_unbalanced = rng - self.source_dim = source_dim - self.target_dim = target_dim - self.cond_dim = cond_dim - self.tau_a = tau_a - self.tau_b = tau_b - self.rescaling_a = rescaling_a - self.rescaling_b = rescaling_b - self.opt_eta = opt_eta - self.opt_xi = opt_xi - self.resample_epsilon = resample_epsilon - self.scale_cost = scale_cost - self.ot_solver = ot_solver - - if isinstance(ot_solver, sinkhorn.Sinkhorn): - self.compute_unbalanced_marginals = ( - self._get_compute_unbalanced_marginals_lin( - tau_a=tau_a, - tau_b=tau_b, - resample_epsilon=resample_epsilon, - scale_cost=scale_cost, - **kwargs - ) - ) - elif isinstance(ot_solver, gromov_wasserstein.GromovWasserstein): - raise NotImplementedError - else: - self.compute_unbalanced_marginals = None - self.setup(source_dim=source_dim, target_dim=target_dim, cond_dim=cond_dim) - - def _get_compute_unbalanced_marginals_lin( - self, *args: Any, **kwargs: Mapping[str, Any] - ) -> Tuple[jnp.ndarray, jnp.ndarray]: - """Compute the unbalanced source and target marginals for a batch.""" - fn = _get_sinkhorn_match_fn(*args, **kwargs) - - @jax.jit - def compute_unbalanced_marginals_lin(*args, **kwargs): - out = fn(*args, **kwargs) - return out.marginals(axis=1), out.marginals(axis=0) - - return compute_unbalanced_marginals_lin - - def 
_get_compute_unbalanced_marginals_quad( - self, *args: Any, **kwargs: Mapping[str, Any] - ) -> Tuple[jnp.ndarray, jnp.ndarray]: - """Compute the unbalanced source and target marginals for a batch.""" - fn = _get_sinkhorn_match_fn(*args, **kwargs) - - @jax.jit - def compute_unbalanced_marginals_quad(*args, **kwargs): - out = fn(*args, **kwargs) - return out.marginals(axis=1), out.marginals(axis=0) - - return compute_unbalanced_marginals_quad - - @jax.jit - def resample_unbalanced( - self, - rng: jax.Array, - arrays: Tuple[jnp.ndarray, ...], - p: jnp.ndarray, - ) -> Tuple[jnp.ndarray, ...]: - """Resample a batch based on marginals. - - Args: - rng: Random number generator. - arrays: Arrays to resample from. - p: Probabilities according to which `arrays` are resampled. - - Returns: - Resampled arrays. - """ - indices = jax.random.choice(rng, a=len(p), p=jnp.squeeze(p), shape=[len(p)]) - return tree_util.tree_map(lambda b: b[indices], arrays) - - def setup(self, source_dim: int, target_dim: int, cond_dim: int): - """Setup the model. - - Args: - source_dim: Dimension of the source domain. - target_dim: Dimension of the target domain. - cond_dim: Dimension of the conditioning variable. - If :obj:`None`, no conditioning is used. 
- """ - self.rng_unbalanced, rng_eta, rng_xi = jax.random.split( - self.rng_unbalanced, 3 - ) - self.step_fn = self._get_rescaling_step_fn() - if self.rescaling_a is not None: - self.opt_eta = ( - self.opt_eta if self.opt_eta is not None else - optax.adamw(learning_rate=1e-4, weight_decay=1e-10) - ) - self.state_eta = self.rescaling_a.create_train_state( - rng_eta, self.opt_eta, source_dim - ) - if self.rescaling_b is not None: - self.opt_xi = ( - self.opt_xi if self.opt_xi is not None else - optax.adamw(learning_rate=1e-4, weight_decay=1e-10) - ) - self.state_xi = self.rescaling_b.create_train_state( - rng_xi, self.opt_xi, target_dim - ) - - def _get_rescaling_step_fn(self) -> Callable: # type:ignore[type-arg] - - def loss_marginal_fn( - params: jnp.ndarray, - apply_fn: Callable[[Dict[str, jnp.ndarray], jnp.ndarray], - Optional[jnp.ndarray]], - x: jnp.ndarray, - condition: Optional[jnp.ndarray], - true_marginals: jnp.ndarray, - expectation_reweighting: float, - ) -> Tuple[float, jnp.ndarray]: - predictions = apply_fn({"params": params}, x, condition) - pred_loss = optax.l2_loss(jnp.squeeze(predictions), true_marginals).mean() - exp_loss = optax.l2_loss(jnp.mean(predictions) - expectation_reweighting) - return (pred_loss + exp_loss, predictions) - - @jax.jit - def step_fn( - source: jnp.ndarray, - target: jnp.ndarray, - condition: Optional[jnp.ndarray], - a: jnp.ndarray, - b: jnp.ndarray, - state_eta: Optional[train_state.TrainState] = None, - state_xi: Optional[train_state.TrainState] = None, - *, - is_training: bool = True, - ): - if state_eta is not None: - grad_a_fn = jax.value_and_grad( - loss_marginal_fn, argnums=0, has_aux=True - ) - (loss_a, eta_predictions), grads_eta = grad_a_fn( - state_eta.params, - state_eta.apply_fn, - source, - condition, - a * len(a), - jnp.sum(b), - ) - new_state_eta = state_eta.apply_gradients( - grads=grads_eta - ) if is_training else None - - else: - new_state_eta = eta_predictions = loss_a = None - if state_xi is not None: - 
grad_b_fn = jax.value_and_grad( - loss_marginal_fn, argnums=0, has_aux=True - ) - (loss_b, xi_predictions), grads_xi = grad_b_fn( - state_xi.params, - state_xi.apply_fn, - target, - condition, - b * len(b), - jnp.sum(a), - ) - new_state_xi = state_xi.apply_gradients( - grads=grads_xi - ) if is_training else None - else: - new_state_xi = xi_predictions = loss_b = None - - return ( - new_state_eta, new_state_xi, eta_predictions, xi_predictions, loss_a, - loss_b - ) - - return step_fn - - def evaluate_eta( - self, - source: jnp.ndarray, - condition: Optional[jnp.ndarray] = None - ) -> jnp.ndarray: - """Evaluate the left learnt rescaling factor. - - Args: - source: Samples from the source distribution to evaluate rescaling - function on. - condition: Condition belonging to the samples in the source distribution. - - Returns: - Learnt left rescaling factors. - """ - if self.state_eta is None: - raise ValueError("The left rescaling factor was not parameterized.") - return self.state_eta.apply_fn({"params": self.state_eta.params}, - x=source, - condition=condition) - - def evaluate_xi( - self, - target: jnp.ndarray, - condition: Optional[jnp.ndarray] = None - ) -> jnp.ndarray: - """Evaluate the right learnt rescaling factor. - - Args: - target: Samples from the target distribution to evaluate the rescaling - function on. - condition: Condition belonging to the samples in the target distribution. - - Returns: - Learnt right rescaling factors. 
- """ - if self.state_xi is None: - raise ValueError("The right rescaling factor was not parameterized.") - return self.state_xi.apply_fn({"params": self.state_xi.params}, - x=target, - condition=condition) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index e924edd9a..7156dec3d 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -17,7 +17,6 @@ import pytest import jax.numpy as jnp -from jax import random import optax @@ -26,7 +25,6 @@ from ott.neural.flow_models.models import VelocityField from ott.neural.flow_models.samplers import uniform_sampler from ott.neural.models import base_solver -from ott.neural.models.nets import RescalingMLP from ott.solvers.linear import sinkhorn, sinkhorn_lr from ott.solvers.quadratic import gromov_wasserstein, gromov_wasserstein_lr @@ -71,9 +69,6 @@ def test_genot_linear_unconditional( ot_matcher = base_solver.OTMatcherLinear( ot_solver, cost_fn=costs.SqEuclidean(), scale_cost=scale_cost ) - unbalancedness_handler = base_solver.UnbalancednessHandler( - random.PRNGKey(0), source_dim, target_dim, condition_dim - ) time_sampler = uniform_sampler optimizer = optax.adam(learning_rate=1e-3) genot = GENOTLin( @@ -86,7 +81,6 @@ def test_genot_linear_unconditional( ot_matcher=ot_matcher, optimizer=optimizer, time_sampler=time_sampler, - unbalancedness_handler=unbalancedness_handler, k_samples_per_x=k_samples_per_x, matcher_latent_to_data=matcher_latent_to_data, ) @@ -134,9 +128,6 @@ def test_genot_linear_conditional( ot_solver, cost_fn=costs.SqEuclidean() ) time_sampler = uniform_sampler - unbalancedness_handler = base_solver.UnbalancednessHandler( - random.PRNGKey(0), source_dim, target_dim, condition_dim - ) optimizer = optax.adam(learning_rate=1e-3) genot = GENOTLin( @@ -147,7 +138,6 @@ def test_genot_linear_conditional( iterations=3, valid_freq=2, ot_matcher=ot_matcher, - unbalancedness_handler=unbalancedness_handler, optimizer=optimizer, time_sampler=time_sampler, 
k_samples_per_x=k_samples_per_x, @@ -198,26 +188,12 @@ def test_genot_linear_learn_rescaling( ot_matcher = base_solver.OTMatcherLinear( ot_solver, cost_fn=costs.SqEuclidean(), + tau_a=0.2, + tau_b=0.9, ) time_sampler = uniform_sampler optimizer = optax.adam(learning_rate=1e-3) - tau_a = 0.9 - tau_b = 0.2 - rescaling_a = RescalingMLP(hidden_dim=4, condition_dim=condition_dim) - rescaling_b = RescalingMLP(hidden_dim=4, condition_dim=condition_dim) - - unbalancedness_handler = base_solver.UnbalancednessHandler( - random.PRNGKey(0), - source_dim, - target_dim, - condition_dim, - tau_a=tau_a, - tau_b=tau_b, - rescaling_a=rescaling_a, - rescaling_b=rescaling_b - ) - genot = GENOTLin( neural_vf, input_dim=source_dim, @@ -228,24 +204,11 @@ def test_genot_linear_learn_rescaling( ot_matcher=ot_matcher, optimizer=optimizer, time_sampler=time_sampler, - unbalancedness_handler=unbalancedness_handler, matcher_latent_to_data=matcher_latent_to_data, ) genot(data_loader, data_loader) - result_eta = genot.unbalancedness_handler.evaluate_eta( - source_lin, condition=source_condition - ) - assert isinstance(result_eta, jnp.ndarray) - assert jnp.sum(jnp.isnan(result_eta)) == 0 - - result_xi = genot.unbalancedness_handler.evaluate_xi( - target_lin, condition=source_condition - ) - assert isinstance(result_xi, jnp.ndarray) - assert jnp.sum(jnp.isnan(result_xi)) == 0 - class TestGENOTQuad: @@ -285,10 +248,6 @@ def test_genot_quad_unconditional( ot_solver, cost_fn=costs.SqEuclidean() ) - unbalancedness_handler = base_solver.UnbalancednessHandler( - random.PRNGKey(0), source_dim, target_dim, condition_dim - ) - time_sampler = functools.partial(uniform_sampler, offset=1e-2) optimizer = optax.adam(learning_rate=1e-3) genot = GENOTQuad( @@ -299,7 +258,6 @@ def test_genot_quad_unconditional( iterations=3, valid_freq=2, ot_matcher=ot_matcher, - unbalancedness_handler=unbalancedness_handler, optimizer=optimizer, time_sampler=time_sampler, k_samples_per_x=k_samples_per_x, @@ -351,10 +309,6 @@ def 
test_genot_fused_unconditional( ot_solver, cost_fn=costs.SqEuclidean(), fused_penalty=0.5 ) - unbalancedness_handler = base_solver.UnbalancednessHandler( - random.PRNGKey(0), source_dim, target_dim, condition_dim - ) - optimizer = optax.adam(learning_rate=1e-3) genot = GENOTQuad( neural_vf, @@ -364,7 +318,6 @@ def test_genot_fused_unconditional( iterations=3, valid_freq=2, ot_matcher=ot_matcher, - unbalancedness_handler=unbalancedness_handler, optimizer=optimizer, k_samples_per_x=k_samples_per_x, matcher_latent_to_data=matcher_latent_to_data, @@ -416,9 +369,6 @@ def test_genot_quad_conditional( ot_solver, cost_fn=costs.SqEuclidean() ) time_sampler = uniform_sampler - unbalancedness_handler = base_solver.UnbalancednessHandler( - random.PRNGKey(0), source_dim, target_dim, condition_dim - ) optimizer = optax.adam(learning_rate=1e-3) genot = GENOTQuad( @@ -429,7 +379,6 @@ def test_genot_quad_conditional( iterations=3, valid_freq=2, ot_matcher=ot_matcher, - unbalancedness_handler=unbalancedness_handler, optimizer=optimizer, time_sampler=time_sampler, k_samples_per_x=k_samples_per_x, @@ -486,9 +435,6 @@ def test_genot_fused_conditional( ) time_sampler = uniform_sampler optimizer = optax.adam(learning_rate=1e-3) - unbalancedness_handler = base_solver.UnbalancednessHandler( - random.PRNGKey(0), source_dim, target_dim, condition_dim - ) genot = GENOTQuad( neural_vf, @@ -498,7 +444,6 @@ def test_genot_fused_conditional( iterations=3, valid_freq=2, ot_matcher=ot_matcher, - unbalancedness_handler=unbalancedness_handler, optimizer=optimizer, time_sampler=time_sampler, k_samples_per_x=k_samples_per_x, diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index e57fce89c..5c53db325 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -17,12 +17,11 @@ import pytest import jax.numpy as jnp -from jax import random import optax from ott.neural.flow_models import flows, models, otfm, samplers -from ott.neural.models import base_solver, nets +from 
ott.neural.models import base_solver from ott.solvers.linear import sinkhorn, sinkhorn_lr @@ -49,9 +48,7 @@ def test_flow_matching_unconditional( ot_matcher = base_solver.OTMatcherLinear(ot_solver) time_sampler = samplers.uniform_sampler optimizer = optax.adam(learning_rate=1e-3) - unbalancedness_handler = base_solver.UnbalancednessHandler( - random.PRNGKey(0), input_dim, input_dim, condition_dim - ) + fm = otfm.OTFlowMatching( neural_vf, input_dim=input_dim, @@ -62,7 +59,6 @@ def test_flow_matching_unconditional( flow=flow, time_sampler=time_sampler, optimizer=optimizer, - unbalancedness_handler=unbalancedness_handler ) fm( data_loaders_gaussian[0], data_loaders_gaussian[1], @@ -101,17 +97,14 @@ def test_flow_matching_with_conditions( input_dim = 2 condition_dim = 1 neural_vf = models.VelocityField( - output_dim=2, - condition_dim=1, + output_dim=input_dim, + condition_dim=condition_dim, latent_embed_dim=5, ) ot_solver = sinkhorn.Sinkhorn() ot_matcher = base_solver.OTMatcherLinear(ot_solver) time_sampler = functools.partial(samplers.uniform_sampler, offset=1e-5) optimizer = optax.adam(learning_rate=1e-3) - unbalancedness_handler = base_solver.UnbalancednessHandler( - random.PRNGKey(0), input_dim, input_dim, condition_dim - ) fm = otfm.OTFlowMatching( neural_vf, @@ -123,7 +116,6 @@ def test_flow_matching_with_conditions( flow=flow, time_sampler=time_sampler, optimizer=optimizer, - unbalancedness_handler=unbalancedness_handler ) fm( data_loader_gaussian_with_conditions, @@ -173,9 +165,6 @@ def test_flow_matching_conditional( ot_matcher = base_solver.OTMatcherLinear(ot_solver) time_sampler = samplers.uniform_sampler optimizer = optax.adam(learning_rate=1e-3) - unbalancedness_handler = base_solver.UnbalancednessHandler( - random.PRNGKey(0), dim, dim, condition_dim - ) fm = otfm.OTFlowMatching( neural_vf, @@ -187,7 +176,6 @@ def test_flow_matching_conditional( flow=flow, time_sampler=time_sampler, optimizer=optimizer, - unbalancedness_handler=unbalancedness_handler ) 
fm(data_loader_gaussian_conditional, data_loader_gaussian_conditional) @@ -220,7 +208,6 @@ def test_flow_matching_learn_rescaling( ) batch = next(iter(data_loader)) source = jnp.asarray(batch["source_lin"]) - target = jnp.asarray(batch["target_lin"]) source_conditions = jnp.asarray(batch["source_conditions"]) if len( batch["source_conditions"] ) > 0 else None @@ -239,23 +226,11 @@ def test_flow_matching_learn_rescaling( tau_a = 0.9 tau_b = 0.2 - rescaling_a = nets.RescalingMLP(hidden_dim=4, condition_dim=condition_dim) - rescaling_b = nets.RescalingMLP(hidden_dim=4, condition_dim=condition_dim) ot_matcher = base_solver.OTMatcherLinear( ot_solver, tau_a=tau_a, tau_b=tau_b, ) - unbalancedness_handler = base_solver.UnbalancednessHandler( - random.PRNGKey(0), - source_dim, - source_dim, - condition_dim, - tau_a=tau_a, - tau_b=tau_b, - rescaling_a=rescaling_a, - rescaling_b=rescaling_b - ) fm = otfm.OTFlowMatching( neural_vf, @@ -267,18 +242,5 @@ def test_flow_matching_learn_rescaling( flow=flow, time_sampler=time_sampler, optimizer=optimizer, - unbalancedness_handler=unbalancedness_handler, ) fm(data_loader, data_loader) - - result_eta = fm.unbalancedness_handler.evaluate_eta( - source, condition=source_conditions - ) - assert isinstance(result_eta, jnp.ndarray) - assert jnp.sum(jnp.isnan(result_eta)) == 0 - - result_xi = fm.unbalancedness_handler.evaluate_xi( - target, condition=source_conditions - ) - assert isinstance(result_xi, jnp.ndarray) - assert jnp.sum(jnp.isnan(result_xi)) == 0 From 23eca2caa762e4207f55e0f300a81a048f4e4fe5 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Wed, 6 Mar 2024 09:50:22 +0100 Subject: [PATCH 109/186] remove tests --- tests/neural/genot_test.py | 56 -------------------------------------- tests/neural/otfm_test.py | 50 +--------------------------------- 2 files changed, 1 insertion(+), 105 deletions(-) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 7156dec3d..c8d4a48f7 100644 --- 
a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -153,62 +153,6 @@ def test_genot_linear_conditional( assert isinstance(result_forward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_forward)) == 0 - @pytest.mark.parametrize("conditional", [False, True]) - @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) - def test_genot_linear_learn_rescaling( - self, conditional: bool, genot_data_loader_linear: Iterator, - solver_latent_to_data: Optional[str], - genot_data_loader_linear_conditional: Iterator - ): - matcher_latent_to_data = ( - None if solver_latent_to_data is None else - base_solver.OTMatcherLinear(sinkhorn.Sinkhorn()) - ) - - data_loader = ( - genot_data_loader_linear_conditional - if conditional else genot_data_loader_linear - ) - - batch = next(iter(data_loader)) - source_lin, target_lin, source_condition = jnp.array( - batch["source_lin"] - ), jnp.array(batch["target_lin"]), jnp.array(batch["source_conditions"]) - - source_dim = source_lin.shape[1] - target_dim = target_lin.shape[1] - condition_dim = source_condition.shape[1] if conditional else 0 - - neural_vf = VelocityField( - output_dim=target_dim, - condition_dim=source_dim + condition_dim, - latent_embed_dim=5, - ) - ot_solver = sinkhorn.Sinkhorn() - ot_matcher = base_solver.OTMatcherLinear( - ot_solver, - cost_fn=costs.SqEuclidean(), - tau_a=0.2, - tau_b=0.9, - ) - time_sampler = uniform_sampler - optimizer = optax.adam(learning_rate=1e-3) - - genot = GENOTLin( - neural_vf, - input_dim=source_dim, - output_dim=target_dim, - cond_dim=condition_dim, - iterations=3, - valid_freq=2, - ot_matcher=ot_matcher, - optimizer=optimizer, - time_sampler=time_sampler, - matcher_latent_to_data=matcher_latent_to_data, - ) - - genot(data_loader, data_loader) - class TestGENOTQuad: diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index 5c53db325..f43403054 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -12,7 +12,7 @@ # See the License for 
the specific language governing permissions and # limitations under the License. import functools -from typing import Iterator, Literal, Type +from typing import Literal, Type import pytest @@ -196,51 +196,3 @@ def test_flow_matching_conditional( ) assert isinstance(result_backward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_backward)) == 0 - - @pytest.mark.parametrize("conditional", [False, True]) - def test_flow_matching_learn_rescaling( - self, conditional: bool, data_loader_gaussian: Iterator, - data_loader_gaussian_conditional: Iterator - ): - data_loader = ( - data_loader_gaussian_conditional - if conditional else data_loader_gaussian - ) - batch = next(iter(data_loader)) - source = jnp.asarray(batch["source_lin"]) - source_conditions = jnp.asarray(batch["source_conditions"]) if len( - batch["source_conditions"] - ) > 0 else None - - source_dim = source.shape[1] - condition_dim = source_conditions.shape[1] if conditional else 0 - neural_vf = models.VelocityField( - output_dim=2, - condition_dim=0, - latent_embed_dim=5, - ) - ot_solver = sinkhorn.Sinkhorn() - time_sampler = samplers.uniform_sampler - flow = flows.ConstantNoiseFlow(1.0) - optimizer = optax.adam(learning_rate=1e-3) - - tau_a = 0.9 - tau_b = 0.2 - ot_matcher = base_solver.OTMatcherLinear( - ot_solver, - tau_a=tau_a, - tau_b=tau_b, - ) - - fm = otfm.OTFlowMatching( - neural_vf, - input_dim=source_dim, - cond_dim=condition_dim, - iterations=3, - valid_freq=2, - ot_matcher=ot_matcher, - flow=flow, - time_sampler=time_sampler, - optimizer=optimizer, - ) - fm(data_loader, data_loader) From 85427bac1eda6e85c45b6e6347b4f3676c7cc3e7 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Wed, 6 Mar 2024 10:06:43 +0100 Subject: [PATCH 110/186] make genot training loops more similar to otfm training loop --- src/ott/neural/flow_models/genot.py | 120 ++++++++++++++-------------- 1 file changed, 60 insertions(+), 60 deletions(-) diff --git a/src/ott/neural/flow_models/genot.py 
b/src/ott/neural/flow_models/genot.py index ba2c0f6a0..7291b8fdc 100644 --- a/src/ott/neural/flow_models/genot.py +++ b/src/ott/neural/flow_models/genot.py @@ -17,6 +17,7 @@ import jax import jax.numpy as jnp +import jax.tree_util as jtu import diffrax import optax @@ -43,7 +44,6 @@ class GENOTBase: input_dim: Dimension of the data in the source distribution. output_dim: Dimension of the data in the target distribution. cond_dim: Dimension of the conditioning variable. - iterations: Number of iterations. valid_freq: Frequency of validation. ot_solver: OT solver to match samples from the source and the target distribution. @@ -87,7 +87,6 @@ def __init__( input_dim: int, output_dim: int, cond_dim: int, - iterations: int, valid_freq: int, ot_matcher: base_solver.BaseOTMatcher, optimizer: optax.GradientTransformation, @@ -105,7 +104,6 @@ def __init__( rng = utils.default_prng_key(rng) self.rng = utils.default_prng_key(rng) - self.iterations = iterations self.valid_freq = valid_freq self.velocity_field = velocity_field self.state_velocity_field: Optional[train_state.TrainState] = None @@ -280,31 +278,33 @@ class GENOTLin(GENOTBase): neural solver for entropic (linear) OT problems. """ - def __call__(self, train_loader, valid_loader): - """Train GENOT. + def __call__( + self, + n_iters: int, + train_source, + train_target, + valid_source, + valid_target, + valid_freq: int = 5000, + rng: Optional[jax.Array] = None, + ): + """Train GENOTLin.""" + rng = utils.default_prng_key(rng) + training_logs = {"loss": []} - Args: - train_loader: Data loader for the training data. - valid_loader: Data loader for the validation data. 
- """ - iter = -1 - stop = False - while True: - for batch in train_loader: - iter += 1 - if iter >= self.iterations: - stop = True - break + for it in range(n_iters): + for batch_source, batch_target in zip(train_source, train_target): ( - self.rng, rng_time, rng_resample, rng_noise, rng_latent_data_match, + rng, rng_resample, rng_noise, rng_time, rng_latent_data_match, rng_step_fn - ) = jax.random.split(self.rng, 6) - source, source_conditions, target = jnp.array( - batch["source_lin"] - ), jnp.array(batch["source_conditions"] - ) if "source_conditions" in batch else None, jnp.array( - batch["target_lin"] - ) + ) = jax.random.split(rng, 6) + + batch_source = jtu.tree_map(jnp.asarray, batch_source) + batch_target = jtu.tree_map(jnp.asarray, batch_target) + + source = batch_source["lin"] + source_conditions = batch_source.get("conditions", None) + target = batch_target["lin"] batch_size = len(source) n_samples = batch_size * self.k_samples_per_x @@ -358,10 +358,10 @@ def __call__(self, train_loader, valid_loader): tmat=tmat ) - if iter % self.valid_freq == 0: - self._valid_step(valid_loader, iter) - if stop: - break + training_logs["loss"].append(float(loss)) + + if it % valid_freq == 0: + self._valid_step(valid_source, valid_target, it) class GENOTQuad(GENOTBase): @@ -373,36 +373,36 @@ class GENOTQuad(GENOTBase): problems, respectively. """ - def __call__(self, train_loader, valid_loader): - """Train GENOT. - - Args: - train_loader: Data loader for the training data. - valid_loader: Data loader for the validation data. 
- """ - batch: Dict[str, jnp.array] = {} - iter = -1 - stop = False - while True: - for batch in train_loader: - iter += 1 - if iter >= self.iterations: - stop = True - break + def __call__( + self, + n_iters: int, + train_source, + train_target, + valid_source, + valid_target, + valid_freq: int = 5000, + rng: Optional[jax.Array] = None, + ): + """Train GENOTQuad.""" + rng = utils.default_prng_key(rng) + training_logs = {"loss": []} + for it in range(n_iters): + for batch_source, batch_target in zip(train_source, train_target): ( - self.rng, rng_time, rng_resample, rng_noise, rng_latent_data_match, + rng, rng_resample, rng_noise, rng_time, rng_latent_data_match, rng_step_fn - ) = jax.random.split(self.rng, 6) - (source_lin, source_quad, source_conditions, target_lin, - target_quad) = ( - jnp.array(batch["source_lin"]) if "source_lin" in batch else None, - jnp.array(batch["source_quad"]), - jnp.array(batch["source_conditions"]) - if "source_conditions" in batch else None, - jnp.array(batch["target_lin"]) if "target_lin" in batch else None, - jnp.array(batch["target_quad"]) - ) + ) = jax.random.split(rng, 6) + + batch_source = jtu.tree_map(jnp.asarray, batch_source) + batch_target = jtu.tree_map(jnp.asarray, batch_target) + + source_lin = batch_source.get("lin", None) + source_quad = batch_source["quad"] + source_conditions = batch_source.get("conditions", None) + target_lin = batch_target.get("lin", None) + target_quad = batch_target["quad"] + batch_size = len(source_quad) n_samples = batch_size * self.k_samples_per_x time = self.time_sampler(rng_time, n_samples) @@ -463,7 +463,7 @@ def __call__(self, train_loader, valid_loader): condition=source_conditions, tmat=tmat ) - if iter % self.valid_freq == 0: - self._valid_step(valid_loader, iter) - if stop: - break + training_logs["loss"].append(float(loss)) + + if it % valid_freq == 0: + self._valid_step(valid_source, valid_target, it) From 5a2424abaf3cee8d7b663a1ca782d49f7c8e30bc Mon Sep 17 00:00:00 2001 From: Dominik 
Klein Date: Wed, 6 Mar 2024 10:23:39 +0100 Subject: [PATCH 111/186] adapt tests to the extent possible --- tests/neural/genot_test.py | 44 ++++++++++++++++++++++---------------- tests/neural/otfm_test.py | 25 +++++++++++++--------- 2 files changed, 41 insertions(+), 28 deletions(-) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index c8d4a48f7..f938728c1 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -76,15 +76,18 @@ def test_genot_linear_unconditional( input_dim=source_dim, output_dim=target_dim, cond_dim=condition_dim, - iterations=3, - valid_freq=2, ot_matcher=ot_matcher, optimizer=optimizer, time_sampler=time_sampler, k_samples_per_x=k_samples_per_x, matcher_latent_to_data=matcher_latent_to_data, ) - genot(genot_data_loader_linear, genot_data_loader_linear) + genot( + genot_data_loader_linear, + genot_data_loader_linear, + n_iters=2, + valid_freq=3 + ) batch = next(iter(genot_data_loader_linear)) result_forward = genot.transport( @@ -135,8 +138,6 @@ def test_genot_linear_conditional( input_dim=source_dim, output_dim=target_dim, cond_dim=condition_dim, - iterations=3, - valid_freq=2, ot_matcher=ot_matcher, optimizer=optimizer, time_sampler=time_sampler, @@ -145,7 +146,9 @@ def test_genot_linear_conditional( ) genot( genot_data_loader_linear_conditional, - genot_data_loader_linear_conditional + genot_data_loader_linear_conditional, + n_iters=2, + valid_freq=3 ) result_forward = genot.transport( source_lin, condition=source_conditions, forward=True @@ -199,15 +202,15 @@ def test_genot_quad_unconditional( input_dim=source_dim, output_dim=target_dim, cond_dim=condition_dim, - iterations=3, - valid_freq=2, ot_matcher=ot_matcher, optimizer=optimizer, time_sampler=time_sampler, k_samples_per_x=k_samples_per_x, matcher_latent_to_data=matcher_latent_to_data, ) - genot(genot_data_loader_quad, genot_data_loader_quad) + genot( + genot_data_loader_quad, genot_data_loader_quad, n_iters=2, valid_freq=3 + ) result_forward = 
genot.transport( source_quad, condition=source_conditions, forward=True @@ -259,14 +262,17 @@ def test_genot_fused_unconditional( input_dim=source_dim, output_dim=target_dim, cond_dim=condition_dim, - iterations=3, - valid_freq=2, ot_matcher=ot_matcher, optimizer=optimizer, k_samples_per_x=k_samples_per_x, matcher_latent_to_data=matcher_latent_to_data, ) - genot(genot_data_loader_fused, genot_data_loader_fused) + genot( + genot_data_loader_fused, + genot_data_loader_fused, + n_iters=2, + valid_freq=3 + ) result_forward = genot.transport( jnp.concatenate((source_lin, source_quad), axis=1), @@ -320,8 +326,6 @@ def test_genot_quad_conditional( input_dim=source_dim, output_dim=target_dim, cond_dim=condition_dim, - iterations=3, - valid_freq=2, ot_matcher=ot_matcher, optimizer=optimizer, time_sampler=time_sampler, @@ -329,7 +333,10 @@ def test_genot_quad_conditional( matcher_latent_to_data=matcher_latent_to_data, ) genot( - genot_data_loader_quad_conditional, genot_data_loader_quad_conditional + genot_data_loader_quad_conditional, + genot_data_loader_quad_conditional, + n_iters=2, + valid_freq=3 ) result_forward = genot.transport( @@ -385,8 +392,6 @@ def test_genot_fused_conditional( input_dim=source_dim, output_dim=target_dim, cond_dim=condition_dim, - iterations=3, - valid_freq=2, ot_matcher=ot_matcher, optimizer=optimizer, time_sampler=time_sampler, @@ -394,7 +399,10 @@ def test_genot_fused_conditional( matcher_latent_to_data=matcher_latent_to_data, ) genot( - genot_data_loader_fused_conditional, genot_data_loader_fused_conditional + genot_data_loader_fused_conditional, + genot_data_loader_fused_conditional, + n_iters=2, + valid_freq=3 ) result_forward = genot.transport( diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index f43403054..14af037db 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -53,16 +53,18 @@ def test_flow_matching_unconditional( neural_vf, input_dim=input_dim, cond_dim=condition_dim, - iterations=3, - 
valid_freq=2, ot_matcher=ot_matcher, flow=flow, time_sampler=time_sampler, optimizer=optimizer, ) fm( - data_loaders_gaussian[0], data_loaders_gaussian[1], - data_loaders_gaussian[0], data_loaders_gaussian[1] + data_loaders_gaussian[0], + data_loaders_gaussian[1], + data_loaders_gaussian[0], + data_loaders_gaussian[1], + n_iters=2, + valid_freq=3 ) batch_src = next(iter(data_loaders_gaussian[0])) @@ -110,8 +112,6 @@ def test_flow_matching_with_conditions( neural_vf, input_dim=2, cond_dim=1, - iterations=3, - valid_freq=2, ot_matcher=ot_matcher, flow=flow, time_sampler=time_sampler, @@ -119,7 +119,9 @@ def test_flow_matching_with_conditions( ) fm( data_loader_gaussian_with_conditions, - data_loader_gaussian_with_conditions + data_loader_gaussian_with_conditions, + n_iters=2, + valid_freq=3 ) batch = next(iter(data_loader_gaussian_with_conditions)) @@ -170,14 +172,17 @@ def test_flow_matching_conditional( neural_vf, input_dim=dim, cond_dim=condition_dim, - iterations=3, - valid_freq=2, ot_matcher=ot_matcher, flow=flow, time_sampler=time_sampler, optimizer=optimizer, ) - fm(data_loader_gaussian_conditional, data_loader_gaussian_conditional) + fm( + data_loader_gaussian_conditional, + data_loader_gaussian_conditional, + n_iters=2, + valid_freq=3 + ) batch = next(iter(data_loader_gaussian_conditional)) source = jnp.asarray(batch["source_lin"]) From c4a187e014b9cfd2af087499efef50c96d24c028 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Mon, 11 Mar 2024 19:28:25 +0100 Subject: [PATCH 112/186] Add weights to sampling --- src/ott/neural/data/datasets.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/src/ott/neural/data/datasets.py b/src/ott/neural/data/datasets.py index 5a12ed2c0..d13237cc9 100644 --- a/src/ott/neural/data/datasets.py +++ b/src/ott/neural/data/datasets.py @@ -11,7 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License. -from typing import Dict, List, Optional +from typing import Dict, Iterable, Optional import jax.tree_util as jtu import numpy as np @@ -65,21 +65,29 @@ class ConditionalOTDataset: Args: datasets: Datasets to sample from. + weights: TODO. seed: Random seed. """ def __init__( self, - # TODO(michalk8): allow for dict with weights - datasets: List[OTDataset], + datasets: Iterable[OTDataset], + weights: Iterable[float] = None, seed: Optional[int] = None, ): self.datasets = tuple(datasets) - self._rng = np.random.default_rng(seed=seed) + + if weights is None: + weights = np.ones(len(self.datasets)) + weights = np.asarray(weights) + self.weights = weights / np.sum(weights) + assert len(self.weights) == len(self.datasets), "TODO" + + self._rng = np.random.default_rng(seed) self._iterators = () def __next__(self) -> Dict[str, np.ndarray]: - idx = self._rng.choice(len(self._iterators)) + idx = self._rng.choice(len(self._iterators), p=self.weights) return next(self._iterators[idx]) def __iter__(self) -> "ConditionalOTDataset": From 30f2324ed251d52b3f5a94308bc16ab2865fdfa0 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Mon, 11 Mar 2024 19:58:41 +0100 Subject: [PATCH 113/186] Start cleaning matchers --- src/ott/neural/flow_models/otfm.py | 33 ++++++-------- src/ott/neural/flow_models/utils.py | 36 ++++++++++++++++ tests/neural/otfm_test.py | 67 +++++++++++------------------ 3 files changed, 73 insertions(+), 63 deletions(-) create mode 100644 src/ott/neural/flow_models/utils.py diff --git a/src/ott/neural/flow_models/otfm.py b/src/ott/neural/flow_models/otfm.py index e426a6f5c..699bf2960 100644 --- a/src/ott/neural/flow_models/otfm.py +++ b/src/ott/neural/flow_models/otfm.py @@ -24,7 +24,7 @@ from ott import utils from ott.neural.flow_models import flows, models -from ott.neural.models import base_solver +from 
ott.neural.flow_models.utils import sample_joint __all__ = ["OTFlowMatching"] @@ -40,7 +40,7 @@ class OTFlowMatching: flow: Flow between source and target distribution. time_sampler: Sampler for the time. optimizer: Optimizer for the velocity field's parameters. - ot_matcher: TODO. + match_fn: TODO. rng: Random number generator. """ @@ -53,7 +53,8 @@ def __init__( flow: flows.BaseFlow, time_sampler: Callable[[jax.Array, int], jnp.ndarray], optimizer: optax.GradientTransformation, - ot_matcher: Optional[base_solver.OTMatcherLinear] = None, + match_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray], + jnp.ndarray]] = None, rng: Optional[jax.Array] = None, ): rng = utils.default_prng_key(rng) @@ -62,7 +63,7 @@ def __init__( self.vf = velocity_field self.flow = flow self.time_sampler = time_sampler - self.ot_matcher = ot_matcher + self.match_fn = match_fn self.optimizer = optimizer self.vf_state = self.vf.create_train_state( @@ -113,15 +114,12 @@ def __call__( # noqa: D102 n_iters: int, train_source, train_target, - valid_source, - valid_target, - valid_freq: int = 5000, rng: Optional[jax.Array] = None, ) -> Dict[str, Any]: rng = utils.default_prng_key(rng) training_logs = {"loss": []} - for it in range(n_iters): + for _ in range(n_iters): for batch_source, batch_target in zip(train_source, train_target): rng, rng_resample, rng_step_fn = jax.random.split(rng, 3) @@ -132,22 +130,18 @@ def __call__( # noqa: D102 source_conditions = batch_source.get("conditions", None) target = batch_target["lin"] - if self.ot_matcher is not None: - tmat = self.ot_matcher.match_fn(source, target) - (source, source_conditions), (target,) = self.ot_matcher.sample_joint( - rng_resample, tmat, (source, source_conditions), (target,) - ) - else: - tmat = None + if self.match_fn is not None: + tmat = self.match_fn(source, target) + src_ixs, tgt_ixs = sample_joint(rng_resample, tmat) + source, target = source[src_ixs], target[tgt_ixs] + if source_conditions is not None: + source_conditions = 
source_conditions[src_ixs] self.vf_state, loss = self.step_fn( rng_step_fn, self.vf_state, source, target, source_conditions ) training_logs["loss"].append(float(loss)) - if it % valid_freq == 0: - self._valid_step(valid_source, valid_target, it) - return training_logs def transport( @@ -203,6 +197,3 @@ def solve_ode(x: jnp.ndarray, cond: Optional[jnp.ndarray]) -> jnp.ndarray: in_axes = [0, None if condition is None else 0] return jax.jit(jax.vmap(solve_ode, in_axes))(x, condition) - - def _valid_step(self, it: int, valid_source, valid_target) -> None: - pass diff --git a/src/ott/neural/flow_models/utils.py b/src/ott/neural/flow_models/utils.py new file mode 100644 index 000000000..797370dce --- /dev/null +++ b/src/ott/neural/flow_models/utils.py @@ -0,0 +1,36 @@ +from typing import Any, Optional, Tuple + +import jax +import jax.numpy as jnp + +from ott.geometry import costs, pointcloud +from ott.solvers import linear + +__all__ = ["match_linear", "sample_joint"] + + +def match_linear( + x: jnp.ndarray, + y: jnp.ndarray, + cost_fn: Optional[costs.CostFn] = None, + epsilon: Optional[float] = None, + # TODO(michalk8): expose rest of the geom arguments + **kwargs: Any +) -> jnp.ndarray: + """TODO.""" + geom = pointcloud.PointCloud(x, y, cost_fn=cost_fn, epsilon=epsilon) + out = linear.solve(geom, **kwargs) + return out.matrix + + +def sample_joint(rng: jax.Array, + tmat: jnp.ndarray) -> Tuple[jnp.ndarray, jnp.ndarray]: + """TODO.""" + n, m = tmat.shape + tmat_flattened = tmat.flatten() + indices = jax.random.choice( + rng, len(tmat_flattened), p=tmat_flattened, shape=[n] + ) + src_ixs = indices // m + tgt_ixs = indices % m + return src_ixs, tgt_ixs diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index 14af037db..01042ec39 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -20,7 +20,7 @@ import optax -from ott.neural.flow_models import flows, models, otfm, samplers +from ott.neural.flow_models import flows, models, otfm, 
samplers, utils from ott.neural.models import base_solver from ott.solvers.linear import sinkhorn, sinkhorn_lr @@ -31,42 +31,35 @@ class TestOTFlowMatching: "flow", [ flows.ConstantNoiseFlow(0.0), flows.ConstantNoiseFlow(1.0), - flows.BrownianNoiseFlow(0.2) + flows.BrownianNoiseFlow(0.2), ] ) def test_flow_matching_unconditional( - self, data_loaders_gaussian, flow: Type[flows.BaseFlow] + self, data_loaders_gaussian, flow: flows.BaseFlow ): input_dim = 2 - condition_dim = 0 neural_vf = models.VelocityField( output_dim=2, condition_dim=0, latent_embed_dim=5, ) - ot_solver = sinkhorn.Sinkhorn() - ot_matcher = base_solver.OTMatcherLinear(ot_solver) - time_sampler = samplers.uniform_sampler optimizer = optax.adam(learning_rate=1e-3) fm = otfm.OTFlowMatching( + input_dim, neural_vf, - input_dim=input_dim, - cond_dim=condition_dim, - ot_matcher=ot_matcher, flow=flow, - time_sampler=time_sampler, + time_sampler=samplers.uniform_sampler, + match_fn=utils.match_linear, optimizer=optimizer, ) - fm( - data_loaders_gaussian[0], - data_loaders_gaussian[1], - data_loaders_gaussian[0], - data_loaders_gaussian[1], + _ = fm( n_iters=2, - valid_freq=3 + train_source=data_loaders_gaussian[0], + train_target=data_loaders_gaussian[1], ) + # TODO(michalk8): nicer batch_src = next(iter(data_loaders_gaussian[0])) source = jnp.asarray(batch_src["lin"]) batch_tgt = next(iter(data_loaders_gaussian[1])) @@ -74,16 +67,14 @@ def test_flow_matching_unconditional( source_conditions = jnp.asarray( batch_src["conditions"] ) if "conditions" in batch_src else None - result_forward = fm.transport( - source, condition=source_conditions, forward=True - ) - assert isinstance(result_forward, jnp.ndarray) + + result_forward = fm.transport(source, condition=source_conditions) + # TODO(michalk8): better condition assert jnp.sum(jnp.isnan(result_forward)) == 0 result_backward = fm.transport( - target, condition=source_conditions, forward=False + target, condition=source_conditions, t0=1.0, t1=0.0 ) - assert 
isinstance(result_backward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_backward)) == 0 @pytest.mark.parametrize( @@ -94,34 +85,29 @@ def test_flow_matching_unconditional( ] ) def test_flow_matching_with_conditions( - self, data_loader_gaussian_with_conditions, flow: Type[flows.BaseFlow] + self, data_loader_gaussian_with_conditions, flow: flows.BaseFlow ): - input_dim = 2 - condition_dim = 1 + input_dim, cond_dim = 2, 1 neural_vf = models.VelocityField( output_dim=input_dim, - condition_dim=condition_dim, + condition_dim=cond_dim, latent_embed_dim=5, ) - ot_solver = sinkhorn.Sinkhorn() - ot_matcher = base_solver.OTMatcherLinear(ot_solver) time_sampler = functools.partial(samplers.uniform_sampler, offset=1e-5) optimizer = optax.adam(learning_rate=1e-3) fm = otfm.OTFlowMatching( + 2, neural_vf, - input_dim=2, - cond_dim=1, - ot_matcher=ot_matcher, + match_fn=utils.match_linear, flow=flow, time_sampler=time_sampler, optimizer=optimizer, ) - fm( - data_loader_gaussian_with_conditions, - data_loader_gaussian_with_conditions, + _ = fm( n_iters=2, - valid_freq=3 + train_source=data_loader_gaussian_with_conditions, + train_target=data_loader_gaussian_with_conditions, ) batch = next(iter(data_loader_gaussian_with_conditions)) @@ -130,16 +116,13 @@ def test_flow_matching_with_conditions( source_conditions = jnp.asarray(batch["source_conditions"]) if len( batch["source_conditions"] ) > 0 else None - result_forward = fm.transport( - source, condition=source_conditions, forward=True - ) - assert isinstance(result_forward, jnp.ndarray) + + result_forward = fm.transport(source, condition=source_conditions) assert jnp.sum(jnp.isnan(result_forward)) == 0 result_backward = fm.transport( - target, condition=source_conditions, forward=False + target, condition=source_conditions, t0=1.0, t1=0.0 ) - assert isinstance(result_backward, jnp.ndarray) assert jnp.sum(jnp.isnan(result_backward)) == 0 @pytest.mark.parametrize( From 82bc7e687eb23cc98a8ce0e3d1f95494db6b7fb5 Mon Sep 17 00:00:00 
2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Mon, 11 Mar 2024 20:47:18 +0100 Subject: [PATCH 114/186] Add conditional sampling + resampling --- src/ott/neural/flow_models/otfm.py | 9 +++--- src/ott/neural/flow_models/utils.py | 44 ++++++++++++++++++++++++++++- 2 files changed, 48 insertions(+), 5 deletions(-) diff --git a/src/ott/neural/flow_models/otfm.py b/src/ott/neural/flow_models/otfm.py index 699bf2960..a1f60f849 100644 --- a/src/ott/neural/flow_models/otfm.py +++ b/src/ott/neural/flow_models/otfm.py @@ -24,7 +24,7 @@ from ott import utils from ott.neural.flow_models import flows, models -from ott.neural.flow_models.utils import sample_joint +from ott.neural.flow_models.utils import resample_data, sample_joint __all__ = ["OTFlowMatching"] @@ -133,9 +133,10 @@ def __call__( # noqa: D102 if self.match_fn is not None: tmat = self.match_fn(source, target) src_ixs, tgt_ixs = sample_joint(rng_resample, tmat) - source, target = source[src_ixs], target[tgt_ixs] - if source_conditions is not None: - source_conditions = source_conditions[src_ixs] + source, source_conditions = resample_data( + source, source_conditions, ixs=src_ixs + ) + target = resample_data(target, ixs=tgt_ixs) self.vf_state, loss = self.step_fn( rng_step_fn, self.vf_state, source, target, source_conditions diff --git a/src/ott/neural/flow_models/utils.py b/src/ott/neural/flow_models/utils.py index 797370dce..25598c62e 100644 --- a/src/ott/neural/flow_models/utils.py +++ b/src/ott/neural/flow_models/utils.py @@ -2,11 +2,14 @@ import jax import jax.numpy as jnp +import jax.tree_util as jtu from ott.geometry import costs, pointcloud from ott.solvers import linear -__all__ = ["match_linear", "sample_joint"] +__all__ = [ + "match_linear", "sample_joint", "sample_conditional", "resample_data" +] def match_linear( @@ -34,3 +37,42 @@ def sample_joint(rng: jax.Array, src_ixs = indices // m tgt_ixs = indices % m return src_ixs, tgt_ixs + + +def sample_conditional( + rng: 
jax.Array, + tmat: jnp.ndarray, + *, + k: int = 1, + uniform_marginals: bool = False, +) -> Tuple[jnp.ndarray, jnp.ndarray]: + """TODO.""" + assert k > 0, "Number of samples per row must be positive." + n, m = tmat.shape + + if uniform_marginals: + indices = jnp.arange(n) + else: + src_marginals = tmat.sum(axis=1) + rng, rng_ixs = jax.random.split(rng, 2) + indices = jax.random.choice( + rng_ixs, a=n, p=src_marginals, shape=(len(src_marginals),) + ) + tmat = tmat[indices] + + tgt_ixs = jax.vmap( + lambda row: jax.random.choice(rng, a=m, p=row, shape=(k,)) + )(tmat) # (m, k) + + src_ixs = jnp.repeat(indices[:, None], k, axis=1) # (n, k) + return src_ixs, tgt_ixs + + +def resample_data(*data: Optional[jnp.ndarray], + ixs: jnp.ndarray) -> Tuple[Optional[jnp.ndarray], ...]: + """TODO.""" + if ixs.ndim == 2: + ixs = ixs.reshape(-1) + assert ixs.ndim == 1, ixs.shape + data = jtu.tree_map(lambda arr: None if arr is None else arr[ixs], data) + return data[0] if len(data) == 1 else data From f430c2937112ef86e50438465cf8d41ad74c5ed1 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Mon, 11 Mar 2024 20:54:07 +0100 Subject: [PATCH 115/186] Add initial quad matcher --- src/ott/neural/flow_models/utils.py | 36 +++++++++++++++++++++++++---- 1 file changed, 32 insertions(+), 4 deletions(-) diff --git a/src/ott/neural/flow_models/utils.py b/src/ott/neural/flow_models/utils.py index 25598c62e..6e1c01f17 100644 --- a/src/ott/neural/flow_models/utils.py +++ b/src/ott/neural/flow_models/utils.py @@ -5,10 +5,14 @@ import jax.tree_util as jtu from ott.geometry import costs, pointcloud -from ott.solvers import linear +from ott.solvers import linear, quadratic __all__ = [ - "match_linear", "sample_joint", "sample_conditional", "resample_data" + "match_linear", + "match_quadratic", + "sample_joint", + "sample_conditional", + "resample_data", ] @@ -17,15 +21,39 @@ def match_linear( y: jnp.ndarray, cost_fn: Optional[costs.CostFn] = None, epsilon: 
Optional[float] = None, - # TODO(michalk8): expose rest of the geom arguments + # TODO(michalk8): type this correctly + scale_cost: float = 1.0, **kwargs: Any ) -> jnp.ndarray: """TODO.""" - geom = pointcloud.PointCloud(x, y, cost_fn=cost_fn, epsilon=epsilon) + geom = pointcloud.PointCloud( + x, y, cost_fn=cost_fn, epsilon=epsilon, scale_cost=scale_cost + ) out = linear.solve(geom, **kwargs) return out.matrix +def match_quadratic( + xx: jnp.ndarray, + yy: jnp.ndarray, + xy: Optional[jnp.ndarray] = None, + # TODO(michalk8): expose for all the costs + scale_cost: float = 1.0, + cost_fn: Optional[costs.CostFn] = None, + **kwargs: Any +) -> jnp.ndarray: + """TODO.""" + geom_xx = pointcloud.PointCloud(xx, cost_fn=cost_fn, scale_cost=scale_cost) + geom_yy = pointcloud.PointCloud(yy, cost_fn=cost_fn, scale_cost=scale_cost) + if xy is None: + geom_xy = None + else: + geom_xy = pointcloud.PointCloud(xy, cost_fn=cost_fn, scale_cost=scale_cost) + + out = quadratic.solve(geom_xx, geom_yy, geom_xy, **kwargs) + return out.matrix + + def sample_joint(rng: jax.Array, tmat: jnp.ndarray) -> Tuple[jnp.ndarray, jnp.ndarray]: """TODO.""" From 4b41f0ccbc7d60e9bdd96bc540d957de38d05712 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Mon, 11 Mar 2024 20:54:54 +0100 Subject: [PATCH 116/186] Improve typing --- src/ott/neural/flow_models/utils.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/ott/neural/flow_models/utils.py b/src/ott/neural/flow_models/utils.py index 6e1c01f17..d6d023266 100644 --- a/src/ott/neural/flow_models/utils.py +++ b/src/ott/neural/flow_models/utils.py @@ -1,4 +1,4 @@ -from typing import Any, Optional, Tuple +from typing import Any, Literal, Optional, Tuple, Union import jax import jax.numpy as jnp @@ -15,14 +15,15 @@ "resample_data", ] +ScaleCost_t = Union[float, Literal["mean", "max_cost", "median"]] + def match_linear( x: jnp.ndarray, y: jnp.ndarray, cost_fn: Optional[costs.CostFn] = 
None, epsilon: Optional[float] = None, - # TODO(michalk8): type this correctly - scale_cost: float = 1.0, + scale_cost: ScaleCost_t = 1.0, **kwargs: Any ) -> jnp.ndarray: """TODO.""" @@ -38,7 +39,7 @@ def match_quadratic( yy: jnp.ndarray, xy: Optional[jnp.ndarray] = None, # TODO(michalk8): expose for all the costs - scale_cost: float = 1.0, + scale_cost: ScaleCost_t = 1.0, cost_fn: Optional[costs.CostFn] = None, **kwargs: Any ) -> jnp.ndarray: From cc2746b5de1377028f83bec3c69c3663ff0aaba7 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Mon, 11 Mar 2024 21:02:06 +0100 Subject: [PATCH 117/186] Remove `base_solver.py` --- src/ott/neural/flow_models/genot.py | 7 +- src/ott/neural/models/__init__.py | 2 +- src/ott/neural/models/base_solver.py | 308 --------------------------- tests/neural/otfm_test.py | 7 +- 4 files changed, 7 insertions(+), 317 deletions(-) delete mode 100644 src/ott/neural/models/base_solver.py diff --git a/src/ott/neural/flow_models/genot.py b/src/ott/neural/flow_models/genot.py index 7291b8fdc..88be0da38 100644 --- a/src/ott/neural/flow_models/genot.py +++ b/src/ott/neural/flow_models/genot.py @@ -25,7 +25,6 @@ from ott import utils from ott.neural.flow_models import flows, samplers -from ott.neural.models import base_solver __all__ = ["GENOTBase", "GENOTLin", "GENOTQuad"] @@ -88,13 +87,15 @@ def __init__( output_dim: int, cond_dim: int, valid_freq: int, - ot_matcher: base_solver.BaseOTMatcher, + # TODO(michalk8) + ot_matcher: Any, optimizer: optax.GradientTransformation, flow: Type[flows.BaseFlow] = flows.ConstantNoiseFlow(0.0), # noqa: B008 time_sampler: Callable[[jax.Array, int], jnp.ndarray] = samplers.uniform_sampler, k_samples_per_x: int = 1, - matcher_latent_to_data: Optional[base_solver.OTMatcherLinear] = None, + # TODO(michalk8) + matcher_latent_to_data: Optional[Callable] = None, kwargs_solver_latent_to_data: Dict[str, Any] = types.MappingProxyType({}), fused_penalty: float = 0.0, callback_fn: 
Optional[Callable[[jnp.ndarray, jnp.ndarray, jnp.ndarray], diff --git a/src/ott/neural/models/__init__.py b/src/ott/neural/models/__init__.py index ba39ae8b4..83287aec5 100644 --- a/src/ott/neural/models/__init__.py +++ b/src/ott/neural/models/__init__.py @@ -11,4 +11,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from . import base_solver, layers, nets +from . import layers, nets diff --git a/src/ott/neural/models/base_solver.py b/src/ott/neural/models/base_solver.py deleted file mode 100644 index 5ddfd5ef5..000000000 --- a/src/ott/neural/models/base_solver.py +++ /dev/null @@ -1,308 +0,0 @@ -# Copyright OTT-JAX -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-from typing import Any, Callable, Dict, Literal, Mapping, Optional, Tuple, Union - -import jax -import jax.numpy as jnp -from jax import tree_util - -from ott.geometry import costs, pointcloud -from ott.problems.linear import linear_problem -from ott.problems.quadratic import quadratic_problem -from ott.solvers.linear import sinkhorn -from ott.solvers.quadratic import gromov_wasserstein, gromov_wasserstein_lr - -ScaleCost_t = Union[int, float, Literal["mean", "max_cost", "median"]] -ScaleCostQuad_t = Union[ScaleCost_t, Dict[str, ScaleCost_t]] - -__all__ = [ - "BaseOTMatcher", - "OTMatcherLinear", - "OTMatcherQuad", -] - - -def _get_sinkhorn_match_fn( - ot_solver: Any, - epsilon: float = 1e-2, - cost_fn: Optional[costs.CostFn] = None, - scale_cost: ScaleCost_t = 1.0, - tau_a: float = 1.0, - tau_b: float = 1.0, -) -> Callable: - - @jax.jit - def match_pairs(x: jnp.ndarray, y: jnp.ndarray) -> jnp.ndarray: - geom = pointcloud.PointCloud( - x, y, epsilon=epsilon, scale_cost=scale_cost, cost_fn=cost_fn - ) - return ot_solver( - linear_problem.LinearProblem(geom, tau_a=tau_a, tau_b=tau_b) - ) - - return match_pairs - - -def _get_gromov_match_fn( - ot_solver: Any, - cost_fn: Union[Any, Mapping[str, Any]], - scale_cost: ScaleCostQuad_t, - tau_a: float, - tau_b: float, - fused_penalty: float, -) -> Callable: - if isinstance(cost_fn, Mapping): - assert "cost_fn_xx" in cost_fn - assert "cost_fn_yy" in cost_fn - cost_fn_xx = cost_fn["cost_fn_xx"] - cost_fn_yy = cost_fn["cost_fn_yy"] - if fused_penalty > 0: - assert "cost_fn_xy" in cost_fn_xx - cost_fn_xy = cost_fn["cost_fn_xy"] - else: - cost_fn_xx = cost_fn_yy = cost_fn_xy = cost_fn - - if isinstance(scale_cost, Mapping): - assert "scale_cost_xx" in scale_cost - assert "scale_cost_yy" in scale_cost - scale_cost_xx = scale_cost["scale_cost_xx"] - scale_cost_yy = scale_cost["scale_cost_yy"] - if fused_penalty > 0: - assert "scale_cost_xy" in scale_cost - scale_cost_xy = cost_fn["scale_cost_xy"] - else: - scale_cost_xx = 
scale_cost_yy = scale_cost_xy = scale_cost - - @jax.jit - def match_pairs( - x_quad: jnp.ndarray, - y_quad: jnp.ndarray, - x_lin: Optional[jnp.ndarray], - y_lin: Optional[jnp.ndarray], - ) -> jnp.ndarray: - geom_xx = pointcloud.PointCloud( - x=x_quad, y=x_quad, cost_fn=cost_fn_xx, scale_cost=scale_cost_xx - ) - geom_yy = pointcloud.PointCloud( - x=y_quad, y=y_quad, cost_fn=cost_fn_yy, scale_cost=scale_cost_yy - ) - if fused_penalty > 0: - geom_xy = pointcloud.PointCloud( - x=x_lin, y=y_lin, cost_fn=cost_fn_xy, scale_cost=scale_cost_xy - ) - else: - geom_xy = None - prob = quadratic_problem.QuadraticProblem( - geom_xx, - geom_yy, - geom_xy, - fused_penalty=fused_penalty, - tau_a=tau_a, - tau_b=tau_b - ) - return ot_solver(prob) - - return match_pairs - - -class BaseOTMatcher: - """Base class for mini-batch neural OT matching classes.""" - - def sample_joint( - self, - rng: jax.Array, - joint_dist: jnp.ndarray, - source_arrays: Tuple[Optional[jnp.ndarray], ...], - target_arrays: Tuple[Optional[jnp.ndarray], ...], - ) -> Tuple[jnp.ndarray, ...]: - """Resample from arrays according to discrete joint distribution. - - Args: - rng: Random number generator. - joint_dist: Joint distribution between source and target to sample from. - source_arrays: Arrays corresponding to source distriubution to sample - from. - target_arrays: Arrays corresponding to target arrays to sample from. - - Returns: - Resampled source and target arrays. 
- """ - _, n_tgt = joint_dist.shape - tmat_flattened = joint_dist.flatten() - indices = jax.random.choice( - rng, len(tmat_flattened), p=tmat_flattened, shape=[joint_dist.shape[0]] - ) - indices_source = indices // n_tgt - indices_target = indices % n_tgt - return tree_util.tree_map(lambda b: b[indices_source], - source_arrays), tree_util.tree_map( - lambda b: b[indices_target], target_arrays - ) - - def sample_conditional_indices_from_tmap( - self, - rng: jax.Array, - conditional_distributions: jnp.ndarray, - *, - k_samples_per_x: int, - source_arrays: Tuple[Optional[jnp.ndarray], ...], - target_arrays: Tuple[Optional[jnp.ndarray], ...], - source_is_balanced: bool, - ) -> Tuple[jnp.ndarray, ...]: - """Sample from arrays according to discrete conditional distributions. - - Args: - rng: Random number generator. - conditional_distributions: Conditional distributions to sample from. - k_samples_per_x: Expectation of number of samples to draw from each - conditional distribution. - source_arrays: Arrays corresponding to source distriubution to sample - from. - target_arrays: Arrays corresponding to target arrays to sample from. - source_is_balanced: Whether the source distribution is balanced. - If :obj:`False`, the number of samples drawn from each conditional - distribution `k_samples_per_x` is proportional to the left marginals. - - Returns: - Resampled source and target arrays. - """ - n_src, n_tgt = conditional_distributions.shape - left_marginals = conditional_distributions.sum(axis=1) - if not source_is_balanced: - rng, rng_2 = jax.random.split(rng, 2) - indices = jax.random.choice( - key=rng_2, - a=jnp.arange(len(left_marginals)), - p=left_marginals, - shape=(len(left_marginals),) - ) - else: - indices = jnp.arange(n_src) - tmat_adapted = conditional_distributions[indices] - indices_per_row = jax.vmap( - lambda row: jax.random. 
- choice(key=rng, a=n_tgt, p=row, shape=(k_samples_per_x,)), - in_axes=0, - out_axes=0, - )( - tmat_adapted - ) - - indices_source = jnp.repeat(indices, k_samples_per_x) - indices_target = jnp.reshape( - indices_per_row % n_tgt, (n_src * k_samples_per_x,) - ) - return tree_util.tree_map( - lambda b: jnp. - reshape(b[indices_source], - (k_samples_per_x, n_src, *b.shape[1:])), source_arrays - ), tree_util.tree_map( - lambda b: jnp. - reshape(b[indices_target], - (k_samples_per_x, n_src, *b.shape[1:])), target_arrays - ) - - -class OTMatcherLinear(BaseOTMatcher): - """Class for mini-batch OT in neural optimal transport solvers. - - Args: - ot_solver: OT solver to match samples from the source and the target - distribution as proposed in :cite:`tong:23`, :cite:`pooladian:23`. - If :obj:`None`, no matching will be performed as proposed in - :cite:`lipman:22`. - """ - - def __init__( - self, - ot_solver: sinkhorn.Sinkhorn, - epsilon: float = 1e-2, - cost_fn: Optional[costs.CostFn] = None, - scale_cost: ScaleCost_t = 1.0, - tau_a: float = 1.0, - tau_b: float = 1.0, - ) -> None: - - if isinstance( - ot_solver, gromov_wasserstein.GromovWasserstein - ) and epsilon is not None: - raise ValueError( - "If `ot_solver` is `GromovWasserstein`, `epsilon` must be `None`. " + - "This check is performed to ensure that in the (fused) Gromov case " + - "the `epsilon` parameter is passed via the `ot_solver`." 
- ) - self.ot_solver = ot_solver - self.epsilon = epsilon - self.cost_fn = cost_fn - self.scale_cost = scale_cost - self.tau_a = tau_a - self.tau_b = tau_b - self.match_fn = None if ot_solver is None else self._get_sinkhorn_match_fn( - self.ot_solver, self.epsilon, self.cost_fn, self.scale_cost, self.tau_a, - self.tau_b - ) - - def _get_sinkhorn_match_fn(self, *args, **kwargs) -> jnp.ndarray: - fn = _get_sinkhorn_match_fn(*args, **kwargs) - - @jax.jit - def match_pairs(*args, **kwargs): - return fn(*args, **kwargs).matrix - - return match_pairs - - -class OTMatcherQuad(BaseOTMatcher): - """Class for mini-batch OT in neural optimal transport solvers. - - Args: - ot_solver: OT solver to match samples from the source and the target - distribution as proposed in :cite:`tong:23`, :cite:`pooladian:23`. - If :obj:`None`, no matching will be performed as proposed in - :cite:`lipman:22`. - """ - - def __init__( - self, - ot_solver: Union[gromov_wasserstein.GromovWasserstein, - gromov_wasserstein_lr.LRGromovWasserstein], - cost_fn: Optional[costs.CostFn] = None, - scale_cost: ScaleCostQuad_t = 1.0, - tau_a: float = 1.0, - tau_b: float = 1.0, - fused_penalty: float = 0.0, - ) -> None: - self.ot_solver = ot_solver - self.cost_fn = cost_fn - self.scale_cost = scale_cost - self.tau_a = tau_a - self.tau_b = tau_b - self.fused_penalty = fused_penalty - self.match_fn = self._get_gromov_match_fn( - self.ot_solver, - self.cost_fn, - self.scale_cost, - self.tau_a, - self.tau_b, - fused_penalty=self.fused_penalty - ) - - def _get_gromov_match_fn(self, *args, **kwargs) -> jnp.ndarray: - fn = _get_gromov_match_fn(*args, **kwargs) - - @jax.jit - def match_pairs(*args, **kwargs): - return fn(*args, **kwargs).matrix - - return match_pairs diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index 01042ec39..f4c0fc1d6 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -21,8 +21,6 @@ import optax from ott.neural.flow_models import flows, models, otfm, 
samplers, utils -from ott.neural.models import base_solver -from ott.solvers.linear import sinkhorn, sinkhorn_lr class TestOTFlowMatching: @@ -145,9 +143,8 @@ def test_flow_matching_conditional( condition_dim=condition_dim, latent_embed_dim=5, ) - ot_solver = sinkhorn.Sinkhorn( - ) if solver == "sinkhorn" else sinkhorn_lr.LRSinkhorn() - ot_matcher = base_solver.OTMatcherLinear(ot_solver) + # TODO(michalk8): check for LR + ot_matcher = utils.match_linear time_sampler = samplers.uniform_sampler optimizer = optax.adam(learning_rate=1e-3) From 1068410a562dfa05501a9e2d2362795e462e301b Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Mon, 11 Mar 2024 21:08:21 +0100 Subject: [PATCH 118/186] Add TODO --- src/ott/neural/data/datasets.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/ott/neural/data/datasets.py b/src/ott/neural/data/datasets.py index d13237cc9..9a73677c1 100644 --- a/src/ott/neural/data/datasets.py +++ b/src/ott/neural/data/datasets.py @@ -71,6 +71,7 @@ class ConditionalOTDataset: def __init__( self, + # TODO(michalk8): generalize the type datasets: Iterable[OTDataset], weights: Iterable[float] = None, seed: Optional[int] = None, From e5597402e1bcfc047063db906f53ac2e87ac2a6b Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Wed, 13 Mar 2024 14:04:38 +0100 Subject: [PATCH 119/186] Update datasets, fix OTFM tests --- docs/neural/data.rst | 2 +- src/ott/neural/data/datasets.py | 157 +++++++++++++++-------- src/ott/neural/flow_models/otfm.py | 48 +++---- tests/neural/conftest.py | 199 ++++++++++++++--------------- tests/neural/genot_test.py | 1 - tests/neural/otfm_test.py | 186 +++++++++------------------ 6 files changed, 289 insertions(+), 304 deletions(-) diff --git a/docs/neural/data.rst b/docs/neural/data.rst index 95f05f93f..25172dcd3 100644 --- a/docs/neural/data.rst +++ b/docs/neural/data.rst @@ -12,4 +12,4 @@ Datasets :toctree: _autosummary 
datasets.OTDataset - datasets.ConditionalOTDataset + datasets.ConditionalLoader diff --git a/src/ott/neural/data/datasets.py b/src/ott/neural/data/datasets.py index 9a73677c1..0a2067f25 100644 --- a/src/ott/neural/data/datasets.py +++ b/src/ott/neural/data/datasets.py @@ -11,86 +11,139 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import Dict, Iterable, Optional +import collections +import dataclasses +from typing import Any, Dict, Iterable, Optional, Sequence -import jax.tree_util as jtu import numpy as np -__all__ = ["OTDataset", "ConditionalOTDataset"] +__all__ = ["OTData", "OTDataset", "ConditionalLoader"] +Item_t = Dict[str, np.ndarray] -class OTDataset: - """Dataset for Optimal transport problems. - Args: - lin: Linear part of the measure. - quad: Quadratic part of the measure. - conditions: Conditions of the source measure. - """ +@dataclasses.dataclass(repr=False, frozen=True) +class OTData: + """TODO.""" + lin: Optional[np.ndarray] = None + quad: Optional[np.ndarray] = None + condition: Optional[np.ndarray] = None + + def __getitem__(self, ix: int) -> Item_t: + return {k: v[ix] for k, v in self.__dict__.items() if v is not None} + + def __len__(self) -> int: + if self.lin is not None: + return len(self.lin) + if self.quad is not None: + return len(self.quad) + return 0 + + +class OTDataset: + """TODO.""" + SRC_PREFIX = "src" + TGT_PREFIX = "tgt" def __init__( self, - lin: Optional[np.ndarray] = None, - quad: Optional[np.ndarray] = None, - conditions: Optional[np.ndarray] = None, + src_data: OTData, + tgt_data: OTData, + src_conditions: Optional[Sequence[Any]] = None, + tgt_conditions: Optional[Sequence[Any]] = None, + is_aligned: bool = False, + seed: Optional[int] = None ): - self.data = {} - if lin is not None: - self.data["lin"] = lin - if quad is not None: - self.data["quad"] = quad - if conditions is not 
None: - self.data["conditions"] = conditions - self._check_sizes() - - def _check_sizes(self) -> None: - sizes = {k: len(v) for k, v in self.data.items()} - if not len(set(sizes.values())) == 1: - raise ValueError(f"Not all arrays have the same size: {sizes}.") - - def __getitem__(self, idx: np.ndarray) -> Dict[str, np.ndarray]: - return jtu.tree_map(lambda x: x[idx], self.data) + self.src_data = src_data + self.tgt_data = tgt_data + + if src_conditions is None: + src_conditions = [None] * len(src_data) + self.src_conditions = list(src_conditions) + if tgt_conditions is None: + tgt_conditions = [None] * len(tgt_data) + self.tgt_conditions = list(tgt_conditions) + + self._tgt_cond_to_ix = collections.defaultdict(list) + for ix, cond in enumerate(tgt_conditions): + self._tgt_cond_to_ix[cond].append(ix) + + self.is_aligned = is_aligned + self._rng = np.random.default_rng(seed) + + self._verify_integriy() + + def _verify_integriy(self) -> None: + assert len(self.src_data) == len(self.src_conditions) + assert len(self.src_data) == len(self.tgt_conditions) + + if self.is_aligned: + assert len(self.src_data) == len(self.tgt_data) + assert self.src_conditions == self.tgt_conditions + else: + sym_diff = set(self.src_conditions + ).symmetric_difference(self.tgt_conditions) + assert not sym_diff, sym_diff + + def _sample_from_target(self, src_ix: int) -> Item_t: + src_cond = self.src_conditions[src_ix] + tgt_ixs = self._tgt_cond_to_ix[src_cond] + ix = self._rng.choice(tgt_ixs) + return self.src_data[ix] + + def __getitem__(self, ix: int) -> Item_t: + src = self.src_data[ix] + src = {f"{self.SRC_PREFIX}_{k}": v for k, v in src.items()} + + tgt = self.src_data[ix] if self.is_aligned else self._sample_from_target(ix) + tgt = {f"{self.TGT_PREFIX}_{k}": v for k, v in tgt.items()} + + return {**src, **tgt} def __len__(self) -> int: - for v in self.data.values(): - return len(v) - return 0 + return len(self.src_data) -# TODO(michalk8): rename -class ConditionalOTDataset: +class 
ConditionalLoader: """Dataset for OT problems with conditions. This data loader wraps several data loaders and samples from them. Args: datasets: Datasets to sample from. - weights: TODO. seed: Random seed. """ def __init__( self, - # TODO(michalk8): generalize the type datasets: Iterable[OTDataset], - weights: Iterable[float] = None, seed: Optional[int] = None, ): self.datasets = tuple(datasets) - - if weights is None: - weights = np.ones(len(self.datasets)) - weights = np.asarray(weights) - self.weights = weights / np.sum(weights) - assert len(self.weights) == len(self.datasets), "TODO" - self._rng = np.random.default_rng(seed) - self._iterators = () - - def __next__(self) -> Dict[str, np.ndarray]: - idx = self._rng.choice(len(self._iterators), p=self.weights) - return next(self._iterators[idx]) - - def __iter__(self) -> "ConditionalOTDataset": - self._iterators = tuple(iter(ds) for ds in self.datasets) + self._iterators = [] + self._it = 0 + + def __next__(self) -> Item_t: + if self._it == len(self): + raise StopIteration + + ix = self._rng.choice(len(self._iterators)) + iterator = self._iterators[ix] + try: + data = next(iterator) + # TODO(michalk8): improve the logic a bit + self._it += 1 + return data + except StopIteration: + self._iterators[ix] = iter(self.datasets[ix]) + if not self._iterators: + raise + + def __iter__(self) -> "ConditionalLoader": + self._iterators = [iter(ds) for ds in self.datasets] + self._it = 0 return self + + def __len__(self) -> int: + return max((len(ds) for ds in self.datasets), default=0) diff --git a/src/ott/neural/flow_models/otfm.py b/src/ott/neural/flow_models/otfm.py index a1f60f849..83d709a96 100644 --- a/src/ott/neural/flow_models/otfm.py +++ b/src/ott/neural/flow_models/otfm.py @@ -111,37 +111,39 @@ def loss_fn( # TODO(michalk8): refactor in the future PR to just do one step def __call__( # noqa: D102 self, + loader: Any, # TODO(michalk8): type it correctly + *, n_iters: int, - train_source, - train_target, rng: 
Optional[jax.Array] = None, ) -> Dict[str, Any]: rng = utils.default_prng_key(rng) training_logs = {"loss": []} - for _ in range(n_iters): - for batch_source, batch_target in zip(train_source, train_target): - rng, rng_resample, rng_step_fn = jax.random.split(rng, 3) + for batch in loader: + rng, rng_resample, rng_step_fn = jax.random.split(rng, 3) - batch_source = jtu.tree_map(jnp.asarray, batch_source) - batch_target = jtu.tree_map(jnp.asarray, batch_target) + batch = jtu.tree_map(jnp.asarray, batch) - source = batch_source["lin"] - source_conditions = batch_source.get("conditions", None) - target = batch_target["lin"] + src, tgt = batch["src_lin"], batch["tgt_lin"] + src_conds = batch.get("src_condition", None) - if self.match_fn is not None: - tmat = self.match_fn(source, target) - src_ixs, tgt_ixs = sample_joint(rng_resample, tmat) - source, source_conditions = resample_data( - source, source_conditions, ixs=src_ixs - ) - target = resample_data(target, ixs=tgt_ixs) + if self.match_fn is not None: + tmat = self.match_fn(src, tgt) + src_ixs, tgt_ixs = sample_joint(rng_resample, tmat) + src, src_conds = resample_data(src, src_conds, ixs=src_ixs) + tgt = resample_data(tgt, ixs=tgt_ixs) - self.vf_state, loss = self.step_fn( - rng_step_fn, self.vf_state, source, target, source_conditions - ) - training_logs["loss"].append(float(loss)) + self.vf_state, loss = self.step_fn( + rng_step_fn, + self.vf_state, + src, + tgt, + src_conds, + ) + + training_logs["loss"].append(float(loss)) + if len(training_logs["loss"]) >= n_iters: + break return training_logs @@ -159,8 +161,8 @@ def transport( parameterized by the velocity field. Args: - x: Initial condition of the ODE of shape `(batch_size, ...)`. - condition: Condition of the input data of shape `(batch_size, ...)`. + x: Initial condition of the ODE of shape ``[batch_size, ...]``. + condition: Condition of the input data of shape ``[batch_size, ...]``. t0: Starting point of integration. t1: End point of integration. 
kwargs: Keyword arguments for the ODE solver. diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index f5c48e924..04f9917a8 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -11,7 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import Tuple +from typing import Optional, Union import pytest @@ -22,71 +22,73 @@ from ott.neural.data import datasets -@pytest.fixture(scope="module") -def data_loaders_gaussian() -> Tuple[DataLoader, DataLoader]: +def _ot_data( + rng: np.random.Generator, + *, + n: int = 100, + dim: int = 2, + condition: Optional[Union[float, np.ndarray]] = None, + cond_dim: Optional[int] = None, + offset: float = 0.0 +) -> datasets.OTData: + data = rng.normal(size=(n, dim)) + offset + + if isinstance(condition, float): + cond_dim = dim if cond_dim is None else cond_dim + condition = np.full((n, cond_dim), fill_value=condition) + + return datasets.OTData(lin=data, condition=condition) + + +@pytest.fixture() +def lin_dl() -> DataLoader: """Returns a data loader for a simple Gaussian mixture.""" - rng = np.random.default_rng(seed=0) - source = rng.normal(size=(100, 2)) - target = rng.normal(size=(100, 2)) + 1.0 - src_dataset = datasets.OTDataset(lin=source) - tgt_dataset = datasets.OTDataset(lin=target) - loader_src = DataLoader(src_dataset, batch_size=16, shuffle=True) - loader_tgt = DataLoader(tgt_dataset, batch_size=16, shuffle=True) - return loader_src, loader_tgt + n, d = 100, 2 + rng = np.random.default_rng(0) + src, tgt = _ot_data(rng, n=n, dim=d), _ot_data(rng, n=n, dim=d, offset=1.0) + ds = datasets.OTDataset(src, tgt) + return DataLoader(ds, batch_size=16, shuffle=True) -@pytest.fixture(scope="module") -def data_loader_gaussian_conditional(): - """Returns a data loader for Gaussian mixtures with conditions.""" - rng = np.random.default_rng(seed=0) - source_0 = 
rng.normal(size=(100, 2)) - target_0 = rng.normal(size=(100, 2)) + 2.0 +@pytest.fixture() +def lin_dl_with_conds() -> DataLoader: + n, d = 100, 2 + rng = np.random.default_rng(13) - source_1 = rng.normal(size=(100, 2)) - target_1 = rng.normal(size=(100, 2)) - 2.0 - ds0 = datasets.OTDataset( - lin=source_0, - target_lin=target_0, - conditions=np.zeros_like(source_0) * 0.0 - ) - ds1 = datasets.OTDataset( - lin=source_1, - target_lin=target_1, - conditions=np.ones_like(source_1) * 1.0 - ) - sampler0 = torch.utils.data.RandomSampler(ds0, replacement=True) - sampler1 = torch.utils.data.RandomSampler(ds1, replacement=True) - dl0 = DataLoader(ds0, batch_size=16, sampler=sampler0) - dl1 = DataLoader(ds1, batch_size=16, sampler=sampler1) + src_cond, tgt_cond = rng.normal(size=(n, 1)), rng.normal(size=(n, 1)) + src = _ot_data(rng, n=n, dim=d, condition=src_cond) + tgt = _ot_data(rng, n=n, dim=d, condition=tgt_cond) - return datasets.ConditionalOTDataset((dl0, dl1)) + ds = datasets.OTDataset(src, tgt) + return DataLoader(ds, batch_size=16, shuffle=True) -@pytest.fixture(scope="module") -def data_loader_gaussian_with_conditions(): - """Returns a data loader for a simple Gaussian mixture with conditions.""" - rng = np.random.default_rng(seed=0) - source = rng.normal(size=(100, 2)) - target = rng.normal(size=(100, 2)) + 1.0 - source_conditions = rng.normal(size=(100, 1)) - target_conditions = rng.normal(size=(100, 1)) - 1.0 +@pytest.fixture() +def conditional_lin_dl() -> datasets.ConditionalLoader: + rng = np.random.default_rng(42) - dataset = datasets.OTDataset( - lin=source, - target_lin=target, - conditions=source_conditions, - target_conditions=target_conditions - ) - return DataLoader(dataset, batch_size=16, shuffle=True) + src0, tgt0 = _ot_data(rng, condition=0.0), _ot_data(rng, offset=2.0) + src1, tgt1 = _ot_data(rng, condition=1.0), _ot_data(rng, offset=-2.0) + + src_ds = datasets.OTDataset(src0, tgt0) + tgt_ds = datasets.OTDataset(src1, tgt1) + + src_dl = 
DataLoader(src_ds, batch_size=16, shuffle=True) + tgt_dl = DataLoader(tgt_ds, batch_size=16, shuffle=True) + + return datasets.ConditionalLoader([src_dl, tgt_dl]) + + +# TODO(michalk8): refactor the below for GENOT @pytest.fixture(scope="module") def genot_data_loader_linear(): """Returns a data loader for a simple Gaussian mixture.""" rng = np.random.default_rng(seed=0) - source = rng.normal(size=(100, 2)) - target = rng.normal(size=(100, 2)) + 1.0 - dataset = datasets.OTDataset(lin=source, target_lin=target) + src = rng.normal(size=(100, 2)) + tgt = rng.normal(size=(100, 2)) + 1.0 + dataset = datasets.OTDataset(lin=src, tgt_lin=tgt) return DataLoader(dataset, batch_size=16, shuffle=True) @@ -94,35 +96,31 @@ def genot_data_loader_linear(): def genot_data_loader_linear_conditional(): """Returns a data loader for a simple Gaussian mixture.""" rng = np.random.default_rng(seed=0) - source_0 = rng.normal(size=(100, 2)) - target_0 = rng.normal(size=(100, 2)) + 1.0 - source_1 = rng.normal(size=(100, 2)) - target_1 = rng.normal(size=(100, 2)) + 1.0 + src_0 = rng.normal(size=(100, 2)) + tgt_0 = rng.normal(size=(100, 2)) + 1.0 + src_1 = rng.normal(size=(100, 2)) + tgt_1 = rng.normal(size=(100, 2)) + 1.0 ds0 = datasets.OTDataset( - lin=source_0, - target_lin=target_0, - conditions=np.zeros_like(source_0) * 0.0 + lin=src_0, tgt_lin=tgt_0, conditions=np.zeros_like(src_0) * 0.0 ) ds1 = datasets.OTDataset( - lin=source_1, - target_lin=target_1, - conditions=np.ones_like(source_1) * 1.0 + lin=src_1, tgt_lin=tgt_1, conditions=np.ones_like(src_1) * 1.0 ) sampler0 = torch.utils.data.RandomSampler(ds0, replacement=True) sampler1 = torch.utils.data.RandomSampler(ds1, replacement=True) dl0 = DataLoader(ds0, batch_size=16, sampler=sampler0) dl1 = DataLoader(ds1, batch_size=16, sampler=sampler1) - return datasets.ConditionalOTDataset((dl0, dl1)) + return datasets.ConditionalLoader((dl0, dl1)) @pytest.fixture(scope="module") def genot_data_loader_quad(): """Returns a data loader for a 
simple Gaussian mixture.""" rng = np.random.default_rng(seed=0) - source = rng.normal(size=(100, 2)) - target = rng.normal(size=(100, 1)) + 1.0 - dataset = datasets.OTDataset(quad=source, target_quad=target) + src = rng.normal(size=(100, 2)) + tgt = rng.normal(size=(100, 1)) + 1.0 + dataset = datasets.OTDataset(quad=src, tgt_quad=tgt) return DataLoader(dataset, batch_size=16, shuffle=True) @@ -130,41 +128,34 @@ def genot_data_loader_quad(): def genot_data_loader_quad_conditional(): """Returns a data loader for a simple Gaussian mixture.""" rng = np.random.default_rng(seed=0) - source_0 = rng.normal(size=(100, 2)) - target_0 = rng.normal(size=(100, 1)) + 1.0 - source_1 = rng.normal(size=(100, 2)) - target_1 = rng.normal(size=(100, 1)) + 1.0 + src_0 = rng.normal(size=(100, 2)) + tgt_0 = rng.normal(size=(100, 1)) + 1.0 + src_1 = rng.normal(size=(100, 2)) + tgt_1 = rng.normal(size=(100, 1)) + 1.0 ds0 = datasets.OTDataset( - quad=source_0, - target_quad=target_0, - conditions=np.zeros_like(source_0) * 0.0 + quad=src_0, tgt_quad=tgt_0, conditions=np.zeros_like(src_0) * 0.0 ) ds1 = datasets.OTDataset( - quad=source_1, - target_quad=target_1, - conditions=np.ones_like(source_1) * 1.0 + quad=src_1, tgt_quad=tgt_1, conditions=np.ones_like(src_1) * 1.0 ) sampler0 = torch.utils.data.RandomSampler(ds0, replacement=True) sampler1 = torch.utils.data.RandomSampler(ds1, replacement=True) dl0 = DataLoader(ds0, batch_size=16, sampler=sampler0) dl1 = DataLoader(ds1, batch_size=16, sampler=sampler1) - return datasets.ConditionalOTDataset((dl0, dl1)) + return datasets.ConditionalLoader((dl0, dl1)) @pytest.fixture(scope="module") def genot_data_loader_fused(): """Returns a data loader for a simple Gaussian mixture.""" rng = np.random.default_rng(seed=0) - source_q = rng.normal(size=(100, 2)) - target_q = rng.normal(size=(100, 1)) + 1.0 - source_lin = rng.normal(size=(100, 2)) - target_lin = rng.normal(size=(100, 2)) + 1.0 + src_q = rng.normal(size=(100, 2)) + tgt_q = 
rng.normal(size=(100, 1)) + 1.0 + src_lin = rng.normal(size=(100, 2)) + tgt_lin = rng.normal(size=(100, 2)) + 1.0 dataset = datasets.OTDataset( - lin=source_lin, - quad=source_q, - target_lin=target_lin, - target_quad=target_q + lin=src_lin, quad=src_q, tgt_lin=tgt_lin, tgt_quad=tgt_q ) return DataLoader(dataset, batch_size=16, shuffle=True) @@ -173,32 +164,32 @@ def genot_data_loader_fused(): def genot_data_loader_fused_conditional(): """Returns a data loader for a simple Gaussian mixture.""" rng = np.random.default_rng(seed=0) - source_q_0 = rng.normal(size=(100, 2)) - target_q_0 = rng.normal(size=(100, 1)) + 1.0 - source_lin_0 = rng.normal(size=(100, 2)) - target_lin_0 = rng.normal(size=(100, 2)) + 1.0 + src_q_0 = rng.normal(size=(100, 2)) + tgt_q_0 = rng.normal(size=(100, 1)) + 1.0 + src_lin_0 = rng.normal(size=(100, 2)) + tgt_lin_0 = rng.normal(size=(100, 2)) + 1.0 - source_q_1 = 2 * rng.normal(size=(100, 2)) - target_q_1 = 2 * rng.normal(size=(100, 1)) + 1.0 - source_lin_1 = 2 * rng.normal(size=(100, 2)) - target_lin_1 = 2 * rng.normal(size=(100, 2)) + 1.0 + src_q_1 = 2 * rng.normal(size=(100, 2)) + tgt_q_1 = 2 * rng.normal(size=(100, 1)) + 1.0 + src_lin_1 = 2 * rng.normal(size=(100, 2)) + tgt_lin_1 = 2 * rng.normal(size=(100, 2)) + 1.0 ds0 = datasets.OTDataset( - lin=source_lin_0, - target_lin=target_lin_0, - quad=source_q_0, - target_quad=target_q_0, - conditions=np.zeros_like(source_lin_0) * 0.0 + lin=src_lin_0, + tgt_lin=tgt_lin_0, + quad=src_q_0, + tgt_quad=tgt_q_0, + conditions=np.zeros_like(src_lin_0) * 0.0 ) ds1 = datasets.OTDataset( - lin=source_lin_1, - target_lin=target_lin_1, - quad=source_q_1, - target_quad=target_q_1, - conditions=np.ones_like(source_lin_1) * 1.0 + lin=src_lin_1, + tgt_lin=tgt_lin_1, + quad=src_q_1, + tgt_quad=tgt_q_1, + conditions=np.ones_like(src_lin_1) * 1.0 ) sampler0 = torch.utils.data.RandomSampler(ds0, replacement=True) sampler1 = torch.utils.data.RandomSampler(ds1, replacement=True) dl0 = DataLoader(ds0, batch_size=16, 
sampler=sampler0) dl1 = DataLoader(ds1, batch_size=16, sampler=sampler1) - return datasets.ConditionalOTDataset((dl0, dl1)) + return datasets.ConditionalLoader((dl0, dl1)) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index f938728c1..c60b2e064 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -24,7 +24,6 @@ from ott.neural.flow_models.genot import GENOTLin, GENOTQuad from ott.neural.flow_models.models import VelocityField from ott.neural.flow_models.samplers import uniform_sampler -from ott.neural.models import base_solver from ott.solvers.linear import sinkhorn, sinkhorn_lr from ott.solvers.quadratic import gromov_wasserstein, gromov_wasserstein_lr diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index f4c0fc1d6..df2fb4bdb 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -12,172 +12,112 @@ # See the License for the specific language governing permissions and # limitations under the License. import functools -from typing import Literal, Type import pytest +import jax import jax.numpy as jnp +from torch.utils.data import DataLoader import optax +from ott.neural.data import datasets from ott.neural.flow_models import flows, models, otfm, samplers, utils class TestOTFlowMatching: - @pytest.mark.parametrize( - "flow", [ - flows.ConstantNoiseFlow(0.0), - flows.ConstantNoiseFlow(1.0), - flows.BrownianNoiseFlow(0.2), - ] - ) - def test_flow_matching_unconditional( - self, data_loaders_gaussian, flow: flows.BaseFlow - ): + def test_fm(self, lin_dl: DataLoader): input_dim = 2 neural_vf = models.VelocityField( output_dim=2, condition_dim=0, latent_embed_dim=5, ) - optimizer = optax.adam(learning_rate=1e-3) - fm = otfm.OTFlowMatching( input_dim, neural_vf, - flow=flow, + flow=flows.ConstantNoiseFlow(0.0), time_sampler=samplers.uniform_sampler, - match_fn=utils.match_linear, - optimizer=optimizer, - ) - _ = fm( - n_iters=2, - train_source=data_loaders_gaussian[0], - 
train_target=data_loaders_gaussian[1], + match_fn=jax.jit(utils.match_linear), + optimizer=optax.adam(learning_rate=1e-3), ) - # TODO(michalk8): nicer - batch_src = next(iter(data_loaders_gaussian[0])) - source = jnp.asarray(batch_src["lin"]) - batch_tgt = next(iter(data_loaders_gaussian[1])) - target = jnp.asarray(batch_tgt["lin"]) - source_conditions = jnp.asarray( - batch_src["conditions"] - ) if "conditions" in batch_src else None - - result_forward = fm.transport(source, condition=source_conditions) - # TODO(michalk8): better condition - assert jnp.sum(jnp.isnan(result_forward)) == 0 - - result_backward = fm.transport( - target, condition=source_conditions, t0=1.0, t1=0.0 - ) - assert jnp.sum(jnp.isnan(result_backward)) == 0 - - @pytest.mark.parametrize( - "flow", [ - flows.ConstantNoiseFlow(0.0), - flows.ConstantNoiseFlow(1.1), - flows.BrownianNoiseFlow(2.2) - ] - ) - def test_flow_matching_with_conditions( - self, data_loader_gaussian_with_conditions, flow: flows.BaseFlow - ): + _logs = fm(lin_dl, n_iters=2) + + for batch in lin_dl: + src = jnp.asarray(batch["src_lin"]) + tgt = jnp.asarray(batch["tgt_lin"]) + break + + res_fwd = fm.transport(src) + res_bwd = fm.transport(tgt, t0=1.0, t1=0.0) + + # TODO(michalk8): better assertions + assert jnp.sum(jnp.isnan(res_fwd)) == 0 + assert jnp.sum(jnp.isnan(res_bwd)) == 0 + + def test_fm_with_conds(self, lin_dl_with_conds: DataLoader): input_dim, cond_dim = 2, 1 neural_vf = models.VelocityField( output_dim=input_dim, condition_dim=cond_dim, latent_embed_dim=5, ) - time_sampler = functools.partial(samplers.uniform_sampler, offset=1e-5) - optimizer = optax.adam(learning_rate=1e-3) - fm = otfm.OTFlowMatching( 2, neural_vf, - match_fn=utils.match_linear, - flow=flow, - time_sampler=time_sampler, - optimizer=optimizer, - ) - _ = fm( - n_iters=2, - train_source=data_loader_gaussian_with_conditions, - train_target=data_loader_gaussian_with_conditions, + flow=flows.BrownianNoiseFlow(0.12), + 
time_sampler=functools.partial(samplers.uniform_sampler, offset=1e-5), + match_fn=jax.jit(utils.match_linear), + optimizer=optax.adam(learning_rate=1e-3), ) - batch = next(iter(data_loader_gaussian_with_conditions)) - source = jnp.asarray(batch["source_lin"]) - target = jnp.asarray(batch["target_lin"]) - source_conditions = jnp.asarray(batch["source_conditions"]) if len( - batch["source_conditions"] - ) > 0 else None + _logs = fm(lin_dl_with_conds, n_iters=2) - result_forward = fm.transport(source, condition=source_conditions) - assert jnp.sum(jnp.isnan(result_forward)) == 0 + for batch in lin_dl_with_conds: + src = jnp.asarray(batch["src_lin"]) + tgt = jnp.asarray(batch["tgt_lin"]) + src_cond = jnp.asarray(batch["src_condition"]) + break - result_backward = fm.transport( - target, condition=source_conditions, t0=1.0, t1=0.0 - ) - assert jnp.sum(jnp.isnan(result_backward)) == 0 - - @pytest.mark.parametrize( - "flow", - [ - flows.ConstantNoiseFlow(0.0), - flows.ConstantNoiseFlow(13.0), - flows.BrownianNoiseFlow(0.12) - ], - ) - @pytest.mark.parametrize("solver", ["sinkhorn", "lr_sinkhorn"]) - def test_flow_matching_conditional( - self, data_loader_gaussian_conditional, flow: Type[flows.BaseFlow], - solver: Literal["sinkhorn", "lr_sinkhorn"] + res_fwd = fm.transport(src, condition=src_cond) + res_bwd = fm.transport(tgt, condition=src_cond, t0=1.0, t1=0.0) + + # TODO(michalk8): better assertions + assert jnp.sum(jnp.isnan(res_fwd)) == 0 + assert jnp.sum(jnp.isnan(res_bwd)) == 0 + + @pytest.mark.parametrize("rank", [-1, 10]) + def test_fm_conditional_loader( + self, rank: int, conditional_lin_dl: datasets.ConditionalLoader ): - dim = 2 - condition_dim = 0 + input_dim, cond_dim = 2, 0 neural_vf = models.VelocityField( - output_dim=dim, - condition_dim=condition_dim, + output_dim=input_dim, + condition_dim=cond_dim, latent_embed_dim=5, ) - # TODO(michalk8): check for LR - ot_matcher = utils.match_linear - time_sampler = samplers.uniform_sampler - optimizer = 
optax.adam(learning_rate=1e-3) - fm = otfm.OTFlowMatching( + input_dim, neural_vf, - input_dim=dim, - cond_dim=condition_dim, - ot_matcher=ot_matcher, - flow=flow, - time_sampler=time_sampler, - optimizer=optimizer, - ) - fm( - data_loader_gaussian_conditional, - data_loader_gaussian_conditional, - n_iters=2, - valid_freq=3 + flow=flows.ConstantNoiseFlow(13.0), + time_sampler=samplers.uniform_sampler, + match_fn=jax.jit(functools.partial(utils.match_linear, rank=rank)), + optimizer=optax.adam(learning_rate=1e-3), ) - batch = next(iter(data_loader_gaussian_conditional)) - source = jnp.asarray(batch["source_lin"]) - target = jnp.asarray(batch["target_lin"]) - source_conditions = jnp.asarray(batch["source_conditions"]) if len( - batch["source_conditions"] - ) > 0 else None - result_forward = fm.transport( - source, condition=source_conditions, forward=True - ) - assert isinstance(result_forward, jnp.ndarray) - assert jnp.sum(jnp.isnan(result_forward)) == 0 + _logs = fm(conditional_lin_dl, n_iters=2) - result_backward = fm.transport( - target, condition=source_conditions, forward=False - ) - assert isinstance(result_backward, jnp.ndarray) - assert jnp.sum(jnp.isnan(result_backward)) == 0 + for batch in conditional_lin_dl: + src = jnp.asarray(batch["src_lin"]) + tgt = jnp.asarray(batch["tgt_lin"]) + src_cond = jnp.asarray(batch["src_condition"]) + break + + res_fwd = fm.transport(src, condition=src_cond) + res_bwd = fm.transport(tgt, condition=src_cond, t0=1.0, t1=0.0) + + # TODO(michalk8): better assertions + assert jnp.sum(jnp.isnan(res_fwd)) == 0 + assert jnp.sum(jnp.isnan(res_bwd)) == 0 From a9fe6181ec83fe718e632f0c51e70b3a7d07e2ad Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Thu, 14 Mar 2024 16:13:57 +0100 Subject: [PATCH 120/186] Start cleaning GENOT --- src/ott/neural/flow_models/genot.py | 214 +++++++--------------------- src/ott/neural/flow_models/otfm.py | 33 ++--- 2 files changed, 59 insertions(+), 188 
deletions(-) diff --git a/src/ott/neural/flow_models/genot.py b/src/ott/neural/flow_models/genot.py index 88be0da38..fbdd7687c 100644 --- a/src/ott/neural/flow_models/genot.py +++ b/src/ott/neural/flow_models/genot.py @@ -12,135 +12,63 @@ # See the License for the specific language governing permissions and # limitations under the License. import functools -import types -from typing import Any, Callable, Dict, Optional, Tuple, Type, Union +from typing import Any, Callable, Optional, Tuple, Union import jax import jax.numpy as jnp import jax.tree_util as jtu import diffrax -import optax from flax.training import train_state from ott import utils -from ott.neural.flow_models import flows, samplers +from ott.neural.flow_models import flows, models __all__ = ["GENOTBase", "GENOTLin", "GENOTQuad"] +# TODO(michalk8): remove the base class? class GENOTBase: - """Base class for GENOT models (:cite:`klein_uscidda:23`). - - GENOT (Generative Entropic Neural Optimal Transport) is a neural solver - for entropic OT prooblems, in the linear - (:class:`ott.neural.flows.genot.GENOTLin`), the Gromov-Wasserstein, and - the Fused Gromov-Wasserstein ((:class:`ott.neural.flows.genot.GENOTQUad`)) - setting. + """TODO :cite:`klein_uscidda:23`. Args: velocity_field: Neural vector field parameterized by a neural network. - input_dim: Dimension of the data in the source distribution. - output_dim: Dimension of the data in the target distribution. - cond_dim: Dimension of the conditioning variable. - valid_freq: Frequency of validation. - ot_solver: OT solver to match samples from the source and the target - distribution. - epsilon: Entropy regularization term of the OT problem solved by - `ot_solver`. - cost_fn: Cost function for the OT problem solved by the `ot_solver`. - In the linear case, this is always expected to be of type `str`. - If the problem is of quadratic type and `cost_fn` is a string, - the `cost_fn` is used for all terms, i.e. 
both quadratic terms and, - if applicable, the linear temr. If of type :class:`dict`, the keys - are expected to be `cost_fn_xx`, `cost_fn_yy`, and if applicable, - `cost_fn_xy`. - scale_cost: How to scale the cost matrix for the OT problem solved by - the `ot_solver`. In the linear case, this is always expected to be - not a :class:`dict`. If the problem is of quadratic type and - `scale_cost` is a string, the `scale_cost` argument is used for all - terms, i.e. both quadratic terms and, if applicable, the linear temr. - If of type :class:`dict`, the keys are expected to be `scale_cost_xx`, - `scale_cost_yy`, and if applicable, `scale_cost_xy`. - optimizer: Optimizer for `velocity_field`. flow: Flow between latent distribution and target distribution. time_sampler: Sampler for the time. - k_samples_per_x: Number of samples drawn from the conditional distribution of an input sample, see algorithm TODO. - solver_latent_to_data: Linear OT solver to match the latent distribution + data_match_fn: Linear OT solver to match the latent distribution with the conditional distribution. - kwargs_solver_latent_to_data: Keyword arguments for `solver_latent_to_data`. - #TODO: adapt - fused_penalty: Fused penalty of the linear/fused term in the Fused - Gromov-Wasserstein problem. - callback_fn: Callback function. - rng: Random number generator. + latent_match_fn: TODO. + latent_noise_fn: TODO. + k_samples_per_x: Number of samples drawn from the conditional distribution + kwargs: TODO. 
""" def __init__( self, - velocity_field: Callable[[ - jnp.ndarray, jnp.ndarray, Optional[jnp.ndarray], Optional[jnp.ndarray] - ], jnp.ndarray], - *, - input_dim: int, - output_dim: int, - cond_dim: int, - valid_freq: int, - # TODO(michalk8) - ot_matcher: Any, - optimizer: optax.GradientTransformation, - flow: Type[flows.BaseFlow] = flows.ConstantNoiseFlow(0.0), # noqa: B008 - time_sampler: Callable[[jax.Array, int], - jnp.ndarray] = samplers.uniform_sampler, + velocity_field: models.VelocityField, + flow: flows.BaseFlow, + time_sampler: Callable[[jax.Array, int], jnp.ndarray], + data_match_fn: Any, + latent_match_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray], + jnp.ndarray]] = None, + # TODO(michalk8): add a default for this? + latent_noise_fn: Optional[Callable[[jax.Array, Tuple[int, ...]], + jnp.ndarray]] = None, k_samples_per_x: int = 1, - # TODO(michalk8) - matcher_latent_to_data: Optional[Callable] = None, - kwargs_solver_latent_to_data: Dict[str, Any] = types.MappingProxyType({}), - fused_penalty: float = 0.0, - callback_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray, jnp.ndarray], - Any]] = None, - rng: Optional[jax.Array] = None, + **kwargs: Any, ): - rng = utils.default_prng_key(rng) - - self.rng = utils.default_prng_key(rng) - self.valid_freq = valid_freq - self.velocity_field = velocity_field - self.state_velocity_field: Optional[train_state.TrainState] = None + self.vf = velocity_field self.flow = flow self.time_sampler = time_sampler - self.optimizer = optimizer - self.ot_matcher = ot_matcher - self.latent_noise_fn = jax.tree_util.Partial( - jax.random.multivariate_normal, - mean=jnp.zeros((output_dim,)), - cov=jnp.diag(jnp.ones((output_dim,))) - ) - self.input_dim = input_dim - self.output_dim = output_dim - self.cond_dim = cond_dim + self.ot_matcher = data_match_fn + if latent_match_fn is not None: + latent_match_fn = jax.jit(jax.vmap(latent_match_fn, 0, 0)) + self.latent_match_fn = latent_match_fn + self.latent_noise_fn = latent_noise_fn 
self.k_samples_per_x = k_samples_per_x - # OT data-data matching parameters - - self.fused_penalty = fused_penalty - - # OT latent-data matching parameters - self.matcher_latent_to_data = matcher_latent_to_data - self.kwargs_solver_latent_to_data = kwargs_solver_latent_to_data - - # callback parameteres - self.callback_fn = callback_fn - self.setup() - - def setup(self) -> None: - """Set up the model.""" - self.state_velocity_field = ( - self.velocity_field.create_train_state( - self.rng, self.optimizer, self.output_dim - ) - ) + self.vf_state = self.vf.create_train_state(**kwargs) self.step_fn = self._get_step_fn() def _get_step_fn(self) -> Callable: @@ -148,7 +76,7 @@ def _get_step_fn(self) -> Callable: @jax.jit def step_fn( rng: jax.Array, - state_velocity_field: train_state.TrainState, + vf_state: train_state.TrainState, time: jnp.ndarray, source: jnp.ndarray, target: jnp.ndarray, @@ -162,9 +90,7 @@ def loss_fn( source_conditions: Optional[jnp.ndarray], rng: jax.Array ): x_t = self.flow.compute_xt(rng, time, latent, target) - apply_fn = functools.partial( - state_velocity_field.apply_fn, {"params": params} - ) + apply_fn = functools.partial(vf_state.apply_fn, {"params": params}) cond_input = jnp.concatenate([ source, source_conditions @@ -175,11 +101,10 @@ def loss_fn( grad_fn = jax.value_and_grad(loss_fn, has_aux=False) loss, grads = grad_fn( - state_velocity_field.params, time, source, target, latent, - source_conditions, rng + vf_state.params, time, source, target, latent, source_conditions, rng ) - return state_velocity_field.apply_gradients(grads=grads), loss + return vf_state.apply_gradients(grads=grads), loss return step_fn @@ -218,7 +143,7 @@ def transport( raise NotImplementedError if condition is not None: assert len(source) == len(condition), (len(source), len(condition)) - latent_batch = self.latent_noise_fn(rng, shape=(len(source),)) + latent_batch = self.latent_noise_fn(rng, (len(source),)) cond_input = source if condition is None else ( 
jnp.concatenate([source, condition], axis=-1) ) @@ -226,11 +151,8 @@ def transport( @jax.jit def solve_ode(input: jnp.ndarray, cond: jnp.ndarray) -> jnp.ndarray: ode_term = diffrax.ODETerm( - lambda t, x, args: self.state_velocity_field. - apply_fn({"params": self.state_velocity_field.params}, - t=t, - x=x, - condition=cond) + lambda t, x, args: self.vf_state. + apply_fn({"params": self.vf_state.params}, t=t, x=x, condition=cond) ), solver = kwargs.pop("solver", diffrax.Tsit5()) stepsize_controller = kwargs.pop( @@ -250,14 +172,6 @@ def solve_ode(input: jnp.ndarray, cond: jnp.ndarray) -> jnp.ndarray: return jax.vmap(solve_ode)(latent_batch, cond_input) - def _valid_step(self, valid_loader, iter): - pass - - @property - def learn_rescaling(self) -> bool: - """Whether to learn at least one rescaling factor.""" - return False - def _reshape_samples(self, arrays: Tuple[jnp.ndarray, ...], batch_size: int) -> Tuple[jnp.ndarray, ...]: return jax.tree_util.tree_map( @@ -265,12 +179,6 @@ def _reshape_samples(self, arrays: Tuple[jnp.ndarray, ...], arrays ) - def _learn_rescaling( - self, source: jnp.ndarray, target: jnp.ndarray, - source_conditions: Optional[jnp.ndarray], tmat: jnp.ndarray - ) -> Tuple[jnp.ndarray, jnp.ndarray, float, float]: - raise NotImplementedError - class GENOTLin(GENOTBase): """Implementation of GENOT-L (:cite:`klein:23`). 
@@ -293,7 +201,7 @@ def __call__( rng = utils.default_prng_key(rng) training_logs = {"loss": []} - for it in range(n_iters): + for _ in range(n_iters): for batch_source, batch_target in zip(train_source, train_target): ( rng, rng_resample, rng_noise, rng_time, rng_latent_data_match, @@ -311,7 +219,7 @@ def __call__( n_samples = batch_size * self.k_samples_per_x time = self.time_sampler(rng_time, n_samples) latent = self.latent_noise_fn( - rng_noise, shape=(self.k_samples_per_x, batch_size) + rng_noise, (self.k_samples_per_x, batch_size) ) tmat = self.ot_matcher.match_fn( @@ -329,11 +237,9 @@ def __call__( source_is_balanced=(self.ot_matcher.tau_a == 1.0) ) - if self.matcher_latent_to_data is not None: - tmats_latent_data = jnp.array( - jax.vmap(self.matcher_latent_to_data.match_fn, 0, - 0)(x=latent, y=target) - ) + if self.latent_match_fn is not None: + # already vmapped + tmats_latent_data = self.latent_match_fn(latent, target) rng_latent_data_match = jax.random.split( rng_latent_data_match, self.k_samples_per_x @@ -347,23 +253,13 @@ def __call__( source, source_conditions, target, latent = self._reshape_samples( (source, source_conditions, target, latent), batch_size ) - self.state_velocity_field, loss = self.step_fn( - rng_step_fn, self.state_velocity_field, time, source, target, - latent, source_conditions + self.vf_state, loss = self.step_fn( + rng_step_fn, self.vf_state, time, source, target, latent, + source_conditions ) - if self.learn_rescaling: - eta_preds, xi_preds, loss_a, loss_b = self._learn_rescaling( - source=source, - target=target, - condition=source_conditions, - tmat=tmat - ) training_logs["loss"].append(float(loss)) - if it % valid_freq == 0: - self._valid_step(valid_source, valid_target, it) - class GENOTQuad(GENOTBase): """Implementation of GENOT-Q and GENOT-F (:cite:`klein:23`). 
@@ -388,7 +284,7 @@ def __call__( rng = utils.default_prng_key(rng) training_logs = {"loss": []} - for it in range(n_iters): + for _ in range(n_iters): for batch_source, batch_target in zip(train_source, train_target): ( rng, rng_resample, rng_noise, rng_time, rng_latent_data_match, @@ -408,7 +304,7 @@ def __call__( n_samples = batch_size * self.k_samples_per_x time = self.time_sampler(rng_time, n_samples) latent = self.latent_noise_fn( - rng_noise, shape=(self.k_samples_per_x, batch_size) + rng_noise, (self.k_samples_per_x, batch_size) ) tmat = self.ot_matcher.match_fn( @@ -433,11 +329,9 @@ def __call__( ) ) - if self.matcher_latent_to_data is not None: - tmats_latent_data = jnp.array( - jax.vmap(self.matcher_latent_to_data.match_fn, 0, - 0)(x=latent, y=target) - ) + if self.latent_match_fn is not None: + # already vmapped + tmats_latent_data = self.latent_match_fn(latent, target) rng_latent_data_match = jax.random.split( rng_latent_data_match, self.k_samples_per_x @@ -453,18 +347,8 @@ def __call__( (source, source_conditions, target, latent), batch_size ) - self.state_velocity_field, loss = self.step_fn( - rng_step_fn, self.state_velocity_field, time, source, target, - latent, source_conditions + self.vf_state, loss = self.step_fn( + rng_step_fn, self.vf_state, time, source, target, latent, + source_conditions ) - if self.learn_rescaling: - eta_preds, xi_preds, loss_a, loss_b = self._learn_rescaling( - source=source, - target=target, - condition=source_conditions, - tmat=tmat - ) training_logs["loss"].append(float(loss)) - - if it % valid_freq == 0: - self._valid_step(valid_source, valid_target, it) diff --git a/src/ott/neural/flow_models/otfm.py b/src/ott/neural/flow_models/otfm.py index 83d709a96..79ffa016c 100644 --- a/src/ott/neural/flow_models/otfm.py +++ b/src/ott/neural/flow_models/otfm.py @@ -19,11 +19,10 @@ import jax.tree_util as jtu import diffrax -import optax from flax.training import train_state from ott import utils -from ott.neural.flow_models 
import flows, models +from ott.neural.flow_models import flows, models, samplers from ott.neural.flow_models.utils import resample_data, sample_joint __all__ = ["OTFlowMatching"] @@ -35,40 +34,31 @@ class OTFlowMatching: With an extension to OT-FM :cite:`tong:23`, :cite:`pooladian:23`. Args: - input_dim: Dimension of the input data. velocity_field: Neural vector field parameterized by a neural network. flow: Flow between source and target distribution. time_sampler: Sampler for the time. - optimizer: Optimizer for the velocity field's parameters. match_fn: TODO. - rng: Random number generator. + kwargs: TODO. """ # TODO(michalk8): in the future, `input_dim`, `optimizer` and `rng` will be # in a separate function def __init__( self, - input_dim: int, velocity_field: models.VelocityField, flow: flows.BaseFlow, - time_sampler: Callable[[jax.Array, int], jnp.ndarray], - optimizer: optax.GradientTransformation, + time_sampler: Callable[[jax.Array, int], + jnp.ndarray] = samplers.uniform_sampler, match_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray], jnp.ndarray]] = None, - rng: Optional[jax.Array] = None, + **kwargs: Any, ): - rng = utils.default_prng_key(rng) - - self.input_dim = input_dim self.vf = velocity_field self.flow = flow self.time_sampler = time_sampler self.match_fn = match_fn - self.optimizer = optimizer - self.vf_state = self.vf.create_train_state( - rng, self.optimizer, self.input_dim - ) + self.vf_state = self.vf.create_train_state(**kwargs) self.step_fn = self._get_step_fn() def _get_step_fn(self) -> Callable: @@ -76,7 +66,7 @@ def _get_step_fn(self) -> Callable: @jax.jit def step_fn( rng: jax.Array, - state_velocity_field: train_state.TrainState, + vf_state: train_state.TrainState, source: jnp.ndarray, target: jnp.ndarray, source_conditions: Optional[jnp.ndarray], @@ -89,9 +79,7 @@ def loss_fn( ) -> jnp.ndarray: x_t = self.flow.compute_xt(rng, t, source, target) - apply_fn = functools.partial( - state_velocity_field.apply_fn, {"params": params} - ) + 
apply_fn = functools.partial(vf_state.apply_fn, {"params": params}) v_t = jax.vmap(apply_fn)(t=t, x=x_t, condition=source_conditions) u_t = self.flow.compute_ut(t, source, target) return jnp.mean((v_t - u_t) ** 2) @@ -101,10 +89,9 @@ def loss_fn( t = self.time_sampler(key_t, batch_size) grad_fn = jax.value_and_grad(loss_fn) loss, grads = grad_fn( - state_velocity_field.params, t, source, target, source_conditions, - key_model + vf_state.params, t, source, target, source_conditions, key_model ) - return state_velocity_field.apply_gradients(grads=grads), loss + return vf_state.apply_gradients(grads=grads), loss return step_fn From abca4f72e7a0da25f0d1f351e5f6bcfc136737f8 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Fri, 15 Mar 2024 02:25:11 +0100 Subject: [PATCH 121/186] Update GENOT --- src/ott/neural/data/datasets.py | 2 +- src/ott/neural/flow_models/genot.py | 181 ++++++++++++++++++++-------- src/ott/neural/flow_models/otfm.py | 23 ++-- src/ott/neural/flow_models/utils.py | 12 -- 4 files changed, 143 insertions(+), 75 deletions(-) diff --git a/src/ott/neural/data/datasets.py b/src/ott/neural/data/datasets.py index 0a2067f25..eca6f1e51 100644 --- a/src/ott/neural/data/datasets.py +++ b/src/ott/neural/data/datasets.py @@ -75,7 +75,7 @@ def __init__( def _verify_integriy(self) -> None: assert len(self.src_data) == len(self.src_conditions) - assert len(self.src_data) == len(self.tgt_conditions) + assert len(self.tgt_data) == len(self.tgt_conditions) if self.is_aligned: assert len(self.src_data) == len(self.tgt_data) diff --git a/src/ott/neural/flow_models/genot.py b/src/ott/neural/flow_models/genot.py index fbdd7687c..a4878c892 100644 --- a/src/ott/neural/flow_models/genot.py +++ b/src/ott/neural/flow_models/genot.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import functools -from typing import Any, Callable, Optional, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Tuple import jax import jax.numpy as jnp @@ -23,12 +23,13 @@ from ott import utils from ott.neural.flow_models import flows, models +from ott.neural.flow_models import utils as flow_utils -__all__ = ["GENOTBase", "GENOTLin", "GENOTQuad"] +__all__ = ["GENOT", "GENOTLin", "GENOTQuad"] # TODO(michalk8): remove the base class? -class GENOTBase: +class GENOT: """TODO :cite:`klein_uscidda:23`. Args: @@ -49,7 +50,9 @@ def __init__( velocity_field: models.VelocityField, flow: flows.BaseFlow, time_sampler: Callable[[jax.Array, int], jnp.ndarray], - data_match_fn: Any, + # TODO(mcihalk8): all args are optional + data_match_fn: Callable[ + [jnp.ndarray, jnp.ndarray, jnp.ndarray, jnp.ndarray], jnp.ndarray], latent_match_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray], jnp.ndarray]] = None, # TODO(michalk8): add a default for this? @@ -61,9 +64,7 @@ def __init__( self.vf = velocity_field self.flow = flow self.time_sampler = time_sampler - self.ot_matcher = data_match_fn - if latent_match_fn is not None: - latent_match_fn = jax.jit(jax.vmap(latent_match_fn, 0, 0)) + self.data_match_fn = data_match_fn self.latent_match_fn = latent_match_fn self.latent_noise_fn = latent_noise_fn self.k_samples_per_x = k_samples_per_x @@ -108,16 +109,110 @@ def loss_fn( return step_fn + def __call__( + self, + loader: Any, + n_iters: int, + rng: Optional[jax.Array] = None + ) -> Dict[str, List[float]]: + """TODO.""" + + def prepare_data( + batch: Dict[str, jnp.ndarray] + ) -> Tuple[Tuple[jnp.ndarray, Optional[jnp.ndarray], jnp.ndarray], Tuple[ + jnp.ndarray, jnp.ndarray, jnp.ndarray, jnp.ndarray]]: + src_lin, src_quad = batch.get("src_lin"), batch.get("src_quad") + tgt_lin, tgt_quad = batch.get("tgt_lin"), batch.get("tgt_quad") + arrs = src_lin, tgt_lin, src_quad, tgt_quad + + if src_quad is None and tgt_quad is None: # lin + src, tgt = src_lin, tgt_lin + elif src_lin 
is None and tgt_lin is None: # quad + src, tgt = src_quad, tgt_quad + elif all(arr is not None for arr in arrs): # fused quad + src = jnp.concatenate([src_lin, src_quad], axis=1) + tgt = jnp.concatenate([tgt_lin, tgt_quad], axis=1) + else: + raise RuntimeError("TODO") + + # TODO(michalk8): filter `None` from the `arrs`? + return (src, batch.get("src_condition"), tgt), arrs + + rng = utils.default_prng_key(rng) + training_logs = {"loss": []} + for batch in loader: + rng = jax.random.split(rng, 6) + rng, rng_resample, rng_noise, rng_time, rng_latent, rng_step_fn = rng + + batch = jtu.tree_map(jnp.asarray, batch) + (src, src_cond, tgt), data = prepare_data(batch) + + time = self.time_sampler(rng_time, len(src) * self.k_samples_per_x) + latent = self.latent_noise_fn(rng_noise, (self.k_samples_per_x, len(src))) + + tmat = self.data_match_fn(*data) # (n, m) + src_ixs, tgt_ixs = flow_utils.sample_conditional( # (n, k), (m, k) + rng_resample, + tmat, + k=self.k_samples_per_x, + uniform_marginals=True, # TODO(michalk8): expose + ) + + src = src[src_ixs].swapaxes(0, 1) # (k, n, ...) + tgt = tgt[tgt_ixs].swapaxes(0, 1) # (k, m, ...) + if src_cond is not None: + src_cond = src_cond[src_ixs].swapaxes(0, 1) # (k, n, ...) + + if self.latent_match_fn is not None: + src, src_cond, tgt = self._match_latent(rng, src, src_cond, latent, tgt) + + src = src.reshape(-1, *src.shape[2:]) # (k * bs, ...) 
+ tgt = tgt.reshape(-1, *tgt.shape[2:]) + latent = latent.reshape(-1, *latent.shape[2:]) + if src_cond is not None: + src_cond = src_cond.reshape(-1, *src_cond.shape[2:]) + + self.vf_state, loss = self.step_fn( + rng_step_fn, self.vf_state, time, src, tgt, latent, src_cond + ) + training_logs["loss"].append(float(loss)) + + return training_logs + + def _match_latent( + self, rng: jax.Array, src: jnp.ndarray, src_cond: Optional[jnp.ndarray], + latent: jnp.ndarray, tgt: jnp.ndarray + ) -> Tuple[jnp.ndarray, Optional[jnp.ndarray], jnp.ndarray]: + + def resample( + rng: jax.Array, src: jnp.ndarray, src_cond: Optional[jnp.ndarray], + tgt: jnp.ndarray, latent: jnp.ndarray + ) -> Tuple[jnp.ndarray, Optional[jnp.ndarray], jnp.ndarray]: + tmat = self.latent_match_fn(latent, tgt) # (n, k) + + src_ixs, tgt_ixs = flow_utils.sample_joint(rng, tmat) # (n,), (m,) + src, tgt = src[src_ixs], tgt[tgt_ixs] + if src_cond is not None: + src_cond = src_cond[src_ixs] + + return src, src_cond, tgt + + cond_axis = None if src_cond is None else 0 + in_axes, out_axes = (0, 0, cond_axis, 0, 0), (0, None, 0) + resample_fn = jax.jit(jax.vmap(resample, in_axes, out_axes)) + + rngs = jax.random.split(rng, self.k_samples_per_x) + return resample_fn(rngs, src, src_cond, tgt, latent) + def transport( self, source: jnp.ndarray, condition: Optional[jnp.ndarray] = None, + t0: float = 0.0, + t1: float = 1.0, rng: Optional[jax.Array] = None, - forward: bool = True, - t_0: float = 0.0, - t_1: float = 1.0, **kwargs: Any, - ) -> Union[jnp.array, diffrax.Solution, Optional[jnp.ndarray]]: + ) -> jnp.ndarray: """Transport data with the learnt plan. This method pushes-forward the `source` to its conditional distribution by @@ -127,60 +222,48 @@ def transport( Args: source: Data to transport. condition: Condition of the input data. + t0: Starting time of integration of neural ODE. + t1: End time of integration of neural ODE. rng: random seed for sampling from the latent distribution. 
- forward: If `True` integrates forward, otherwise backwards. - t_0: Starting time of integration of neural ODE. - t_1: End time of integration of neural ODE. kwargs: Keyword arguments for the ODE solver. Returns: The push-forward or pull-back distribution defined by the learnt transport plan. - """ - rng = utils.default_prng_key(rng) - if not forward: - raise NotImplementedError - if condition is not None: - assert len(source) == len(condition), (len(source), len(condition)) - latent_batch = self.latent_noise_fn(rng, (len(source),)) - cond_input = source if condition is None else ( - jnp.concatenate([source, condition], axis=-1) - ) - @jax.jit - def solve_ode(input: jnp.ndarray, cond: jnp.ndarray) -> jnp.ndarray: - ode_term = diffrax.ODETerm( - lambda t, x, args: self.vf_state. - apply_fn({"params": self.vf_state.params}, t=t, x=x, condition=cond) - ), - solver = kwargs.pop("solver", diffrax.Tsit5()) - stepsize_controller = kwargs.pop( - "stepsize_controller", diffrax.PIDController(rtol=1e-5, atol=1e-5) - ) + def vf(t: jnp.ndarray, x: jnp.ndarray, cond: jnp.ndarray) -> jnp.ndarray: + params = self.vf_state.params + return self.vf_state.apply_fn({"params": params}, t, x, cond) + + def solve_ode(x: jnp.ndarray, cond: jnp.ndarray) -> jnp.ndarray: + ode_term = diffrax.ODETerm(vf) sol = diffrax.diffeqsolve( ode_term, - solver, - t0=t_0, - t1=t_1, - dt0=kwargs.pop("dt0", None), - y0=input, - stepsize_controller=stepsize_controller, + t0=t0, + t1=t1, + y0=x, + args=cond, **kwargs, ) return sol.ys[0] - return jax.vmap(solve_ode)(latent_batch, cond_input) - - def _reshape_samples(self, arrays: Tuple[jnp.ndarray, ...], - batch_size: int) -> Tuple[jnp.ndarray, ...]: - return jax.tree_util.tree_map( - lambda x: jnp.reshape(x, (batch_size * self.k_samples_per_x, -1)), - arrays + kwargs.setdefault("dt0", None) + kwargs.setdefault("solver", diffrax.Tsit5()) + kwargs.setdefault( + "stepsize_controller", diffrax.PIDController(rtol=1e-5, atol=1e-5) ) + rng = 
utils.default_prng_key(rng) + latent = self.latent_noise_fn(rng, (len(source),)) + + if condition is not None: + source = jnp.concatenate([source, condition], axis=-1) + + return jax.jit(jax.vmap(solve_ode))(latent, source) + -class GENOTLin(GENOTBase): +class GENOTLin(GENOT): """Implementation of GENOT-L (:cite:`klein:23`). GENOT-L (Generative Entropic Neural Optimal Transport, linear) is a @@ -261,7 +344,7 @@ def __call__( training_logs["loss"].append(float(loss)) -class GENOTQuad(GENOTBase): +class GENOTQuad(GENOT): """Implementation of GENOT-Q and GENOT-F (:cite:`klein:23`). GENOT-Q (Generative Entropic Neural Optimal Transport, quadratic) and diff --git a/src/ott/neural/flow_models/otfm.py b/src/ott/neural/flow_models/otfm.py index 79ffa016c..b793a17c3 100644 --- a/src/ott/neural/flow_models/otfm.py +++ b/src/ott/neural/flow_models/otfm.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. import functools -from typing import Any, Callable, Dict, Optional, Tuple +from typing import Any, Callable, Dict, List, Optional, Tuple import jax import jax.numpy as jnp @@ -23,7 +23,7 @@ from ott import utils from ott.neural.flow_models import flows, models, samplers -from ott.neural.flow_models.utils import resample_data, sample_joint +from ott.neural.flow_models.utils import sample_joint __all__ = ["OTFlowMatching"] @@ -80,7 +80,7 @@ def loss_fn( x_t = self.flow.compute_xt(rng, t, source, target) apply_fn = functools.partial(vf_state.apply_fn, {"params": params}) - v_t = jax.vmap(apply_fn)(t=t, x=x_t, condition=source_conditions) + v_t = jax.vmap(apply_fn)(t, x_t, source_conditions) u_t = self.flow.compute_ut(t, source, target) return jnp.mean((v_t - u_t) ** 2) @@ -102,30 +102,29 @@ def __call__( # noqa: D102 *, n_iters: int, rng: Optional[jax.Array] = None, - ) -> Dict[str, Any]: + ) -> Dict[str, List[float]]: rng = utils.default_prng_key(rng) training_logs = {"loss": []} - for batch in loader: rng, 
rng_resample, rng_step_fn = jax.random.split(rng, 3) batch = jtu.tree_map(jnp.asarray, batch) src, tgt = batch["src_lin"], batch["tgt_lin"] - src_conds = batch.get("src_condition", None) + src_cond = batch.get("src_condition") if self.match_fn is not None: tmat = self.match_fn(src, tgt) src_ixs, tgt_ixs = sample_joint(rng_resample, tmat) - src, src_conds = resample_data(src, src_conds, ixs=src_ixs) - tgt = resample_data(tgt, ixs=tgt_ixs) + src, tgt = src[src_ixs], tgt[tgt_ixs] + src_cond = None if src_cond is None else src_cond[src_ixs] self.vf_state, loss = self.step_fn( rng_step_fn, self.vf_state, src, tgt, - src_conds, + src_cond, ) training_logs["loss"].append(float(loss)) @@ -162,10 +161,8 @@ def transport( def vf( t: jnp.ndarray, x: jnp.ndarray, cond: Optional[jnp.ndarray] ) -> jnp.ndarray: - return self.vf_state.apply_fn({"params": self.vf_state.params}, - t=t, - x=x, - condition=cond) + params = self.vf_state.params + return self.vf_state.apply_fn({"params": params}, t, x, cond) def solve_ode(x: jnp.ndarray, cond: Optional[jnp.ndarray]) -> jnp.ndarray: ode_term = diffrax.ODETerm(vf) diff --git a/src/ott/neural/flow_models/utils.py b/src/ott/neural/flow_models/utils.py index d6d023266..9b8386ea9 100644 --- a/src/ott/neural/flow_models/utils.py +++ b/src/ott/neural/flow_models/utils.py @@ -2,7 +2,6 @@ import jax import jax.numpy as jnp -import jax.tree_util as jtu from ott.geometry import costs, pointcloud from ott.solvers import linear, quadratic @@ -12,7 +11,6 @@ "match_quadratic", "sample_joint", "sample_conditional", - "resample_data", ] ScaleCost_t = Union[float, Literal["mean", "max_cost", "median"]] @@ -95,13 +93,3 @@ def sample_conditional( src_ixs = jnp.repeat(indices[:, None], k, axis=1) # (n, k) return src_ixs, tgt_ixs - - -def resample_data(*data: Optional[jnp.ndarray], - ixs: jnp.ndarray) -> Tuple[Optional[jnp.ndarray], ...]: - """TODO.""" - if ixs.ndim == 2: - ixs = ixs.reshape(-1) - assert ixs.ndim == 1, ixs.shape - data = jtu.tree_map(lambda 
arr: None if arr is None else arr[ixs], data) - return data[0] if len(data) == 1 else data From f2c20a47e5d34c90dfb7ec1d1159ca936fb22ed7 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Fri, 15 Mar 2024 02:25:50 +0100 Subject: [PATCH 122/186] Remove old GENOTLin/GENOTQuad --- src/ott/neural/flow_models/genot.py | 176 +--------------------------- 1 file changed, 1 insertion(+), 175 deletions(-) diff --git a/src/ott/neural/flow_models/genot.py b/src/ott/neural/flow_models/genot.py index a4878c892..f6b9c080e 100644 --- a/src/ott/neural/flow_models/genot.py +++ b/src/ott/neural/flow_models/genot.py @@ -25,7 +25,7 @@ from ott.neural.flow_models import flows, models from ott.neural.flow_models import utils as flow_utils -__all__ = ["GENOT", "GENOTLin", "GENOTQuad"] +__all__ = ["GENOT"] # TODO(michalk8): remove the base class? @@ -261,177 +261,3 @@ def solve_ode(x: jnp.ndarray, cond: jnp.ndarray) -> jnp.ndarray: source = jnp.concatenate([source, condition], axis=-1) return jax.jit(jax.vmap(solve_ode))(latent, source) - - -class GENOTLin(GENOT): - """Implementation of GENOT-L (:cite:`klein:23`). - - GENOT-L (Generative Entropic Neural Optimal Transport, linear) is a - neural solver for entropic (linear) OT problems. 
- """ - - def __call__( - self, - n_iters: int, - train_source, - train_target, - valid_source, - valid_target, - valid_freq: int = 5000, - rng: Optional[jax.Array] = None, - ): - """Train GENOTLin.""" - rng = utils.default_prng_key(rng) - training_logs = {"loss": []} - - for _ in range(n_iters): - for batch_source, batch_target in zip(train_source, train_target): - ( - rng, rng_resample, rng_noise, rng_time, rng_latent_data_match, - rng_step_fn - ) = jax.random.split(rng, 6) - - batch_source = jtu.tree_map(jnp.asarray, batch_source) - batch_target = jtu.tree_map(jnp.asarray, batch_target) - - source = batch_source["lin"] - source_conditions = batch_source.get("conditions", None) - target = batch_target["lin"] - - batch_size = len(source) - n_samples = batch_size * self.k_samples_per_x - time = self.time_sampler(rng_time, n_samples) - latent = self.latent_noise_fn( - rng_noise, (self.k_samples_per_x, batch_size) - ) - - tmat = self.ot_matcher.match_fn( - source, - target, - ) - - (source, source_conditions - ), (target,) = self.ot_matcher.sample_conditional_indices_from_tmap( - rng=rng_resample, - conditional_distributions=tmat, - k_samples_per_x=self.k_samples_per_x, - source_arrays=(source, source_conditions), - target_arrays=(target,), - source_is_balanced=(self.ot_matcher.tau_a == 1.0) - ) - - if self.latent_match_fn is not None: - # already vmapped - tmats_latent_data = self.latent_match_fn(latent, target) - - rng_latent_data_match = jax.random.split( - rng_latent_data_match, self.k_samples_per_x - ) - (source, source_conditions - ), (target,) = jax.vmap(self.ot_matcher.sample_joint, 0, 0)( - rng_latent_data_match, tmats_latent_data, - (source, source_conditions), (target,) - ) - - source, source_conditions, target, latent = self._reshape_samples( - (source, source_conditions, target, latent), batch_size - ) - self.vf_state, loss = self.step_fn( - rng_step_fn, self.vf_state, time, source, target, latent, - source_conditions - ) - - 
training_logs["loss"].append(float(loss)) - - -class GENOTQuad(GENOT): - """Implementation of GENOT-Q and GENOT-F (:cite:`klein:23`). - - GENOT-Q (Generative Entropic Neural Optimal Transport, quadratic) and - GENOT-F (Generative Entropic Neural Optimal Transport, fused) are neural - solver for entropic Gromov-Wasserstein and entropic Fused Gromov-Wasserstein - problems, respectively. - """ - - def __call__( - self, - n_iters: int, - train_source, - train_target, - valid_source, - valid_target, - valid_freq: int = 5000, - rng: Optional[jax.Array] = None, - ): - """Train GENOTQuad.""" - rng = utils.default_prng_key(rng) - training_logs = {"loss": []} - - for _ in range(n_iters): - for batch_source, batch_target in zip(train_source, train_target): - ( - rng, rng_resample, rng_noise, rng_time, rng_latent_data_match, - rng_step_fn - ) = jax.random.split(rng, 6) - - batch_source = jtu.tree_map(jnp.asarray, batch_source) - batch_target = jtu.tree_map(jnp.asarray, batch_target) - - source_lin = batch_source.get("lin", None) - source_quad = batch_source["quad"] - source_conditions = batch_source.get("conditions", None) - target_lin = batch_target.get("lin", None) - target_quad = batch_target["quad"] - - batch_size = len(source_quad) - n_samples = batch_size * self.k_samples_per_x - time = self.time_sampler(rng_time, n_samples) - latent = self.latent_noise_fn( - rng_noise, (self.k_samples_per_x, batch_size) - ) - - tmat = self.ot_matcher.match_fn( - source_quad, target_quad, source_lin, target_lin - ) - - if self.ot_matcher.fused_penalty > 0.0: - source = jnp.concatenate((source_lin, source_quad), axis=1) - target = jnp.concatenate((target_lin, target_quad), axis=1) - else: - source = source_quad - target = target_quad - - (source, source_conditions), (target,) = ( - self.ot_matcher.sample_conditional_indices_from_tmap( - rng=rng_resample, - conditional_distributions=tmat, - k_samples_per_x=self.k_samples_per_x, - source_arrays=(source, source_conditions), - 
target_arrays=(target,), - source_is_balanced=(self.ot_matcher.tau_a == 1.0) - ) - ) - - if self.latent_match_fn is not None: - # already vmapped - tmats_latent_data = self.latent_match_fn(latent, target) - - rng_latent_data_match = jax.random.split( - rng_latent_data_match, self.k_samples_per_x - ) - - (source, source_conditions - ), (target,) = jax.vmap(self.ot_matcher.sample_joint, 0, 0)( - rng_latent_data_match, tmats_latent_data, - (source, source_conditions), (target,) - ) - - source, source_conditions, target, latent = self._reshape_samples( - (source, source_conditions, target, latent), batch_size - ) - - self.vf_state, loss = self.step_fn( - rng_step_fn, self.vf_state, time, source, target, latent, - source_conditions - ) - training_logs["loss"].append(float(loss)) From 693ecc4e4fa9cd03f9e8eb3cfa9f2c0244521095 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Fri, 15 Mar 2024 02:34:55 +0100 Subject: [PATCH 123/186] Remove axis swapping --- src/ott/neural/flow_models/genot.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/src/ott/neural/flow_models/genot.py b/src/ott/neural/flow_models/genot.py index f6b9c080e..b2f5965c2 100644 --- a/src/ott/neural/flow_models/genot.py +++ b/src/ott/neural/flow_models/genot.py @@ -145,12 +145,13 @@ def prepare_data( rng, rng_resample, rng_noise, rng_time, rng_latent, rng_step_fn = rng batch = jtu.tree_map(jnp.asarray, batch) - (src, src_cond, tgt), data = prepare_data(batch) + (src, src_cond, tgt), matching_data = prepare_data(batch) - time = self.time_sampler(rng_time, len(src) * self.k_samples_per_x) - latent = self.latent_noise_fn(rng_noise, (self.k_samples_per_x, len(src))) + n = src.shape[0] + time = self.time_sampler(rng_time, n * self.k_samples_per_x) + latent = self.latent_noise_fn(rng_noise, (n, self.k_samples_per_x)) - tmat = self.data_match_fn(*data) # (n, m) + tmat = self.data_match_fn(*matching_data) # (n, m) src_ixs, 
tgt_ixs = flow_utils.sample_conditional( # (n, k), (m, k) rng_resample, tmat, @@ -158,16 +159,15 @@ def prepare_data( uniform_marginals=True, # TODO(michalk8): expose ) - src = src[src_ixs].swapaxes(0, 1) # (k, n, ...) - tgt = tgt[tgt_ixs].swapaxes(0, 1) # (k, m, ...) + src, tgt = src[src_ixs], tgt[tgt_ixs] # (n, k, ...), # (m, k, ...) if src_cond is not None: - src_cond = src_cond[src_ixs].swapaxes(0, 1) # (k, n, ...) + src_cond = src_cond[src_ixs] if self.latent_match_fn is not None: src, src_cond, tgt = self._match_latent(rng, src, src_cond, latent, tgt) - src = src.reshape(-1, *src.shape[2:]) # (k * bs, ...) - tgt = tgt.reshape(-1, *tgt.shape[2:]) + src = src.reshape(-1, *src.shape[2:]) # (n * k, ...) + tgt = tgt.reshape(-1, *tgt.shape[2:]) # (m * k, ...) latent = latent.reshape(-1, *latent.shape[2:]) if src_cond is not None: src_cond = src_cond.reshape(-1, *src_cond.shape[2:]) @@ -197,8 +197,8 @@ def resample( return src, src_cond, tgt - cond_axis = None if src_cond is None else 0 - in_axes, out_axes = (0, 0, cond_axis, 0, 0), (0, None, 0) + cond_axis = None if src_cond is None else 1 + in_axes, out_axes = (0, 1, cond_axis, 1, 1), (1, cond_axis, 1) resample_fn = jax.jit(jax.vmap(resample, in_axes, out_axes)) rngs = jax.random.split(rng, self.k_samples_per_x) From 3d9c70278187e330bad60dca9d6c3294bc25e212 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Fri, 15 Mar 2024 02:39:49 +0100 Subject: [PATCH 124/186] Remove old todo --- src/ott/neural/flow_models/genot.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/ott/neural/flow_models/genot.py b/src/ott/neural/flow_models/genot.py index b2f5965c2..2ac511611 100644 --- a/src/ott/neural/flow_models/genot.py +++ b/src/ott/neural/flow_models/genot.py @@ -28,7 +28,6 @@ __all__ = ["GENOT"] -# TODO(michalk8): remove the base class? class GENOT: """TODO :cite:`klein_uscidda:23`. 
From f27d209e1fa8abe3e4bb69e6104a79b68743615b Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Fri, 15 Mar 2024 14:39:01 +0100 Subject: [PATCH 125/186] Fix OTFM tests --- src/ott/neural/flow_models/models.py | 20 ++---- src/ott/neural/flow_models/otfm.py | 7 +- tests/neural/conftest.py | 12 ++-- tests/neural/genot_test.py | 1 - tests/neural/otfm_test.py | 102 ++++++--------------------- 5 files changed, 37 insertions(+), 105 deletions(-) diff --git a/src/ott/neural/flow_models/models.py b/src/ott/neural/flow_models/models.py index c71fff2c2..26cc31915 100644 --- a/src/ott/neural/flow_models/models.py +++ b/src/ott/neural/flow_models/models.py @@ -50,9 +50,6 @@ class VelocityField(nn.Module): If :obj:`None`, set to ``latent_embed_dim``. t_embed_dim: Dimensionality of the time embedding. If :obj:`None`, set to ``latent_embed_dim``. - joint_hidden_dim: Dimensionality of the hidden layers of the joint network. - If :obj:`None`, set to ``latent_embed_dim + condition_embed_dim + - t_embed_dim``. num_layers_per_block: Number of layers per block. act_fn: Activation function. n_freqs: Number of frequencies to use for the time embedding. @@ -62,7 +59,6 @@ class VelocityField(nn.Module): condition_dim: int = 0 condition_embed_dim: Optional[int] = None t_embed_dim: Optional[int] = None - joint_hidden_dim: Optional[int] = None num_layers_per_block: int = 3 act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.silu n_freqs: int = 128 @@ -72,18 +68,9 @@ def __post_init__(self) -> None: self.condition_embed_dim = self.latent_embed_dim if self.t_embed_dim is None: self.t_embed_dim = self.latent_embed_dim - - concat_embed_dim = ( + self.joint_hidden_dim = ( self.latent_embed_dim + self.condition_embed_dim + self.t_embed_dim ) - if self.joint_hidden_dim is not None: - assert (self.joint_hidden_dim >= concat_embed_dim), ( - "joint_hidden_dim must be greater than or equal to the sum of" - " all embedded dimensions." 
- ) - self.joint_hidden_dim = self.latent_embed_dim - else: - self.joint_hidden_dim = concat_embed_dim super().__post_init__() @nn.compact @@ -121,8 +108,11 @@ def __call__( x = x_layer(x) if self.condition_dim > 0: + assert condition is not None, \ + "Condition must be specified when `condition_dim > 0`." condition_layer = layers.MLPBlock( - dim=self.condition_embed_dim, + # TODO(michalk8): doesn't fail with `condition_embed_dim` + dim=self.condition_dim, out_dim=self.condition_embed_dim, num_layers=self.num_layers_per_block, act_fn=self.act_fn diff --git a/src/ott/neural/flow_models/otfm.py b/src/ott/neural/flow_models/otfm.py index b793a17c3..d80d8e6b8 100644 --- a/src/ott/neural/flow_models/otfm.py +++ b/src/ott/neural/flow_models/otfm.py @@ -11,7 +11,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -import functools from typing import Any, Callable, Dict, List, Optional, Tuple import jax @@ -79,8 +78,10 @@ def loss_fn( ) -> jnp.ndarray: x_t = self.flow.compute_xt(rng, t, source, target) - apply_fn = functools.partial(vf_state.apply_fn, {"params": params}) - v_t = jax.vmap(apply_fn)(t, x_t, source_conditions) + v_t = vf_state.apply_fn({"params": params}, t, x_t, source_conditions) + # TODO(michalk8): should be removed + # apply_fn = functools.partial(vf_state.apply_fn, {"params": params}) + # v_t = jax.vmap(apply_fn)(t, x_t, source_conditions) u_t = self.flow.compute_ut(t, source, target) return jnp.mean((v_t - u_t) ** 2) diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index 04f9917a8..c3cd11ce7 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -52,10 +52,11 @@ def lin_dl() -> DataLoader: @pytest.fixture() def lin_dl_with_conds() -> DataLoader: - n, d = 100, 2 + n, d, cond_dim = 100, 2, 3 rng = np.random.default_rng(13) - src_cond, tgt_cond = rng.normal(size=(n, 1)), rng.normal(size=(n, 1)) 
+ src_cond = rng.normal(size=(n, cond_dim)) + tgt_cond = rng.normal(size=(n, cond_dim)) src = _ot_data(rng, n=n, dim=d, condition=src_cond) tgt = _ot_data(rng, n=n, dim=d, condition=tgt_cond) @@ -65,10 +66,13 @@ def lin_dl_with_conds() -> DataLoader: @pytest.fixture() def conditional_lin_dl() -> datasets.ConditionalLoader: + cond_dim = 4 rng = np.random.default_rng(42) - src0, tgt0 = _ot_data(rng, condition=0.0), _ot_data(rng, offset=2.0) - src1, tgt1 = _ot_data(rng, condition=1.0), _ot_data(rng, offset=-2.0) + src0 = _ot_data(rng, condition=0.0, cond_dim=cond_dim) + tgt0 = _ot_data(rng, offset=2.0) + src1 = _ot_data(rng, condition=1.0, cond_dim=cond_dim) + tgt1 = _ot_data(rng, offset=-2.0) src_ds = datasets.OTDataset(src0, tgt0) tgt_ds = datasets.OTDataset(src1, tgt1) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index c60b2e064..1c9c985a9 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -21,7 +21,6 @@ import optax from ott.geometry import costs -from ott.neural.flow_models.genot import GENOTLin, GENOTQuad from ott.neural.flow_models.models import VelocityField from ott.neural.flow_models.samplers import uniform_sampler from ott.solvers.linear import sinkhorn, sinkhorn_lr diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index df2fb4bdb..ccca15214 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -11,112 +11,50 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-import functools - import pytest import jax import jax.numpy as jnp -from torch.utils.data import DataLoader import optax -from ott.neural.data import datasets -from ott.neural.flow_models import flows, models, otfm, samplers, utils +from ott.neural.flow_models import flows, models, otfm, utils class TestOTFlowMatching: - def test_fm(self, lin_dl: DataLoader): - input_dim = 2 - neural_vf = models.VelocityField( - output_dim=2, - condition_dim=0, - latent_embed_dim=5, - ) - fm = otfm.OTFlowMatching( - input_dim, - neural_vf, - flow=flows.ConstantNoiseFlow(0.0), - time_sampler=samplers.uniform_sampler, - match_fn=jax.jit(utils.match_linear), - optimizer=optax.adam(learning_rate=1e-3), - ) - - _logs = fm(lin_dl, n_iters=2) - - for batch in lin_dl: - src = jnp.asarray(batch["src_lin"]) - tgt = jnp.asarray(batch["tgt_lin"]) - break - - res_fwd = fm.transport(src) - res_bwd = fm.transport(tgt, t0=1.0, t1=0.0) - - # TODO(michalk8): better assertions - assert jnp.sum(jnp.isnan(res_fwd)) == 0 - assert jnp.sum(jnp.isnan(res_bwd)) == 0 + @pytest.mark.parametrize(("cond_dim", "dl"), [(0, "lin_dl"), + (3, "lin_dl_with_conds"), + (4, "conditional_lin_dl")]) + def test_fm(self, rng: jax.Array, cond_dim: int, dl: str, request): + input_dim, output_dim, latent_dim = 2, 2, 5 + dl = request.getfixturevalue(dl) - def test_fm_with_conds(self, lin_dl_with_conds: DataLoader): - input_dim, cond_dim = 2, 1 neural_vf = models.VelocityField( - output_dim=input_dim, + output_dim=output_dim, condition_dim=cond_dim, - latent_embed_dim=5, + latent_embed_dim=latent_dim, ) fm = otfm.OTFlowMatching( - 2, neural_vf, - flow=flows.BrownianNoiseFlow(0.12), - time_sampler=functools.partial(samplers.uniform_sampler, offset=1e-5), + flows.ConstantNoiseFlow(0.0), match_fn=jax.jit(utils.match_linear), + rng=rng, optimizer=optax.adam(learning_rate=1e-3), + input_dim=input_dim, ) - _logs = fm(lin_dl_with_conds, n_iters=2) - - for batch in lin_dl_with_conds: - src = jnp.asarray(batch["src_lin"]) - tgt = 
jnp.asarray(batch["tgt_lin"]) - src_cond = jnp.asarray(batch["src_condition"]) - break - - res_fwd = fm.transport(src, condition=src_cond) - res_bwd = fm.transport(tgt, condition=src_cond, t0=1.0, t1=0.0) - - # TODO(michalk8): better assertions - assert jnp.sum(jnp.isnan(res_fwd)) == 0 - assert jnp.sum(jnp.isnan(res_bwd)) == 0 - - @pytest.mark.parametrize("rank", [-1, 10]) - def test_fm_conditional_loader( - self, rank: int, conditional_lin_dl: datasets.ConditionalLoader - ): - input_dim, cond_dim = 2, 0 - neural_vf = models.VelocityField( - output_dim=input_dim, - condition_dim=cond_dim, - latent_embed_dim=5, - ) - fm = otfm.OTFlowMatching( - input_dim, - neural_vf, - flow=flows.ConstantNoiseFlow(13.0), - time_sampler=samplers.uniform_sampler, - match_fn=jax.jit(functools.partial(utils.match_linear, rank=rank)), - optimizer=optax.adam(learning_rate=1e-3), - ) - - _logs = fm(conditional_lin_dl, n_iters=2) + _logs = fm(dl, n_iters=3) - for batch in conditional_lin_dl: - src = jnp.asarray(batch["src_lin"]) - tgt = jnp.asarray(batch["tgt_lin"]) - src_cond = jnp.asarray(batch["src_condition"]) - break + batch = next(iter(dl)) + src = jnp.asarray(batch["src_lin"]) + tgt = jnp.asarray(batch["tgt_lin"]) + src_cond = batch.get("src_condition") + if src_cond is not None: + src_cond = jnp.asarray(src_cond) res_fwd = fm.transport(src, condition=src_cond) - res_bwd = fm.transport(tgt, condition=src_cond, t0=1.0, t1=0.0) + res_bwd = fm.transport(tgt, t0=1.0, t1=0.0, condition=src_cond) # TODO(michalk8): better assertions assert jnp.sum(jnp.isnan(res_fwd)) == 0 From 4688998c4ce0b842d0784b74a33374a0fee47e32 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Fri, 15 Mar 2024 16:41:46 +0100 Subject: [PATCH 126/186] Remove `MLPBlock` and `RescalingMLP` --- src/ott/neural/__init__.py | 2 +- src/ott/neural/duality/layers.py | 5 +- src/ott/neural/flow_models/models.py | 64 +++++--------- src/ott/neural/models/__init__.py | 14 ---- 
src/ott/neural/models/layers.py | 50 ----------- src/ott/neural/models/nets.py | 121 --------------------------- tests/neural/losses_test.py | 5 +- tests/neural/map_estimator_test.py | 6 +- 8 files changed, 28 insertions(+), 239 deletions(-) delete mode 100644 src/ott/neural/models/__init__.py delete mode 100644 src/ott/neural/models/layers.py delete mode 100644 src/ott/neural/models/nets.py diff --git a/src/ott/neural/__init__.py b/src/ott/neural/__init__.py index 678919a8c..10dac222c 100644 --- a/src/ott/neural/__init__.py +++ b/src/ott/neural/__init__.py @@ -11,4 +11,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from . import data, duality, flow_models, gaps, models +from . import data, duality, flow_models, gaps diff --git a/src/ott/neural/duality/layers.py b/src/ott/neural/duality/layers.py index e0d755d0e..6ed857452 100644 --- a/src/ott/neural/duality/layers.py +++ b/src/ott/neural/duality/layers.py @@ -79,7 +79,8 @@ def __call__(self, x: jnp.ndarray) -> jnp.ndarray: class PosDefPotentials(nn.Module): - r""":math:`\frac{1}{2} x^T (A_i A_i^T + \text{Diag}(d_i)) x + b_i^T x^2 + c_i` potentials. + r""":math:`\frac{1}{2} x^T (A_i A_i^T + \text{Diag}(d_i)) x + b_i^T x^2 + c_i` + potentials. This class implements a layer that takes (batched) ``d``-dimensional vectors ``x`` in, to output a ``num_potentials``-dimensional vector. Each of the @@ -111,7 +112,7 @@ class PosDefPotentials(nn.Module): bias_init: Initializer for the bias. The default is :func:`~flax.linen.initializers.zeros`. precision: Numerical precision of the computation. 
- """ # noqa: E501 + """ # noqa: D205,E501 num_potentials: int rank: int = 0 diff --git a/src/ott/neural/flow_models/models.py b/src/ott/neural/flow_models/models.py index 26cc31915..d0a07d66e 100644 --- a/src/ott/neural/flow_models/models.py +++ b/src/ott/neural/flow_models/models.py @@ -21,7 +21,6 @@ from flax.training import train_state import ott.neural.flow_models.layers as flow_layers -from ott.neural.models import layers __all__ = ["VelocityField"] @@ -37,7 +36,7 @@ class VelocityField(nn.Module): from :math:`t=t_0` to :math:`t=t_1`. Each of the input, condition, and time embeddings are passed through a block - consisting of ``num_layers_per_block`` layers of dimension + consisting of ``num_layers`` layers of dimension ``latent_embed_dim``, ``condition_embed_dim``, and ``time_embed_dim``, respectively. The output of each block is concatenated and passed through a final block of dimension ``joint_hidden_dim``. @@ -50,7 +49,7 @@ class VelocityField(nn.Module): If :obj:`None`, set to ``latent_embed_dim``. t_embed_dim: Dimensionality of the time embedding. If :obj:`None`, set to ``latent_embed_dim``. - num_layers_per_block: Number of layers per block. + num_layers: Number of layers. act_fn: Activation function. n_freqs: Number of frequencies to use for the time embedding. """ @@ -59,7 +58,7 @@ class VelocityField(nn.Module): condition_dim: int = 0 condition_embed_dim: Optional[int] = None t_embed_dim: Optional[int] = None - num_layers_per_block: int = 3 + num_layers: int = 3 act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.silu n_freqs: int = 128 @@ -83,52 +82,27 @@ def __call__( """Forward pass through the neural vector field. Args: - t: Time of shape `(batch_size, 1)`. - x: Data of shape `(batch_size, output_dim)`. - condition: Conditioning vector. + t: Time of shape ``[batch, 1]``. + x: Data of shape ``[batch, ...]``. + condition: Conditioning vector of shape ``[batch, cond_dim]``. Returns: - Output of the neural vector field. 
+ Output of the neural vector field of shape ``[batch, output_dim]``. """ t = flow_layers.CyclicalTimeEncoder(self.n_freqs)(t) - t_layer = layers.MLPBlock( - dim=self.t_embed_dim, - out_dim=self.t_embed_dim, - num_layers=self.num_layers_per_block, - act_fn=self.act_fn - ) - t = t_layer(t) - x_layer = layers.MLPBlock( - dim=self.latent_embed_dim, - out_dim=self.latent_embed_dim, - num_layers=self.num_layers_per_block, - act_fn=self.act_fn - ) - x = x_layer(x) - - if self.condition_dim > 0: - assert condition is not None, \ - "Condition must be specified when `condition_dim > 0`." - condition_layer = layers.MLPBlock( - # TODO(michalk8): doesn't fail with `condition_embed_dim` - dim=self.condition_dim, - out_dim=self.condition_embed_dim, - num_layers=self.num_layers_per_block, - act_fn=self.act_fn - ) - condition = condition_layer(condition) - concatenated = jnp.concatenate([t, x, condition], axis=-1) - else: - concatenated = jnp.concatenate([t, x], axis=-1) - - out_layer = layers.MLPBlock( - dim=self.joint_hidden_dim, - out_dim=self.joint_hidden_dim, - num_layers=self.num_layers_per_block, - act_fn=self.act_fn - ) - out = out_layer(concatenated) + for _ in range(self.num_layers): + t = self.act_fn(nn.Dense(self.t_embed_dim)(t)) + x = self.act_fn(nn.Dense(self.latent_embed_dim)(x)) + if self.condition_dim > 0: + assert condition is not None, "TODO." 
+ condition = self.act_fn(nn.Dense(self.condition_embed_dim)(condition)) + + arrs = [t, x] + ([] if condition is None else [condition]) + out = jnp.concatenate(arrs, axis=-1) + + for _ in range(self.num_layers): + out = self.act_fn(nn.Dense(self.joint_hidden_dim)(out)) return nn.Dense(self.output_dim, use_bias=True)(out) def create_train_state( diff --git a/src/ott/neural/models/__init__.py b/src/ott/neural/models/__init__.py deleted file mode 100644 index 83287aec5..000000000 --- a/src/ott/neural/models/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright OTT-JAX -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from . import layers, nets diff --git a/src/ott/neural/models/layers.py b/src/ott/neural/models/layers.py deleted file mode 100644 index d0352ff05..000000000 --- a/src/ott/neural/models/layers.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright OTT-JAX -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-from typing import Any - -import jax.numpy as jnp - -import flax.linen as nn - -__all__ = ["MLPBlock"] - - -class MLPBlock(nn.Module): - """An MLP block. - - Args: - dim: Dimensionality of the input data. - num_layers: Number of layers in the MLP block. - act_fn: Activation function. - out_dim: Dimensionality of the output data. - """ - dim: int = 128 - num_layers: int = 3 - act_fn: Any = nn.silu - out_dim: int = 128 - - @nn.compact - def __call__(self, x: jnp.ndarray) -> jnp.ndarray: - """Apply the MLP block. - - Args: - x: Input data of shape (batch_size, dim). - - Returns: - Output data of shape (batch_size, out_dim). - """ - for _ in range(self.num_layers): - x = nn.Dense(self.dim)(x) - x = self.act_fn(x) - return nn.Dense(self.out_dim)(x) diff --git a/src/ott/neural/models/nets.py b/src/ott/neural/models/nets.py deleted file mode 100644 index cad4e84c2..000000000 --- a/src/ott/neural/models/nets.py +++ /dev/null @@ -1,121 +0,0 @@ -# Copyright OTT-JAX -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from typing import Callable, Optional - -import jax -import jax.numpy as jnp - -import flax.linen as nn -import optax -from flax.training import train_state - -from ott.neural.models import layers - -__all__ = ["RescalingMLP"] - - -class RescalingMLP(nn.Module): - """Network to learn distributional rescaling factors based on a MLP. - - The input is passed through a block consisting of ``num_layers_per_block`` - with size ``hidden_dim``. 
- If ``condition_dim`` is greater than 0, the conditioning vector is passed - through a block of the same size. - Both outputs are concatenated and passed through another block of the same - size. - - To ensure non-negativity of the output, the output is exponentiated. - - Args: - hidden_dim: Dimensionality of the hidden layers. - condition_dim: Dimensionality of the conditioning vector. - num_layers_per_block: Number of layers per block. - act_fn: Activation function. - - Returns: - Non-negative escaling factors. - """ - hidden_dim: int - condition_dim: int = 0 - num_layers_per_block: int = 3 - act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.selu - - @nn.compact - def __call__( - self, - x: jnp.ndarray, - condition: Optional[jnp.ndarray] = None - ) -> jnp.ndarray: - """Forward pass through the rescaling network. - - Args: - x: Data of shape ``[n, ...]``. - condition: Condition of shape ``[n, condition_dim]``. - - Returns: - Estimated rescaling factors. - """ - x_layer = layers.MLPBlock( - dim=self.hidden_dim, - out_dim=self.hidden_dim, - num_layers=self.num_layers_per_block, - act_fn=self.act_fn - ) - x = x_layer(x) - - if self.condition_dim > 0: - condition_layer = layers.MLPBlock( - dim=self.hidden_dim, - out_dim=self.hidden_dim, - num_layers=self.num_layers_per_block, - act_fn=self.act_fn - ) - - condition = condition_layer(condition) - concatenated = jnp.concatenate((x, condition), axis=-1) - else: - concatenated = x - - out_layer = layers.MLPBlock( - dim=self.hidden_dim, - out_dim=1, - num_layers=self.num_layers_per_block, - act_fn=self.act_fn - ) - - out = out_layer(concatenated) - return jnp.exp(out) - - def create_train_state( - self, - rng: jax.Array, - optimizer: optax.OptState, - input_dim: int, - ) -> train_state.TrainState: - """Create the training state. - - Args: - rng: Random number generator. - optimizer: Optimizer. - input_dim: Dimensionality of the input. - - Returns: - Training state. 
- """ - params = self.init( - rng, jnp.ones((1, input_dim)), jnp.ones((1, self.condition_dim)) - )["params"] - return train_state.TrainState.create( - apply_fn=self.apply, params=params, tx=optimizer - ) diff --git a/tests/neural/losses_test.py b/tests/neural/losses_test.py index e26d8227b..e1e13f193 100644 --- a/tests/neural/losses_test.py +++ b/tests/neural/losses_test.py @@ -11,15 +11,14 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - import pytest import jax import numpy as np from ott.geometry import costs +from ott.neural.duality import models from ott.neural.gaps import monge_gap -from ott.neural.models import nets @pytest.mark.fast() @@ -35,7 +34,7 @@ def test_monge_gap_non_negativity( rng1, rng2 = jax.random.split(rng, 2) reference_points = jax.random.normal(rng1, (n_samples, n_features)) - model = nets.MLP(dim_hidden=[8, 8], is_potential=False) + model = models.PotentialMLP(dim_hidden=[8, 8], is_potential=False) params = model.init(rng2, x=reference_points[0]) target = model.apply(params, reference_points) diff --git a/tests/neural/map_estimator_test.py b/tests/neural/map_estimator_test.py index 399dff39d..cee66e40e 100644 --- a/tests/neural/map_estimator_test.py +++ b/tests/neural/map_estimator_test.py @@ -19,8 +19,8 @@ from ott import datasets from ott.geometry import pointcloud +from ott.neural.duality import models from ott.neural.gaps import map_estimator, monge_gap -from ott.neural.models import nets from ott.tools import sinkhorn_divergence @@ -44,14 +44,14 @@ def fitting_loss( x=samples, y=mapped_samples, ).divergence - return (div, None) + return div, None def regularizer(x, y): gap, out = monge_gap.monge_gap_from_samples(x, y, return_output=True) return gap, out.n_iters # define the model - model = nets.MLP(dim_hidden=[16, 8], is_potential=False) + model = models.PotentialMLP(dim_hidden=[16, 8], 
is_potential=False) # generate data train_dataset, valid_dataset, dim_data = ( From 52c5de985f4cd02c2536a35a27e0f5ab3773f4ad Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Fri, 15 Mar 2024 16:43:02 +0100 Subject: [PATCH 127/186] Add forgotten license --- src/ott/neural/flow_models/utils.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/src/ott/neural/flow_models/utils.py b/src/ott/neural/flow_models/utils.py index 9b8386ea9..21f91b350 100644 --- a/src/ott/neural/flow_models/utils.py +++ b/src/ott/neural/flow_models/utils.py @@ -1,3 +1,16 @@ +# Copyright OTT-JAX +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
from typing import Any, Literal, Optional, Tuple, Union import jax From 0b417d76299aeeff18636b9d60deb07238418906 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Fri, 15 Mar 2024 17:13:58 +0100 Subject: [PATCH 128/186] Remove `__post_init__` from `VF` --- src/ott/neural/flow_models/models.py | 61 ++++++++++++---------------- tests/neural/genot_test.py | 12 +++--- tests/neural/otfm_test.py | 11 ++--- 3 files changed, 39 insertions(+), 45 deletions(-) diff --git a/src/ott/neural/flow_models/models.py b/src/ott/neural/flow_models/models.py index d0a07d66e..ff3183016 100644 --- a/src/ott/neural/flow_models/models.py +++ b/src/ott/neural/flow_models/models.py @@ -37,41 +37,29 @@ class VelocityField(nn.Module): Each of the input, condition, and time embeddings are passed through a block consisting of ``num_layers`` layers of dimension - ``latent_embed_dim``, ``condition_embed_dim``, and ``time_embed_dim``, + ``hidden_dim``, ``condition_dim``, and ``time_embed_dim``, respectively. The output of each block is concatenated and passed through a final block of dimension ``joint_hidden_dim``. Args: output_dim: Dimensionality of the neural vector field. - latent_embed_dim: Dimensionality of the embedding of the data. - condition_dim: Dimensionality of the conditioning vector. - condition_embed_dim: Dimensionality of the embedding of the condition. - If :obj:`None`, set to ``latent_embed_dim``. - t_embed_dim: Dimensionality of the time embedding. - If :obj:`None`, set to ``latent_embed_dim``. + hidden_dim: Dimensionality of the embedding of the data. num_layers: Number of layers. + condition_dim: Dimensionality of the embedding of the condition. + If :obj:`None`, TODO. + time_dim: Dimensionality of the time embedding. + If :obj:`None`, set to ``hidden_dim``. act_fn: Activation function. n_freqs: Number of frequencies to use for the time embedding. 
""" output_dim: int - latent_embed_dim: int - condition_dim: int = 0 - condition_embed_dim: Optional[int] = None - t_embed_dim: Optional[int] = None + hidden_dim: int num_layers: int = 3 + condition_dim: Optional[int] = None + time_dim: Optional[int] = None act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.silu n_freqs: int = 128 - def __post_init__(self) -> None: - if self.condition_embed_dim is None: - self.condition_embed_dim = self.latent_embed_dim - if self.t_embed_dim is None: - self.t_embed_dim = self.latent_embed_dim - self.joint_hidden_dim = ( - self.latent_embed_dim + self.condition_embed_dim + self.t_embed_dim - ) - super().__post_init__() - @nn.compact def __call__( self, @@ -84,32 +72,36 @@ def __call__( Args: t: Time of shape ``[batch, 1]``. x: Data of shape ``[batch, ...]``. - condition: Conditioning vector of shape ``[batch, cond_dim]``. + condition: Conditioning vector of shape ``[batch, ...]``. Returns: Output of the neural vector field of shape ``[batch, output_dim]``. """ + time_dim = self.hidden_dim if self.time_dim is None else self.time_dim t = flow_layers.CyclicalTimeEncoder(self.n_freqs)(t) for _ in range(self.num_layers): - t = self.act_fn(nn.Dense(self.t_embed_dim)(t)) - x = self.act_fn(nn.Dense(self.latent_embed_dim)(x)) - if self.condition_dim > 0: + t = self.act_fn(nn.Dense(time_dim)(t)) + x = self.act_fn(nn.Dense(self.hidden_dim)(x)) + if self.condition_dim is not None: assert condition is not None, "TODO." 
- condition = self.act_fn(nn.Dense(self.condition_embed_dim)(condition)) + condition = self.act_fn(nn.Dense(self.condition_dim)(condition)) - arrs = [t, x] + ([] if condition is None else [condition]) - out = jnp.concatenate(arrs, axis=-1) + feats = [t, x] + ([] if condition is None else [condition]) + feats = jnp.concatenate(feats, axis=-1) + joint_dim = feats.shape[-1] for _ in range(self.num_layers): - out = self.act_fn(nn.Dense(self.joint_hidden_dim)(out)) - return nn.Dense(self.output_dim, use_bias=True)(out) + feats = self.act_fn(nn.Dense(joint_dim)(feats)) + + return nn.Dense(self.output_dim, use_bias=True)(feats) def create_train_state( self, rng: jax.Array, optimizer: optax.OptState, input_dim: int, + cond_dim: Optional[int] = None, ) -> train_state.TrainState: """Create the training state. @@ -117,14 +109,15 @@ def create_train_state( rng: Random number generator. optimizer: Optimizer. input_dim: Dimensionality of the input. + cond_dim: TODO. Returns: The training state. """ - params = self.init( - rng, jnp.ones((1, 1)), jnp.ones((1, input_dim)), - jnp.ones((1, self.condition_dim)) - )["params"] + t, x = jnp.ones((1, 1)), jnp.ones((1, input_dim)) + cond = jnp.ones((1, cond_dim)) if self.condition_dim is not None else None + + params = self.init(rng, t, x, cond)["params"] return train_state.TrainState.create( apply_fn=self.apply, params=params, tx=optimizer ) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 1c9c985a9..50e7dd504 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -60,7 +60,7 @@ def test_genot_linear_unconditional( neural_vf = VelocityField( output_dim=target_dim, condition_dim=source_dim + condition_dim, - latent_embed_dim=5, + hidden_dim=5, ) ot_solver = sinkhorn.Sinkhorn( ) if solver == "sinkhorn" else sinkhorn_lr.LRSinkhorn(rank=3) @@ -121,7 +121,7 @@ def test_genot_linear_conditional( neural_vf = VelocityField( output_dim=target_dim, condition_dim=source_dim + condition_dim, - 
latent_embed_dim=5, + hidden_dim=5, ) ot_solver = sinkhorn.Sinkhorn( ) if solver == "sinkhorn" else sinkhorn_lr.LRSinkhorn(rank=3) @@ -182,7 +182,7 @@ def test_genot_quad_unconditional( neural_vf = VelocityField( output_dim=target_dim, condition_dim=source_dim + condition_dim, - latent_embed_dim=5, + hidden_dim=5, ) ot_solver = gromov_wasserstein.GromovWasserstein( epsilon=1e-2 @@ -243,7 +243,7 @@ def test_genot_fused_unconditional( neural_vf = VelocityField( output_dim=target_dim, condition_dim=source_dim + condition_dim, - latent_embed_dim=5, + hidden_dim=5, ) ot_solver = gromov_wasserstein.GromovWasserstein( epsilon=1e-2 @@ -306,7 +306,7 @@ def test_genot_quad_conditional( neural_vf = VelocityField( output_dim=target_dim, condition_dim=source_dim + condition_dim, - latent_embed_dim=5, + hidden_dim=5, ) ot_solver = gromov_wasserstein.GromovWasserstein( epsilon=1e-2 @@ -372,7 +372,7 @@ def test_genot_fused_conditional( neural_vf = VelocityField( output_dim=target_dim, condition_dim=source_dim + condition_dim, - latent_embed_dim=5, + hidden_dim=5, ) ot_solver = gromov_wasserstein.GromovWasserstein( epsilon=1e-2 diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index ccca15214..60044293d 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -27,13 +27,13 @@ class TestOTFlowMatching: (3, "lin_dl_with_conds"), (4, "conditional_lin_dl")]) def test_fm(self, rng: jax.Array, cond_dim: int, dl: str, request): - input_dim, output_dim, latent_dim = 2, 2, 5 + output_dim, hidden_dim = 2, 5 dl = request.getfixturevalue(dl) neural_vf = models.VelocityField( - output_dim=output_dim, - condition_dim=cond_dim, - latent_embed_dim=latent_dim, + output_dim, + hidden_dim, + condition_dim=hidden_dim if cond_dim > 0 else None, ) fm = otfm.OTFlowMatching( neural_vf, @@ -41,7 +41,8 @@ def test_fm(self, rng: jax.Array, cond_dim: int, dl: str, request): match_fn=jax.jit(utils.match_linear), rng=rng, optimizer=optax.adam(learning_rate=1e-3), - 
input_dim=input_dim, + input_dim=2, # all dataloaders have dim `2` + cond_dim=cond_dim, ) _logs = fm(dl, n_iters=3) From fe74a57f0a63cb5c3f0dbf9a01e7e49c5d0419de Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Fri, 15 Mar 2024 17:23:54 +0100 Subject: [PATCH 129/186] Move cyclical time encoder --- src/ott/neural/flow_models/__init__.py | 2 +- src/ott/neural/flow_models/layers.py | 44 -------------------------- src/ott/neural/flow_models/models.py | 13 ++++---- src/ott/neural/flow_models/utils.py | 19 +++++++++++ tests/neural/otfm_test.py | 2 +- 5 files changed, 28 insertions(+), 52 deletions(-) delete mode 100644 src/ott/neural/flow_models/layers.py diff --git a/src/ott/neural/flow_models/__init__.py b/src/ott/neural/flow_models/__init__.py index cc2c4bfdb..2d6fca4b5 100644 --- a/src/ott/neural/flow_models/__init__.py +++ b/src/ott/neural/flow_models/__init__.py @@ -11,4 +11,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from . import flows, genot, layers, models, otfm, samplers +from . import flows, genot, models, otfm, samplers diff --git a/src/ott/neural/flow_models/layers.py b/src/ott/neural/flow_models/layers.py deleted file mode 100644 index 2f87f6cfc..000000000 --- a/src/ott/neural/flow_models/layers.py +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright OTT-JAX -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -import jax.numpy as jnp - -import flax.linen as nn - -__all__ = ["CyclicalTimeEncoder"] - - -class CyclicalTimeEncoder(nn.Module): - r"""A cyclical time encoder. - - Encodes time :math:`t` as :math:`cos(\hat{t})` and :math:`sin(\hat{t})` - where :math:`\hat{t} = [2\pi t, 2\pi 2 t,\dots, 2\pi n_f t]`. - - Args: - n_freqs: Frequency :math:`n_f` of the cyclical encoding. - """ - n_freqs: int = 128 - - @nn.compact - def __call__(self, t: jnp.ndarray) -> jnp.ndarray: # noqa: D102 - """Encode time :math:`t` into a cyclical representation. - - Args: - t: Time of shape ``[n, 1]``. - - Returns: - Encoded time of shape ``[n, 2 * n_freqs]``. - """ - freq = 2 * jnp.arange(self.n_freqs) * jnp.pi - t = freq * t - return jnp.concatenate([jnp.cos(t), jnp.sin(t)], axis=-1) diff --git a/src/ott/neural/flow_models/models.py b/src/ott/neural/flow_models/models.py index ff3183016..5590113e8 100644 --- a/src/ott/neural/flow_models/models.py +++ b/src/ott/neural/flow_models/models.py @@ -20,7 +20,7 @@ import optax from flax.training import train_state -import ott.neural.flow_models.layers as flow_layers +from ott.neural.flow_models import utils __all__ = ["VelocityField"] @@ -42,23 +42,24 @@ class VelocityField(nn.Module): a final block of dimension ``joint_hidden_dim``. Args: - output_dim: Dimensionality of the neural vector field. hidden_dim: Dimensionality of the embedding of the data. + output_dim: Dimensionality of the neural vector field. num_layers: Number of layers. condition_dim: Dimensionality of the embedding of the condition. If :obj:`None`, TODO. time_dim: Dimensionality of the time embedding. If :obj:`None`, set to ``hidden_dim``. + time_encoder: TODO. act_fn: Activation function. - n_freqs: Number of frequencies to use for the time embedding. 
""" - output_dim: int hidden_dim: int + output_dim: int num_layers: int = 3 condition_dim: Optional[int] = None time_dim: Optional[int] = None + time_encoder: Callable[[jnp.ndarray], + jnp.ndarray] = utils.cyclical_time_encoder act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.silu - n_freqs: int = 128 @nn.compact def __call__( @@ -78,8 +79,8 @@ def __call__( Output of the neural vector field of shape ``[batch, output_dim]``. """ time_dim = self.hidden_dim if self.time_dim is None else self.time_dim - t = flow_layers.CyclicalTimeEncoder(self.n_freqs)(t) + t = self.time_encoder(t) for _ in range(self.num_layers): t = self.act_fn(nn.Dense(time_dim)(t)) x = self.act_fn(nn.Dense(self.hidden_dim)(x)) diff --git a/src/ott/neural/flow_models/utils.py b/src/ott/neural/flow_models/utils.py index 21f91b350..d385edb2e 100644 --- a/src/ott/neural/flow_models/utils.py +++ b/src/ott/neural/flow_models/utils.py @@ -24,6 +24,7 @@ "match_quadratic", "sample_joint", "sample_conditional", + "cyclical_time_encoder", ] ScaleCost_t = Union[float, Literal["mean", "max_cost", "median"]] @@ -106,3 +107,21 @@ def sample_conditional( src_ixs = jnp.repeat(indices[:, None], k, axis=1) # (n, k) return src_ixs, tgt_ixs + + +def cyclical_time_encoder(t: jnp.ndarray, n_freqs: int = 128) -> jnp.ndarray: + r"""Encode time :math:`t` into a cyclical representation. + + Time :math:`t` is encoded as :math:`cos(\hat{t})` and :math:`sin(\hat{t})` + where :math:`\hat{t} = [2\pi t, 2\pi 2 t,\dots, 2\pi n_f t]`. + + Args: + t: Time of shape ``[n, 1]``. + n_freqs: Frequency :math:`n_f` of the cyclical encoding. + + Returns: + Encoded time of shape ``[n, 2 * n_freqs]``. 
+ """ + freq = 2 * jnp.arange(n_freqs) * jnp.pi + t = freq * t + return jnp.concatenate([jnp.cos(t), jnp.sin(t)], axis=-1) diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index 60044293d..30e38dba6 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -31,8 +31,8 @@ def test_fm(self, rng: jax.Array, cond_dim: int, dl: str, request): dl = request.getfixturevalue(dl) neural_vf = models.VelocityField( - output_dim, hidden_dim, + output_dim, condition_dim=hidden_dim if cond_dim > 0 else None, ) fm = otfm.OTFlowMatching( From 4affc14375518b4f064c5e2b067373e7c0eac14a Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Fri, 15 Mar 2024 17:27:58 +0100 Subject: [PATCH 130/186] Move more stuff to `utils` --- src/ott/neural/flow_models/__init__.py | 2 +- src/ott/neural/flow_models/otfm.py | 8 ++-- src/ott/neural/flow_models/samplers.py | 52 -------------------------- src/ott/neural/flow_models/utils.py | 34 +++++++++++++++++ tests/neural/genot_test.py | 2 +- 5 files changed, 40 insertions(+), 58 deletions(-) diff --git a/src/ott/neural/flow_models/__init__.py b/src/ott/neural/flow_models/__init__.py index 2d6fca4b5..a6239fa06 100644 --- a/src/ott/neural/flow_models/__init__.py +++ b/src/ott/neural/flow_models/__init__.py @@ -11,4 +11,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from . import flows, genot, models, otfm, samplers +from . 
import flows, genot, models, otfm, utils diff --git a/src/ott/neural/flow_models/otfm.py b/src/ott/neural/flow_models/otfm.py index d80d8e6b8..d436fd9a0 100644 --- a/src/ott/neural/flow_models/otfm.py +++ b/src/ott/neural/flow_models/otfm.py @@ -21,8 +21,8 @@ from flax.training import train_state from ott import utils -from ott.neural.flow_models import flows, models, samplers -from ott.neural.flow_models.utils import sample_joint +from ott.neural.flow_models import flows, models +from ott.neural.flow_models import utils as flow_utils __all__ = ["OTFlowMatching"] @@ -47,7 +47,7 @@ def __init__( velocity_field: models.VelocityField, flow: flows.BaseFlow, time_sampler: Callable[[jax.Array, int], - jnp.ndarray] = samplers.uniform_sampler, + jnp.ndarray] = flow_utils.uniform_sampler, match_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray], jnp.ndarray]] = None, **kwargs: Any, @@ -116,7 +116,7 @@ def __call__( # noqa: D102 if self.match_fn is not None: tmat = self.match_fn(src, tgt) - src_ixs, tgt_ixs = sample_joint(rng_resample, tmat) + src_ixs, tgt_ixs = flow_utils.sample_joint(rng_resample, tmat) src, tgt = src[src_ixs], tgt[tgt_ixs] src_cond = None if src_cond is None else src_cond[src_ixs] diff --git a/src/ott/neural/flow_models/samplers.py b/src/ott/neural/flow_models/samplers.py index 9bd85d8b0..e69de29bb 100644 --- a/src/ott/neural/flow_models/samplers.py +++ b/src/ott/neural/flow_models/samplers.py @@ -1,52 +0,0 @@ -# Copyright OTT-JAX -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -from typing import Optional - -import jax -import jax.numpy as jnp - -__all__ = ["uniform_sampler"] - - -def uniform_sampler( - rng: jax.Array, - num_samples: int, - low: float = 0.0, - high: float = 1.0, - offset: Optional[float] = None -) -> jnp.ndarray: - r"""Sample from a uniform distribution. - - Sample :math:`t` from a uniform distribution :math:`[low, high]`. - If `offset` is not :obj:`None`, one element :math:`t` is sampled from - :math:`[low, high]` and the K samples are constructed via - :math:`(t + k)/K \mod (high - low - offset) + low`. - - Args: - rng: Random number generator. - num_samples: Number of samples to generate. - low: Lower bound of the uniform distribution. - high: Upper bound of the uniform distribution. - offset: Offset of the uniform distribution. If :obj:`None`, no offset is - used. - - Returns: - An array with `num_samples` samples of the time :math:`t`. 
- """ - if offset is None: - return jax.random.uniform(rng, (num_samples, 1), minval=low, maxval=high) - - t = jax.random.uniform(rng, (1, 1), minval=low, maxval=high) - mod_term = ((high - low) - offset) - return (t + jnp.arange(num_samples)[:, None] / num_samples) % mod_term diff --git a/src/ott/neural/flow_models/utils.py b/src/ott/neural/flow_models/utils.py index d385edb2e..516342ed6 100644 --- a/src/ott/neural/flow_models/utils.py +++ b/src/ott/neural/flow_models/utils.py @@ -25,6 +25,7 @@ "sample_joint", "sample_conditional", "cyclical_time_encoder", + "uniform_sampler", ] ScaleCost_t = Union[float, Literal["mean", "max_cost", "median"]] @@ -125,3 +126,36 @@ def cyclical_time_encoder(t: jnp.ndarray, n_freqs: int = 128) -> jnp.ndarray: freq = 2 * jnp.arange(n_freqs) * jnp.pi t = freq * t return jnp.concatenate([jnp.cos(t), jnp.sin(t)], axis=-1) + + +def uniform_sampler( + rng: jax.Array, + num_samples: int, + low: float = 0.0, + high: float = 1.0, + offset: Optional[float] = None +) -> jnp.ndarray: + r"""Sample from a uniform distribution. + + Sample :math:`t` from a uniform distribution :math:`[low, high]`. + If `offset` is not :obj:`None`, one element :math:`t` is sampled from + :math:`[low, high]` and the K samples are constructed via + :math:`(t + k)/K \mod (high - low - offset) + low`. + + Args: + rng: Random number generator. + num_samples: Number of samples to generate. + low: Lower bound of the uniform distribution. + high: Upper bound of the uniform distribution. + offset: Offset of the uniform distribution. If :obj:`None`, no offset is + used. + + Returns: + An array with `num_samples` samples of the time :math:`t`. 
+ """ + if offset is None: + return jax.random.uniform(rng, (num_samples, 1), minval=low, maxval=high) + + t = jax.random.uniform(rng, (1, 1), minval=low, maxval=high) + mod_term = ((high - low) - offset) + return (t + jnp.arange(num_samples)[:, None] / num_samples) % mod_term diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 50e7dd504..abd8aad94 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -22,7 +22,7 @@ from ott.geometry import costs from ott.neural.flow_models.models import VelocityField -from ott.neural.flow_models.samplers import uniform_sampler +from ott.neural.flow_models.utils import uniform_sampler from ott.solvers.linear import sinkhorn, sinkhorn_lr from ott.solvers.quadratic import gromov_wasserstein, gromov_wasserstein_lr From 21ce5230646a0794e7918e1bd86e762906a5e8c8 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Fri, 15 Mar 2024 17:29:08 +0100 Subject: [PATCH 131/186] Remove `samplers.py` --- src/ott/neural/flow_models/samplers.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 src/ott/neural/flow_models/samplers.py diff --git a/src/ott/neural/flow_models/samplers.py b/src/ott/neural/flow_models/samplers.py deleted file mode 100644 index e69de29bb..000000000 From aa636ef11d7fd80104d26aab29449347fb064167 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Fri, 15 Mar 2024 17:39:54 +0100 Subject: [PATCH 132/186] Rename `cond_dim` -> `condition_dim` --- src/ott/neural/flow_models/genot.py | 2 +- src/ott/neural/flow_models/models.py | 8 ++++---- tests/neural/otfm_test.py | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/ott/neural/flow_models/genot.py b/src/ott/neural/flow_models/genot.py index 2ac511611..b27360d1c 100644 --- a/src/ott/neural/flow_models/genot.py +++ b/src/ott/neural/flow_models/genot.py @@ -49,7 +49,7 @@ def __init__( velocity_field: 
models.VelocityField, flow: flows.BaseFlow, time_sampler: Callable[[jax.Array, int], jnp.ndarray], - # TODO(mcihalk8): all args are optional + # TODO(michalk8): all args are optional data_match_fn: Callable[ [jnp.ndarray, jnp.ndarray, jnp.ndarray, jnp.ndarray], jnp.ndarray], latent_match_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray], diff --git a/src/ott/neural/flow_models/models.py b/src/ott/neural/flow_models/models.py index 5590113e8..a3b3261b0 100644 --- a/src/ott/neural/flow_models/models.py +++ b/src/ott/neural/flow_models/models.py @@ -28,7 +28,7 @@ class VelocityField(nn.Module): r"""Parameterized neural vector field. - The `VelocityField` learns a map :math:`v: \mathbb{R}\times \mathbb{R}^d + This class learns a map :math:`v: \mathbb{R}\times \mathbb{R}^d \rightarrow \mathbb{R}^d` solving the ODE :math:`\frac{dx}{dt} = v(t, x)`. Given a source distribution at time :math:`t_0`, the velocity field can be used to transport the source distribution given at :math:`t_0` to @@ -102,7 +102,7 @@ def create_train_state( rng: jax.Array, optimizer: optax.OptState, input_dim: int, - cond_dim: Optional[int] = None, + condition_dim: Optional[int] = None, ) -> train_state.TrainState: """Create the training state. @@ -110,13 +110,13 @@ def create_train_state( rng: Random number generator. optimizer: Optimizer. input_dim: Dimensionality of the input. - cond_dim: TODO. + condition_dim: TODO. Returns: The training state. 
""" t, x = jnp.ones((1, 1)), jnp.ones((1, input_dim)) - cond = jnp.ones((1, cond_dim)) if self.condition_dim is not None else None + cond = None if self.condition_dim is None else jnp.ones((1, condition_dim)) params = self.init(rng, t, x, cond)["params"] return train_state.TrainState.create( diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index 30e38dba6..a9b799d4a 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -42,7 +42,7 @@ def test_fm(self, rng: jax.Array, cond_dim: int, dl: str, request): rng=rng, optimizer=optax.adam(learning_rate=1e-3), input_dim=2, # all dataloaders have dim `2` - cond_dim=cond_dim, + condition_dim=cond_dim, ) _logs = fm(dl, n_iters=3) From da0ef92c3a626068f6db3158af533ea82d8413c8 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Fri, 15 Mar 2024 17:56:08 +0100 Subject: [PATCH 133/186] Nicer formatting --- src/ott/neural/flow_models/flows.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/src/ott/neural/flow_models/flows.py b/src/ott/neural/flow_models/flows.py index 150e2086e..2cde34833 100644 --- a/src/ott/neural/flow_models/flows.py +++ b/src/ott/neural/flow_models/flows.py @@ -44,9 +44,9 @@ def compute_mu_t( at time :math:`t`. Args: - t: Time :math:`t` of shape `(batch_size, 1)`. - src: Sample from the source distribution of shape `(batch_size, ...)`. - tgt: Sample from the target distribution of shape `(batch_size, ...)`. + t: Time :math:`t` of shape ``[batch, 1]``. + src: Sample from the source distribution of shape ``[batch, ...]``. + tgt: Sample from the target distribution of shape ``[batch, ...]``. """ @abc.abstractmethod @@ -54,7 +54,7 @@ def compute_sigma_t(self, t: jnp.ndarray) -> jnp.ndarray: """Compute the standard deviation of the probability path at time :math:`t`. Args: - t: Time :math:`t` of shape `(batch_size, 1)`. + t: Time :math:`t` of shape ``[batch, 1]``. 
Returns: Standard deviation of the probability path at time :math:`t`. @@ -70,9 +70,9 @@ def compute_ut( :math:`x_1` at time :math:`t`. Args: - t: Time :math:`t` of shape `(batch_size, 1)`. - src: Sample from the source distribution of shape `(batch_size, ...)`. - tgt: Sample from the target distribution of shape `(batch_size, ...)`. + t: Time :math:`t` of shape ``[batch, 1]``. + src: Sample from the source distribution of shape ``[batch, ...]``. + tgt: Sample from the target distribution of shape ``[batch, ...]``. Returns: Conditional vector field evaluated at time :math:`t`. @@ -88,9 +88,9 @@ def compute_xt( Args: rng: Random number generator. - t: Time :math:`t` of shape `(batch_size, 1)`. - src: Sample from the source distribution of shape `(batch_size, ...)`. - tgt: Sample from the target distribution of shape `(batch_size, ...)`. + t: Time :math:`t` of shape ``[batch, 1]``. + src: Sample from the source distribution of shape ``[batch, ...]``. + tgt: Sample from the target distribution of shape ``[batch, ...]``. Returns: Samples from the probability path between :math:`x_0` and :math:`x_1` @@ -132,7 +132,7 @@ def compute_sigma_t(self, t: jnp.ndarray) -> jnp.ndarray: r"""Compute noise of the flow at time :math:`t`. Args: - t: Time :math:`t` of shape `(batch_size, 1)`. + t: Time :math:`t` of shape ``[batch, 1]``. Returns: Constant, time-independent standard deviation :math:`\sigma`. @@ -155,7 +155,7 @@ def compute_sigma_t(self, t: jnp.ndarray) -> jnp.ndarray: r"""Compute noise of the flow at time :math:`t`. Args: - t: Time :math:`t` of shape `(batch_size, 1)`. + t: Time :math:`t` of shape ``[batch, 1]``. 
Returns: Samples from the probability path between :math:`x_0` and :math:`x_1` From de1c2646e8033094f9dc05a2335ff16ecba8b824 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Fri, 15 Mar 2024 17:57:19 +0100 Subject: [PATCH 134/186] Fix bug when sampling from the target --- src/ott/neural/data/datasets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ott/neural/data/datasets.py b/src/ott/neural/data/datasets.py index eca6f1e51..fb1bc345b 100644 --- a/src/ott/neural/data/datasets.py +++ b/src/ott/neural/data/datasets.py @@ -89,7 +89,7 @@ def _sample_from_target(self, src_ix: int) -> Item_t: src_cond = self.src_conditions[src_ix] tgt_ixs = self._tgt_cond_to_ix[src_cond] ix = self._rng.choice(tgt_ixs) - return self.src_data[ix] + return self.tgt_data[ix] def __getitem__(self, ix: int) -> Item_t: src = self.src_data[ix] From ce763f043befb5457c1975c3db0c03ec3b93977e Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Fri, 15 Mar 2024 17:57:42 +0100 Subject: [PATCH 135/186] Fix another bug when sampling from the data --- src/ott/neural/data/datasets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ott/neural/data/datasets.py b/src/ott/neural/data/datasets.py index fb1bc345b..63215e61e 100644 --- a/src/ott/neural/data/datasets.py +++ b/src/ott/neural/data/datasets.py @@ -95,7 +95,7 @@ def __getitem__(self, ix: int) -> Item_t: src = self.src_data[ix] src = {f"{self.SRC_PREFIX}_{k}": v for k, v in src.items()} - tgt = self.src_data[ix] if self.is_aligned else self._sample_from_target(ix) + tgt = self.tgt_data[ix] if self.is_aligned else self._sample_from_target(ix) tgt = {f"{self.TGT_PREFIX}_{k}": v for k, v in tgt.items()} return {**src, **tgt} From f9db2db98332f82fbd948e6ae1afd30e7ad5be19 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Fri, 15 Mar 2024 19:04:25 +0100 Subject: [PATCH 136/186] 
Add initial test for GW --- src/ott/neural/flow_models/genot.py | 32 ++++++++---- src/ott/neural/flow_models/otfm.py | 10 ++-- src/ott/neural/flow_models/utils.py | 13 +++++ tests/neural/conftest.py | 80 +++++++++++++---------------- tests/neural/genot_test.py | 56 ++++++++++++++++++-- tests/neural/otfm_test.py | 8 +-- 6 files changed, 130 insertions(+), 69 deletions(-) diff --git a/src/ott/neural/flow_models/genot.py b/src/ott/neural/flow_models/genot.py index b27360d1c..34a207b50 100644 --- a/src/ott/neural/flow_models/genot.py +++ b/src/ott/neural/flow_models/genot.py @@ -34,10 +34,10 @@ class GENOT: Args: velocity_field: Neural vector field parameterized by a neural network. flow: Flow between latent distribution and target distribution. - time_sampler: Sampler for the time. - of an input sample, see algorithm TODO. data_match_fn: Linear OT solver to match the latent distribution with the conditional distribution. + time_sampler: Sampler for the time. + of an input sample, see algorithm TODO. latent_match_fn: TODO. latent_noise_fn: TODO. k_samples_per_x: Number of samples drawn from the conditional distribution @@ -48,23 +48,28 @@ def __init__( self, velocity_field: models.VelocityField, flow: flows.BaseFlow, - time_sampler: Callable[[jax.Array, int], jnp.ndarray], - # TODO(michalk8): all args are optional data_match_fn: Callable[ [jnp.ndarray, jnp.ndarray, jnp.ndarray, jnp.ndarray], jnp.ndarray], + time_sampler: Callable[[jax.Array, int], + jnp.ndarray] = flow_utils.uniform_sampler, latent_match_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray], jnp.ndarray]] = None, - # TODO(michalk8): add a default for this? 
latent_noise_fn: Optional[Callable[[jax.Array, Tuple[int, ...]], jnp.ndarray]] = None, + # TODO(michalk8): rename, too descriptive k_samples_per_x: int = 1, **kwargs: Any, ): self.vf = velocity_field self.flow = flow - self.time_sampler = time_sampler self.data_match_fn = data_match_fn + self.time_sampler = time_sampler self.latent_match_fn = latent_match_fn + if latent_noise_fn is None: + dim = kwargs["input_dim"] + latent_noise_fn = functools.partial( + flow_utils.multivariate_normal, dim=dim + ) self.latent_noise_fn = latent_noise_fn self.k_samples_per_x = k_samples_per_x @@ -90,13 +95,14 @@ def loss_fn( source_conditions: Optional[jnp.ndarray], rng: jax.Array ): x_t = self.flow.compute_xt(rng, time, latent, target) - apply_fn = functools.partial(vf_state.apply_fn, {"params": params}) + cond = ( + source if source_conditions is None else + jnp.concatenate([source, source_conditions], axis=-1) + ) - cond_input = jnp.concatenate([ - source, source_conditions - ], axis=1) if source_conditions is not None else source - v_t = jax.vmap(apply_fn)(t=time, x=x_t, condition=cond_input) + v_t = vf_state.apply_fn({"params": params}, time, x_t, cond) u_t = self.flow.compute_ut(time, latent, target) + return jnp.mean((v_t - u_t) ** 2) grad_fn = jax.value_and_grad(loss_fn, has_aux=False) @@ -104,6 +110,7 @@ def loss_fn( vf_state.params, time, source, target, latent, source_conditions, rng ) + # TODO(michalk8): follow the convention with loss being first return vf_state.apply_gradients(grads=grads), loss return step_fn @@ -174,7 +181,10 @@ def prepare_data( self.vf_state, loss = self.step_fn( rng_step_fn, self.vf_state, time, src, tgt, latent, src_cond ) + training_logs["loss"].append(float(loss)) + if len(training_logs["loss"]) >= n_iters: + break return training_logs diff --git a/src/ott/neural/flow_models/otfm.py b/src/ott/neural/flow_models/otfm.py index d436fd9a0..a8b85a44e 100644 --- a/src/ott/neural/flow_models/otfm.py +++ b/src/ott/neural/flow_models/otfm.py @@ -35,8 
+35,8 @@ class OTFlowMatching: Args: velocity_field: Neural vector field parameterized by a neural network. flow: Flow between source and target distribution. - time_sampler: Sampler for the time. match_fn: TODO. + time_sampler: Sampler for the time. kwargs: TODO. """ @@ -46,10 +46,10 @@ def __init__( self, velocity_field: models.VelocityField, flow: flows.BaseFlow, - time_sampler: Callable[[jax.Array, int], - jnp.ndarray] = flow_utils.uniform_sampler, match_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray], jnp.ndarray]] = None, + time_sampler: Callable[[jax.Array, int], + jnp.ndarray] = flow_utils.uniform_sampler, **kwargs: Any, ): self.vf = velocity_field @@ -79,10 +79,8 @@ def loss_fn( x_t = self.flow.compute_xt(rng, t, source, target) v_t = vf_state.apply_fn({"params": params}, t, x_t, source_conditions) - # TODO(michalk8): should be removed - # apply_fn = functools.partial(vf_state.apply_fn, {"params": params}) - # v_t = jax.vmap(apply_fn)(t, x_t, source_conditions) u_t = self.flow.compute_ut(t, source, target) + return jnp.mean((v_t - u_t) ** 2) batch_size = len(source) diff --git a/src/ott/neural/flow_models/utils.py b/src/ott/neural/flow_models/utils.py index 516342ed6..a656f440f 100644 --- a/src/ott/neural/flow_models/utils.py +++ b/src/ott/neural/flow_models/utils.py @@ -26,6 +26,7 @@ "sample_conditional", "cyclical_time_encoder", "uniform_sampler", + "multivariate_normal", ] ScaleCost_t = Union[float, Literal["mean", "max_cost", "median"]] @@ -159,3 +160,15 @@ def uniform_sampler( t = jax.random.uniform(rng, (1, 1), minval=low, maxval=high) mod_term = ((high - low) - offset) return (t + jnp.arange(num_samples)[:, None] / num_samples) % mod_term + + +def multivariate_normal( + rng: jax.Array, + shape: Tuple[int, ...], + dim: int, + mean: float = 0.0, + cov: float = 1.0 +) -> jnp.ndarray: + mean = jnp.full(dim, fill_value=mean) + cov = jnp.diag(jnp.full(dim, fill_value=cov)) + return jax.random.multivariate_normal(rng, mean=mean, cov=cov, shape=shape) diff 
--git a/tests/neural/conftest.py b/tests/neural/conftest.py index c3cd11ce7..e7de132e8 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -26,39 +26,50 @@ def _ot_data( rng: np.random.Generator, *, n: int = 100, - dim: int = 2, + lin_dim: Optional[int] = None, + quad_dim: Optional[int] = None, condition: Optional[Union[float, np.ndarray]] = None, cond_dim: Optional[int] = None, offset: float = 0.0 ) -> datasets.OTData: - data = rng.normal(size=(n, dim)) + offset + assert lin_dim or quad_dim, "TODO" + + lin_data = None if lin_dim is None else ( + rng.normal(size=(n, lin_dim)) + offset + ) + quad_data = None if quad_dim is None else ( + rng.normal(size=(n, quad_dim)) + offset + ) if isinstance(condition, float): - cond_dim = dim if cond_dim is None else cond_dim + cond_dim = lin_dim if cond_dim is None else cond_dim condition = np.full((n, cond_dim), fill_value=condition) - return datasets.OTData(lin=data, condition=condition) + return datasets.OTData(lin=lin_data, quad=quad_data, condition=condition) @pytest.fixture() def lin_dl() -> DataLoader: """Returns a data loader for a simple Gaussian mixture.""" - n, d = 100, 2 + n, d = 128, 2 rng = np.random.default_rng(0) - src, tgt = _ot_data(rng, n=n, dim=d), _ot_data(rng, n=n, dim=d, offset=1.0) + + src = _ot_data(rng, n=n, lin_dim=d) + tgt = _ot_data(rng, n=n, lin_dim=d, offset=1.0) ds = datasets.OTDataset(src, tgt) + return DataLoader(ds, batch_size=16, shuffle=True) @pytest.fixture() def lin_dl_with_conds() -> DataLoader: - n, d, cond_dim = 100, 2, 3 + n, d, cond_dim = 128, 2, 3 rng = np.random.default_rng(13) src_cond = rng.normal(size=(n, cond_dim)) tgt_cond = rng.normal(size=(n, cond_dim)) - src = _ot_data(rng, n=n, dim=d, condition=src_cond) - tgt = _ot_data(rng, n=n, dim=d, condition=tgt_cond) + src = _ot_data(rng, n=n, lin_dim=d, condition=src_cond) + tgt = _ot_data(rng, n=n, lin_dim=d, condition=tgt_cond) ds = datasets.OTDataset(src, tgt) return DataLoader(ds, batch_size=16, shuffle=True) 
@@ -66,12 +77,12 @@ def lin_dl_with_conds() -> DataLoader: @pytest.fixture() def conditional_lin_dl() -> datasets.ConditionalLoader: - cond_dim = 4 + d, cond_dim = 2, 4 rng = np.random.default_rng(42) - src0 = _ot_data(rng, condition=0.0, cond_dim=cond_dim) + src0 = _ot_data(rng, condition=0.0, lin_dim=d, cond_dim=cond_dim) tgt0 = _ot_data(rng, offset=2.0) - src1 = _ot_data(rng, condition=1.0, cond_dim=cond_dim) + src1 = _ot_data(rng, condition=1.0, lin_dim=d, cond_dim=cond_dim) tgt1 = _ot_data(rng, offset=-2.0) src_ds = datasets.OTDataset(src0, tgt0) @@ -83,39 +94,22 @@ def conditional_lin_dl() -> datasets.ConditionalLoader: return datasets.ConditionalLoader([src_dl, tgt_dl]) -# TODO(michalk8): refactor the below for GENOT - - -@pytest.fixture(scope="module") -def genot_data_loader_linear(): - """Returns a data loader for a simple Gaussian mixture.""" - rng = np.random.default_rng(seed=0) - src = rng.normal(size=(100, 2)) - tgt = rng.normal(size=(100, 2)) + 1.0 - dataset = datasets.OTDataset(lin=src, tgt_lin=tgt) - return DataLoader(dataset, batch_size=16, shuffle=True) - - -@pytest.fixture(scope="module") -def genot_data_loader_linear_conditional(): - """Returns a data loader for a simple Gaussian mixture.""" - rng = np.random.default_rng(seed=0) - src_0 = rng.normal(size=(100, 2)) - tgt_0 = rng.normal(size=(100, 2)) + 1.0 - src_1 = rng.normal(size=(100, 2)) - tgt_1 = rng.normal(size=(100, 2)) + 1.0 - ds0 = datasets.OTDataset( - lin=src_0, tgt_lin=tgt_0, conditions=np.zeros_like(src_0) * 0.0 - ) - ds1 = datasets.OTDataset( - lin=src_1, tgt_lin=tgt_1, conditions=np.ones_like(src_1) * 1.0 +@pytest.fixture() +def quad_dl(): + n, d = 128, 2 + rng = np.random.default_rng(11) + src, tgt = _ot_data( + rng, n=n, quad_dim=d + ), _ot_data( + rng, n=n, quad_dim=d, offset=1.0 ) - sampler0 = torch.utils.data.RandomSampler(ds0, replacement=True) - sampler1 = torch.utils.data.RandomSampler(ds1, replacement=True) - dl0 = DataLoader(ds0, batch_size=16, sampler=sampler0) - dl1 = 
DataLoader(ds1, batch_size=16, sampler=sampler1) + ds = datasets.OTDataset(src, tgt) + return DataLoader(ds, batch_size=16, shuffle=True) - return datasets.ConditionalLoader((dl0, dl1)) + +@pytest.fixture() +def quad_dl_with_conds(): + pass @pytest.fixture(scope="module") diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index abd8aad94..a8c02f07b 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -16,15 +16,61 @@ import pytest +import jax import jax.numpy as jnp import optax -from ott.geometry import costs -from ott.neural.flow_models.models import VelocityField -from ott.neural.flow_models.utils import uniform_sampler -from ott.solvers.linear import sinkhorn, sinkhorn_lr -from ott.solvers.quadratic import gromov_wasserstein, gromov_wasserstein_lr +from ott.neural.flow_models import flows, genot, models, utils + + +def data_match_fn( + src_lin: jnp.ndarray, tgt_lin: jnp.ndarray, src_quad: jnp.ndarray, + tgt_quad: jnp.ndarray +): + # TODO(michalk8): extend for GW/FGW + return utils.match_linear(src_lin, tgt_lin) + + +class TestGENOT: + + # TODO(michalk8): test gw/fgw, k, etc. 
+ @pytest.mark.parametrize(("cond_dim", "dl"), [(2, "lin_dl")]) + def test_genot2(self, rng: jax.Array, cond_dim: int, dl: str, request): + rng_init, rng_call = jax.random.split(rng) + input_dim, hidden_dim = 2, 7 + dl = request.getfixturevalue(dl) + + vf = models.VelocityField( + hidden_dim=hidden_dim, + output_dim=input_dim, + # TODO(michalk8): the source is the condition + condition_dim=cond_dim, + ) + + model = genot.GENOT( + vf, + flow=flows.ConstantNoiseFlow(0.0), + data_match_fn=data_match_fn, + rng=rng_init, + optimizer=optax.adam(learning_rate=1e-3), + input_dim=input_dim, + condition_dim=cond_dim, + ) + + _logs = model(dl, n_iters=3, rng=rng_call) + + # TODO(michalk8): generalize for gw/fgw + batch = next(iter(dl)) + src = jnp.asarray(batch["src_lin"]) + tgt = jnp.asarray(batch["tgt_lin"]) + src_cond = batch.get("src_condition") + if src_cond is not None: + src_cond = jnp.asarray(src_cond) + + res = model.transport(src, condition=src_cond) + + assert jnp.sum(jnp.isnan(res)) == 0 class TestGENOTLin: diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index a9b799d4a..a4db65fa5 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -27,12 +27,12 @@ class TestOTFlowMatching: (3, "lin_dl_with_conds"), (4, "conditional_lin_dl")]) def test_fm(self, rng: jax.Array, cond_dim: int, dl: str, request): - output_dim, hidden_dim = 2, 5 + input_dim, hidden_dim = 2, 5 dl = request.getfixturevalue(dl) neural_vf = models.VelocityField( - hidden_dim, - output_dim, + hidden_dim=hidden_dim, + output_dim=input_dim, condition_dim=hidden_dim if cond_dim > 0 else None, ) fm = otfm.OTFlowMatching( @@ -41,7 +41,7 @@ def test_fm(self, rng: jax.Array, cond_dim: int, dl: str, request): match_fn=jax.jit(utils.match_linear), rng=rng, optimizer=optax.adam(learning_rate=1e-3), - input_dim=2, # all dataloaders have dim `2` + input_dim=input_dim, condition_dim=cond_dim, ) From 8bc9b104b611bbaec95fda9f0efcee0975aa6a88 Mon Sep 17 00:00:00 2001 From: Michal 
Klein <46717574+michalk8@users.noreply.github.com> Date: Fri, 15 Mar 2024 19:05:29 +0100 Subject: [PATCH 137/186] Remove old GENOT tests --- src/ott/neural/flow_models/utils.py | 1 + tests/neural/genot_test.py | 388 ---------------------------- 2 files changed, 1 insertion(+), 388 deletions(-) diff --git a/src/ott/neural/flow_models/utils.py b/src/ott/neural/flow_models/utils.py index a656f440f..1181de1cb 100644 --- a/src/ott/neural/flow_models/utils.py +++ b/src/ott/neural/flow_models/utils.py @@ -169,6 +169,7 @@ def multivariate_normal( mean: float = 0.0, cov: float = 1.0 ) -> jnp.ndarray: + """TODO.""" mean = jnp.full(dim, fill_value=mean) cov = jnp.diag(jnp.full(dim, fill_value=cov)) return jax.random.multivariate_normal(rng, mean=mean, cov=cov, shape=shape) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index a8c02f07b..063b770db 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -11,8 +11,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-import functools -from typing import Iterator, Literal, Optional, Union import pytest @@ -63,7 +61,6 @@ def test_genot2(self, rng: jax.Array, cond_dim: int, dl: str, request): # TODO(michalk8): generalize for gw/fgw batch = next(iter(dl)) src = jnp.asarray(batch["src_lin"]) - tgt = jnp.asarray(batch["tgt_lin"]) src_cond = batch.get("src_condition") if src_cond is not None: src_cond = jnp.asarray(src_cond) @@ -71,388 +68,3 @@ def test_genot2(self, rng: jax.Array, cond_dim: int, dl: str, request): res = model.transport(src, condition=src_cond) assert jnp.sum(jnp.isnan(res)) == 0 - - -class TestGENOTLin: - - @pytest.mark.parametrize("scale_cost", ["mean", 2.0]) - @pytest.mark.parametrize("k_samples_per_x", [1, 3]) - @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) - @pytest.mark.parametrize("solver", ["sinkhorn", "lr_sinkhorn"]) - def test_genot_linear_unconditional( - self, - genot_data_loader_linear: Iterator, - scale_cost: Union[float, Literal["mean"]], - k_samples_per_x: int, - solver_latent_to_data: Optional[str], - solver: Literal["sinkhorn", "lr_sinkhorn"], - ): - matcher_latent_to_data = ( - None if solver_latent_to_data is None else - base_solver.OTMatcherLinear(sinkhorn.Sinkhorn()) - ) - batch = next(iter(genot_data_loader_linear)) - source_lin, source_conditions, target_lin = jnp.array( - batch["source_lin"] - ), jnp.array(batch["source_conditions"]) if len(batch["source_conditions"] - ) else None, jnp.array( - batch["target_lin"] - ) - - source_dim = source_lin.shape[1] - target_dim = target_lin.shape[1] - condition_dim = 0 - - neural_vf = VelocityField( - output_dim=target_dim, - condition_dim=source_dim + condition_dim, - hidden_dim=5, - ) - ot_solver = sinkhorn.Sinkhorn( - ) if solver == "sinkhorn" else sinkhorn_lr.LRSinkhorn(rank=3) - ot_matcher = base_solver.OTMatcherLinear( - ot_solver, cost_fn=costs.SqEuclidean(), scale_cost=scale_cost - ) - time_sampler = uniform_sampler - optimizer = optax.adam(learning_rate=1e-3) - genot = 
GENOTLin( - neural_vf, - input_dim=source_dim, - output_dim=target_dim, - cond_dim=condition_dim, - ot_matcher=ot_matcher, - optimizer=optimizer, - time_sampler=time_sampler, - k_samples_per_x=k_samples_per_x, - matcher_latent_to_data=matcher_latent_to_data, - ) - genot( - genot_data_loader_linear, - genot_data_loader_linear, - n_iters=2, - valid_freq=3 - ) - - batch = next(iter(genot_data_loader_linear)) - result_forward = genot.transport( - source_lin, condition=source_conditions, forward=True - ) - assert isinstance(result_forward, jnp.ndarray) - assert jnp.sum(jnp.isnan(result_forward)) == 0 - - @pytest.mark.parametrize("k_samples_per_x", [1, 2]) - @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) - @pytest.mark.parametrize("solver", ["sinkhorn", "lr_sinkhorn"]) - def test_genot_linear_conditional( - self, genot_data_loader_linear_conditional: Iterator, - k_samples_per_x: int, solver_latent_to_data: Optional[str], - solver: Literal["sinkhorn", "lr_sinkhorn"] - ): - matcher_latent_to_data = ( - None if solver_latent_to_data is None else - base_solver.OTMatcherLinear(sinkhorn.Sinkhorn()) - ) - - batch = next(iter(genot_data_loader_linear_conditional)) - source_lin, source_conditions, target_lin = jnp.array( - batch["source_lin"] - ), jnp.array(batch["source_conditions"]) if len(batch["source_conditions"] - ) else None, jnp.array( - batch["target_lin"] - ) - source_dim = source_lin.shape[1] - target_dim = target_lin.shape[1] - condition_dim = source_conditions.shape[1] - - neural_vf = VelocityField( - output_dim=target_dim, - condition_dim=source_dim + condition_dim, - hidden_dim=5, - ) - ot_solver = sinkhorn.Sinkhorn( - ) if solver == "sinkhorn" else sinkhorn_lr.LRSinkhorn(rank=3) - ot_matcher = base_solver.OTMatcherLinear( - ot_solver, cost_fn=costs.SqEuclidean() - ) - time_sampler = uniform_sampler - - optimizer = optax.adam(learning_rate=1e-3) - genot = GENOTLin( - neural_vf, - input_dim=source_dim, - output_dim=target_dim, - 
cond_dim=condition_dim, - ot_matcher=ot_matcher, - optimizer=optimizer, - time_sampler=time_sampler, - k_samples_per_x=k_samples_per_x, - matcher_latent_to_data=matcher_latent_to_data, - ) - genot( - genot_data_loader_linear_conditional, - genot_data_loader_linear_conditional, - n_iters=2, - valid_freq=3 - ) - result_forward = genot.transport( - source_lin, condition=source_conditions, forward=True - ) - assert isinstance(result_forward, jnp.ndarray) - assert jnp.sum(jnp.isnan(result_forward)) == 0 - - -class TestGENOTQuad: - - @pytest.mark.parametrize("k_samples_per_x", [1, 2]) - @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) - @pytest.mark.parametrize("solver", ["gromov", "gromov_lr"]) - def test_genot_quad_unconditional( - self, genot_data_loader_quad: Iterator, k_samples_per_x: int, - solver_latent_to_data: Optional[str], solver: Literal["gromov", - "gromov_lr"] - ): - matcher_latent_to_data = ( - None if solver_latent_to_data is None else - base_solver.OTMatcherLinear(sinkhorn.Sinkhorn()) - ) - - batch = next(iter(genot_data_loader_quad)) - (source_quad, source_conditions, target_quad) = ( - jnp.array(batch["source_quad"]), jnp.array(batch["source_conditions"]) - if len(batch["source_conditions"]) else None, - jnp.array(batch["target_quad"]) - ) - source_dim = source_quad.shape[1] - target_dim = target_quad.shape[1] - condition_dim = 0 - neural_vf = VelocityField( - output_dim=target_dim, - condition_dim=source_dim + condition_dim, - hidden_dim=5, - ) - ot_solver = gromov_wasserstein.GromovWasserstein( - epsilon=1e-2 - ) if solver == "gromov" else gromov_wasserstein_lr.LRGromovWasserstein( - rank=3, epsilon=1e-2 - ) - ot_matcher = base_solver.OTMatcherQuad( - ot_solver, cost_fn=costs.SqEuclidean() - ) - - time_sampler = functools.partial(uniform_sampler, offset=1e-2) - optimizer = optax.adam(learning_rate=1e-3) - genot = GENOTQuad( - neural_vf, - input_dim=source_dim, - output_dim=target_dim, - cond_dim=condition_dim, - 
ot_matcher=ot_matcher, - optimizer=optimizer, - time_sampler=time_sampler, - k_samples_per_x=k_samples_per_x, - matcher_latent_to_data=matcher_latent_to_data, - ) - genot( - genot_data_loader_quad, genot_data_loader_quad, n_iters=2, valid_freq=3 - ) - - result_forward = genot.transport( - source_quad, condition=source_conditions, forward=True - ) - assert isinstance(result_forward, jnp.ndarray) - assert jnp.sum(jnp.isnan(result_forward)) == 0 - - @pytest.mark.parametrize("k_samples_per_x", [1, 2]) - @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) - @pytest.mark.parametrize("solver", ["gromov", "gromov_lr"]) - def test_genot_fused_unconditional( - self, genot_data_loader_fused: Iterator, k_samples_per_x: int, - solver_latent_to_data: Optional[str], solver: Literal["gromov", - "gromov_lr"] - ): - matcher_latent_to_data = ( - None if solver_latent_to_data is None else - base_solver.OTMatcherLinear(sinkhorn.Sinkhorn()) - ) - - batch = next(iter(genot_data_loader_fused)) - (source_lin, source_quad, source_conditions, target_lin, target_quad) = ( - jnp.array(batch["source_lin"]) if len(batch["source_lin"]) else None, - jnp.array(batch["source_quad"]), jnp.array(batch["source_conditions"]) - if len(batch["source_conditions"]) else None, - jnp.array(batch["target_lin"]) if len(batch["target_lin"]) else None, - jnp.array(batch["target_quad"]) - ) - source_dim = source_lin.shape[1] + source_quad.shape[1] - target_dim = target_lin.shape[1] + target_quad.shape[1] - condition_dim = 0 - neural_vf = VelocityField( - output_dim=target_dim, - condition_dim=source_dim + condition_dim, - hidden_dim=5, - ) - ot_solver = gromov_wasserstein.GromovWasserstein( - epsilon=1e-2 - ) if solver == "gromov" else gromov_wasserstein_lr.LRGromovWasserstein( - rank=3, epsilon=1e-2 - ) - ot_matcher = base_solver.OTMatcherQuad( - ot_solver, cost_fn=costs.SqEuclidean(), fused_penalty=0.5 - ) - - optimizer = optax.adam(learning_rate=1e-3) - genot = GENOTQuad( - neural_vf, - 
input_dim=source_dim, - output_dim=target_dim, - cond_dim=condition_dim, - ot_matcher=ot_matcher, - optimizer=optimizer, - k_samples_per_x=k_samples_per_x, - matcher_latent_to_data=matcher_latent_to_data, - ) - genot( - genot_data_loader_fused, - genot_data_loader_fused, - n_iters=2, - valid_freq=3 - ) - - result_forward = genot.transport( - jnp.concatenate((source_lin, source_quad), axis=1), - condition=source_conditions, - forward=True - ) - assert isinstance(result_forward, jnp.ndarray) - assert jnp.sum(jnp.isnan(result_forward)) == 0 - - @pytest.mark.parametrize("k_samples_per_x", [1, 2]) - @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) - @pytest.mark.parametrize("solver", ["gromov", "gromov_lr"]) - def test_genot_quad_conditional( - self, genot_data_loader_quad_conditional: Iterator, k_samples_per_x: int, - solver_latent_to_data: Optional[str], solver: Literal["gromov", - "gromov_lr"] - ): - matcher_latent_to_data = ( - None if solver_latent_to_data is None else - base_solver.OTMatcherLinear(sinkhorn.Sinkhorn()) - ) - - batch = next(iter(genot_data_loader_quad_conditional)) - (source_quad, source_conditions, target_quad) = ( - jnp.array(batch["source_quad"]), jnp.array(batch["source_conditions"]) - if len(batch["source_conditions"]) else None, - jnp.array(batch["target_quad"]) - ) - - source_dim = source_quad.shape[1] - target_dim = target_quad.shape[1] - condition_dim = source_conditions.shape[1] - neural_vf = VelocityField( - output_dim=target_dim, - condition_dim=source_dim + condition_dim, - hidden_dim=5, - ) - ot_solver = gromov_wasserstein.GromovWasserstein( - epsilon=1e-2 - ) if solver == "gromov" else gromov_wasserstein_lr.LRGromovWasserstein( - rank=3, epsilon=1e-2 - ) - ot_matcher = base_solver.OTMatcherQuad( - ot_solver, cost_fn=costs.SqEuclidean() - ) - time_sampler = uniform_sampler - - optimizer = optax.adam(learning_rate=1e-3) - genot = GENOTQuad( - neural_vf, - input_dim=source_dim, - output_dim=target_dim, - 
cond_dim=condition_dim, - ot_matcher=ot_matcher, - optimizer=optimizer, - time_sampler=time_sampler, - k_samples_per_x=k_samples_per_x, - matcher_latent_to_data=matcher_latent_to_data, - ) - genot( - genot_data_loader_quad_conditional, - genot_data_loader_quad_conditional, - n_iters=2, - valid_freq=3 - ) - - result_forward = genot.transport( - source_quad, condition=source_conditions, forward=True - ) - assert isinstance(result_forward, jnp.ndarray) - assert jnp.sum(jnp.isnan(result_forward)) == 0 - - @pytest.mark.parametrize("k_samples_per_x", [1, 2]) - @pytest.mark.parametrize("solver_latent_to_data", [None, "sinkhorn"]) - @pytest.mark.parametrize("solver", ["gromov", "gromov_lr"]) - def test_genot_fused_conditional( - self, genot_data_loader_fused_conditional: Iterator, k_samples_per_x: int, - solver_latent_to_data: Optional[str], solver: Literal["gromov", - "gromov_lr"] - ): - solver_latent_to_data = ( - None if solver_latent_to_data is None else sinkhorn.Sinkhorn() - ) - matcher_latent_to_data = ( - None if solver_latent_to_data is None else - base_solver.OTMatcherLinear(sinkhorn.Sinkhorn()) - ) - batch = next(iter(genot_data_loader_fused_conditional)) - (source_lin, source_quad, source_conditions, target_lin, target_quad) = ( - jnp.array(batch["source_lin"]) if len(batch["source_lin"]) else None, - jnp.array(batch["source_quad"]), jnp.array(batch["source_conditions"]) - if len(batch["source_conditions"]) else None, - jnp.array(batch["target_lin"]) if len(batch["target_lin"]) else None, - jnp.array(batch["target_quad"]) - ) - source_dim = source_lin.shape[1] + source_quad.shape[1] - target_dim = target_lin.shape[1] + target_quad.shape[1] - condition_dim = source_conditions.shape[1] - neural_vf = VelocityField( - output_dim=target_dim, - condition_dim=source_dim + condition_dim, - hidden_dim=5, - ) - ot_solver = gromov_wasserstein.GromovWasserstein( - epsilon=1e-2 - ) if solver == "gromov" else gromov_wasserstein_lr.LRGromovWasserstein( - rank=3, epsilon=1e-2 - 
) - ot_matcher = base_solver.OTMatcherQuad( - ot_solver, cost_fn=costs.SqEuclidean(), fused_penalty=0.5 - ) - time_sampler = uniform_sampler - optimizer = optax.adam(learning_rate=1e-3) - - genot = GENOTQuad( - neural_vf, - input_dim=source_dim, - output_dim=target_dim, - cond_dim=condition_dim, - ot_matcher=ot_matcher, - optimizer=optimizer, - time_sampler=time_sampler, - k_samples_per_x=k_samples_per_x, - matcher_latent_to_data=matcher_latent_to_data, - ) - genot( - genot_data_loader_fused_conditional, - genot_data_loader_fused_conditional, - n_iters=2, - valid_freq=3 - ) - - result_forward = genot.transport( - jnp.concatenate((source_lin, source_quad), axis=1), - condition=source_conditions, - forward=True - ) - assert isinstance(result_forward, jnp.ndarray) - assert jnp.sum(jnp.isnan(result_forward)) == 0 From 6f4f8640daa6a77180168ed2ffc9d2febb50d7b3 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Fri, 15 Mar 2024 19:06:21 +0100 Subject: [PATCH 138/186] Remove old dataloaders --- tests/neural/conftest.py | 82 ---------------------------------------- 1 file changed, 82 deletions(-) diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index e7de132e8..a3d89d959 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -16,7 +16,6 @@ import pytest import numpy as np -import torch from torch.utils.data import DataLoader from ott.neural.data import datasets @@ -110,84 +109,3 @@ def quad_dl(): @pytest.fixture() def quad_dl_with_conds(): pass - - -@pytest.fixture(scope="module") -def genot_data_loader_quad(): - """Returns a data loader for a simple Gaussian mixture.""" - rng = np.random.default_rng(seed=0) - src = rng.normal(size=(100, 2)) - tgt = rng.normal(size=(100, 1)) + 1.0 - dataset = datasets.OTDataset(quad=src, tgt_quad=tgt) - return DataLoader(dataset, batch_size=16, shuffle=True) - - -@pytest.fixture(scope="module") -def genot_data_loader_quad_conditional(): - """Returns a data loader 
for a simple Gaussian mixture.""" - rng = np.random.default_rng(seed=0) - src_0 = rng.normal(size=(100, 2)) - tgt_0 = rng.normal(size=(100, 1)) + 1.0 - src_1 = rng.normal(size=(100, 2)) - tgt_1 = rng.normal(size=(100, 1)) + 1.0 - ds0 = datasets.OTDataset( - quad=src_0, tgt_quad=tgt_0, conditions=np.zeros_like(src_0) * 0.0 - ) - ds1 = datasets.OTDataset( - quad=src_1, tgt_quad=tgt_1, conditions=np.ones_like(src_1) * 1.0 - ) - sampler0 = torch.utils.data.RandomSampler(ds0, replacement=True) - sampler1 = torch.utils.data.RandomSampler(ds1, replacement=True) - dl0 = DataLoader(ds0, batch_size=16, sampler=sampler0) - dl1 = DataLoader(ds1, batch_size=16, sampler=sampler1) - - return datasets.ConditionalLoader((dl0, dl1)) - - -@pytest.fixture(scope="module") -def genot_data_loader_fused(): - """Returns a data loader for a simple Gaussian mixture.""" - rng = np.random.default_rng(seed=0) - src_q = rng.normal(size=(100, 2)) - tgt_q = rng.normal(size=(100, 1)) + 1.0 - src_lin = rng.normal(size=(100, 2)) - tgt_lin = rng.normal(size=(100, 2)) + 1.0 - dataset = datasets.OTDataset( - lin=src_lin, quad=src_q, tgt_lin=tgt_lin, tgt_quad=tgt_q - ) - return DataLoader(dataset, batch_size=16, shuffle=True) - - -@pytest.fixture(scope="module") -def genot_data_loader_fused_conditional(): - """Returns a data loader for a simple Gaussian mixture.""" - rng = np.random.default_rng(seed=0) - src_q_0 = rng.normal(size=(100, 2)) - tgt_q_0 = rng.normal(size=(100, 1)) + 1.0 - src_lin_0 = rng.normal(size=(100, 2)) - tgt_lin_0 = rng.normal(size=(100, 2)) + 1.0 - - src_q_1 = 2 * rng.normal(size=(100, 2)) - tgt_q_1 = 2 * rng.normal(size=(100, 1)) + 1.0 - src_lin_1 = 2 * rng.normal(size=(100, 2)) - tgt_lin_1 = 2 * rng.normal(size=(100, 2)) + 1.0 - - ds0 = datasets.OTDataset( - lin=src_lin_0, - tgt_lin=tgt_lin_0, - quad=src_q_0, - tgt_quad=tgt_q_0, - conditions=np.zeros_like(src_lin_0) * 0.0 - ) - ds1 = datasets.OTDataset( - lin=src_lin_1, - tgt_lin=tgt_lin_1, - quad=src_q_1, - tgt_quad=tgt_q_1, - 
conditions=np.ones_like(src_lin_1) * 1.0 - ) - sampler0 = torch.utils.data.RandomSampler(ds0, replacement=True) - sampler1 = torch.utils.data.RandomSampler(ds1, replacement=True) - dl0 = DataLoader(ds0, batch_size=16, sampler=sampler0) - dl1 = DataLoader(ds1, batch_size=16, sampler=sampler1) - return datasets.ConditionalLoader((dl0, dl1)) From 11911c4081c9d4fc7317bf92fd1232d4b70c9fa8 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Sun, 17 Mar 2024 23:48:33 +0100 Subject: [PATCH 139/186] Add more todos --- src/ott/neural/flow_models/genot.py | 11 +++++++---- src/ott/neural/flow_models/otfm.py | 6 ++++-- tests/neural/conftest.py | 9 ++++----- 3 files changed, 15 insertions(+), 11 deletions(-) diff --git a/src/ott/neural/flow_models/genot.py b/src/ott/neural/flow_models/genot.py index 34a207b50..fc550cd93 100644 --- a/src/ott/neural/flow_models/genot.py +++ b/src/ott/neural/flow_models/genot.py @@ -12,11 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. import functools -from typing import Any, Callable, Dict, List, Optional, Tuple +from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple import jax import jax.numpy as jnp import jax.tree_util as jtu +import numpy as np import diffrax from flax.training import train_state @@ -40,7 +41,9 @@ class GENOT: of an input sample, see algorithm TODO. latent_match_fn: TODO. latent_noise_fn: TODO. + # TODO(michalk8): rename k_samples_per_x: Number of samples drawn from the conditional distribution + # TODO(michalk8): expose all args for the train state? kwargs: TODO. 
""" @@ -48,6 +51,7 @@ def __init__( self, velocity_field: models.VelocityField, flow: flows.BaseFlow, + # TODO(michalk8): all of these can be optional, explain in the docs data_match_fn: Callable[ [jnp.ndarray, jnp.ndarray, jnp.ndarray, jnp.ndarray], jnp.ndarray], time_sampler: Callable[[jax.Array, int], @@ -66,9 +70,8 @@ def __init__( self.time_sampler = time_sampler self.latent_match_fn = latent_match_fn if latent_noise_fn is None: - dim = kwargs["input_dim"] latent_noise_fn = functools.partial( - flow_utils.multivariate_normal, dim=dim + flow_utils.multivariate_normal, dim=kwargs["input_dim"] ) self.latent_noise_fn = latent_noise_fn self.k_samples_per_x = k_samples_per_x @@ -117,7 +120,7 @@ def loss_fn( def __call__( self, - loader: Any, + loader: Iterable[Dict[str, np.ndarray]], n_iters: int, rng: Optional[jax.Array] = None ) -> Dict[str, List[float]]: diff --git a/src/ott/neural/flow_models/otfm.py b/src/ott/neural/flow_models/otfm.py index a8b85a44e..e1ad5aaab 100644 --- a/src/ott/neural/flow_models/otfm.py +++ b/src/ott/neural/flow_models/otfm.py @@ -11,11 +11,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, Callable, Dict, List, Optional, Tuple +from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple import jax import jax.numpy as jnp import jax.tree_util as jtu +import numpy as np import diffrax from flax.training import train_state @@ -37,6 +38,7 @@ class OTFlowMatching: flow: Flow between source and target distribution. match_fn: TODO. time_sampler: Sampler for the time. + # TODO(michalk8): expose all args for the train state? kwargs: TODO. 
""" @@ -97,7 +99,7 @@ def loss_fn( # TODO(michalk8): refactor in the future PR to just do one step def __call__( # noqa: D102 self, - loader: Any, # TODO(michalk8): type it correctly + loader: Iterable[Dict[str, np.ndarray]], *, n_iters: int, rng: Optional[jax.Array] = None, diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index a3d89d959..d0bc11e7e 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -97,12 +97,11 @@ def conditional_lin_dl() -> datasets.ConditionalLoader: def quad_dl(): n, d = 128, 2 rng = np.random.default_rng(11) - src, tgt = _ot_data( - rng, n=n, quad_dim=d - ), _ot_data( - rng, n=n, quad_dim=d, offset=1.0 - ) + + src = _ot_data(rng, n=n, quad_dim=d) + tgt = _ot_data(rng, n=n, quad_dim=d, offset=1.0) ds = datasets.OTDataset(src, tgt) + return DataLoader(ds, batch_size=16, shuffle=True) From a8de2ea17ff920ca536265a333ce843fe5a00842 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 19 Mar 2024 10:08:54 +0100 Subject: [PATCH 140/186] add docs to dataloader --- src/ott/neural/data/datasets.py | 23 +++++++++++++++++++---- 1 file changed, 19 insertions(+), 4 deletions(-) diff --git a/src/ott/neural/data/datasets.py b/src/ott/neural/data/datasets.py index 63215e61e..0ec8edfe9 100644 --- a/src/ott/neural/data/datasets.py +++ b/src/ott/neural/data/datasets.py @@ -24,7 +24,13 @@ @dataclasses.dataclass(repr=False, frozen=True) class OTData: - """TODO.""" + """Distribution data for (conditional) optimal transport problems. + + Args: + lin: Linear (living in the shared space) part of the samples. + quad: Quadratic (living in the incomparable subspace) part of the samples. + condition: Condition corresponding to the data distribution. + """ lin: Optional[np.ndarray] = None quad: Optional[np.ndarray] = None condition: Optional[np.ndarray] = None @@ -41,7 +47,16 @@ def __len__(self) -> int: class OTDataset: - """TODO.""" + """Dataset for (conditional) optimal transport problems. 
+ + Args: + src_data: Samples from the source distribution. + tgt_data: Samples from the target distribution. + src_conditions: Conditions for the source data. + tgt_conditions: Conditions for the target data. + is_aligned: Whether the samples from `src_data` and `tgt_data` are paired. + seed: Random seed. + """ SRC_PREFIX = "src" TGT_PREFIX = "tgt" @@ -71,9 +86,9 @@ def __init__( self.is_aligned = is_aligned self._rng = np.random.default_rng(seed) - self._verify_integriy() + self._verify_integrity() - def _verify_integriy(self) -> None: + def _verify_integrity(self) -> None: assert len(self.src_data) == len(self.src_conditions) assert len(self.tgt_data) == len(self.tgt_conditions) From dfaf0428334e5d54d374c845e185cbf3d8f5f60c Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 19 Mar 2024 10:36:52 +0100 Subject: [PATCH 141/186] expose args in GENOT --- src/ott/neural/flow_models/genot.py | 68 ++++++++++++++++++++-------- src/ott/neural/flow_models/models.py | 5 +- 2 files changed, 53 insertions(+), 20 deletions(-) diff --git a/src/ott/neural/flow_models/genot.py b/src/ott/neural/flow_models/genot.py index fc550cd93..cd10784a2 100644 --- a/src/ott/neural/flow_models/genot.py +++ b/src/ott/neural/flow_models/genot.py @@ -30,21 +30,37 @@ class GENOT: - """TODO :cite:`klein_uscidda:23`. + """GENOT for entropic neural optimal transport :cite:`klein_uscidda:23`. + + GENOT (Generative Entropic Neural Optimal Transport) is a framework for + learning neural optimal transport plans between two distributions. It + allows for learning linear and quadratic (Fused) Gromov-Wasserstein couplings, + in both the balanced and the unbalanced setting. + Args: velocity_field: Neural vector field parameterized by a neural network. flow: Flow between latent distribution and target distribution. - data_match_fn: Linear OT solver to match the latent distribution - with the conditional distribution. - time_sampler: Sampler for the time. - of an input sample, see algorithm TODO. 
- latent_match_fn: TODO. - latent_noise_fn: TODO. + data_match_fn: OT solver to matching the source and the target distribution. + source_dim: Dimension of the source space. + target_dim: Dimension of the target space. + condition_dim: Dimension of the conditions. + time_sampler: Sampler for the time to learn the neural ODE. If :obj:`None`, + the time is uniformly sampled. # TODO(michalk8): rename k_samples_per_x: Number of samples drawn from the conditional distribution + per single source sample. + latent_match_fn: Linear OT matcher to optimally pair the latent + distribution with the `k_samples_per_x` samples of the conditional + distribution (corresponding to one sample). If :obj:`None`, samples + from the latent distribution are randomly paired with the samples from + the conditional distribution. + latent_noise_fn: Function to sample from the latent distribution in the + target space. If :obj:`None`, the latent distribution is sampled from a + multivariate normal distribution. # TODO(michalk8): expose all args for the train state? - kwargs: TODO. + kwargs: Keyword arguments for + :meth:`ott.neural.flow_models.models.VelocityField.create_train_state`. 
""" def __init__( @@ -54,14 +70,17 @@ def __init__( # TODO(michalk8): all of these can be optional, explain in the docs data_match_fn: Callable[ [jnp.ndarray, jnp.ndarray, jnp.ndarray, jnp.ndarray], jnp.ndarray], + source_dim: int, + target_dim: int, + condition_dim: int, time_sampler: Callable[[jax.Array, int], jnp.ndarray] = flow_utils.uniform_sampler, + # TODO(michalk8): rename, too descriptive + k_samples_per_x: int = 1, latent_match_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray], jnp.ndarray]] = None, latent_noise_fn: Optional[Callable[[jax.Array, Tuple[int, ...]], jnp.ndarray]] = None, - # TODO(michalk8): rename, too descriptive - k_samples_per_x: int = 1, **kwargs: Any, ): self.vf = velocity_field @@ -71,12 +90,16 @@ def __init__( self.latent_match_fn = latent_match_fn if latent_noise_fn is None: latent_noise_fn = functools.partial( - flow_utils.multivariate_normal, dim=kwargs["input_dim"] + flow_utils.multivariate_normal, dim=target_dim ) self.latent_noise_fn = latent_noise_fn self.k_samples_per_x = k_samples_per_x - self.vf_state = self.vf.create_train_state(**kwargs) + self.vf_state = self.vf.create_train_state( + input_dim=target_dim, + condition_dim=source_dim + condition_dim, + **kwargs + ) self.step_fn = self._get_step_fn() def _get_step_fn(self) -> Callable: @@ -113,8 +136,7 @@ def loss_fn( vf_state.params, time, source, target, latent, source_conditions, rng ) - # TODO(michalk8): follow the convention with loss being first - return vf_state.apply_gradients(grads=grads), loss + return loss, vf_state.apply_gradients(grads=grads) return step_fn @@ -124,7 +146,17 @@ def __call__( n_iters: int, rng: Optional[jax.Array] = None ) -> Dict[str, List[float]]: - """TODO.""" + """Train the GENOT model. + + Args: + loader: Data loader returning a dictionary with possible keys + `src_lin`, `tgt_lin`, `src_quad`, `tgt_quad`, `src_conditions`. + n_iters: Number of iterations to train the model. + rng: Random seed. + + Returns: + Training logs. 
+ """ def prepare_data( batch: Dict[str, jnp.ndarray] @@ -150,8 +182,8 @@ def prepare_data( rng = utils.default_prng_key(rng) training_logs = {"loss": []} for batch in loader: - rng = jax.random.split(rng, 6) - rng, rng_resample, rng_noise, rng_time, rng_latent, rng_step_fn = rng + rng = jax.random.split(rng, 5) + rng, rng_resample, rng_noise, rng_time, rng_step_fn = rng batch = jtu.tree_map(jnp.asarray, batch) (src, src_cond, tgt), matching_data = prepare_data(batch) @@ -181,7 +213,7 @@ def prepare_data( if src_cond is not None: src_cond = src_cond.reshape(-1, *src_cond.shape[2:]) - self.vf_state, loss = self.step_fn( + loss, self.vf_state = self.step_fn( rng_step_fn, self.vf_state, time, src, tgt, latent, src_cond ) diff --git a/src/ott/neural/flow_models/models.py b/src/ott/neural/flow_models/models.py index a3b3261b0..eb164e12c 100644 --- a/src/ott/neural/flow_models/models.py +++ b/src/ott/neural/flow_models/models.py @@ -109,8 +109,9 @@ def create_train_state( Args: rng: Random number generator. optimizer: Optimizer. - input_dim: Dimensionality of the input. - condition_dim: TODO. + input_dim: Dimensionality of the velocity field. + condition_dim: Dimensionsanilty of the condition + to the velocity field. Returns: The training state. 
From 2734c605c5c175fe3db5d5b6be2f4af4b7a436b9 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 19 Mar 2024 11:08:11 +0100 Subject: [PATCH 142/186] add docs and adapt data_match_fn --- src/ott/neural/flow_models/utils.py | 38 +++++++++++-- tests/neural/conftest.py | 2 +- tests/neural/genot_test.py | 83 +++++++++++++++++++++++------ 3 files changed, 102 insertions(+), 21 deletions(-) diff --git a/src/ott/neural/flow_models/utils.py b/src/ott/neural/flow_models/utils.py index 1181de1cb..00c246018 100644 --- a/src/ott/neural/flow_models/utils.py +++ b/src/ott/neural/flow_models/utils.py @@ -40,7 +40,19 @@ def match_linear( scale_cost: ScaleCost_t = 1.0, **kwargs: Any ) -> jnp.ndarray: - """TODO.""" + """Compute solution to a linear OT problem. + + Args: + x: Linear term of the source point cloud. + y: Linear term of the target point cloud. + cost_fn: Cost function. + epsilon: Regularization parameter. + scale_cost: Scaling of the cost matrix. + kwargs: Additional arguments for :func:`ott.solvers.linear.solve`. + + Returns: + Optimal transport matrix. + """ geom = pointcloud.PointCloud( x, y, cost_fn=cost_fn, epsilon=epsilon, scale_cost=scale_cost ) @@ -51,19 +63,35 @@ def match_linear( def match_quadratic( xx: jnp.ndarray, yy: jnp.ndarray, - xy: Optional[jnp.ndarray] = None, + x: Optional[jnp.ndarray] = None, + y: Optional[jnp.ndarray] = None, # TODO(michalk8): expose for all the costs scale_cost: ScaleCost_t = 1.0, cost_fn: Optional[costs.CostFn] = None, **kwargs: Any ) -> jnp.ndarray: - """TODO.""" + """Compute solution to a quadratic OT problem. + + Args: + xx: Quadratic (incomparable) term of the source point cloud. + yy: Quadratic (incomparable) term of the target point cloud. + x: Linear (fused) term of the source point cloud. + y: Linear (fused) term of the target point cloud. + scale_cost: Scaling of the cost matrix. + cost_fn: Cost function. + kwargs: Additional arguments for :func:`ott.solvers.quadratic.solve`. + + Returns: + Optimal transport matrix. 
+ """ geom_xx = pointcloud.PointCloud(xx, cost_fn=cost_fn, scale_cost=scale_cost) geom_yy = pointcloud.PointCloud(yy, cost_fn=cost_fn, scale_cost=scale_cost) - if xy is None: + if x is None: geom_xy = None else: - geom_xy = pointcloud.PointCloud(xy, cost_fn=cost_fn, scale_cost=scale_cost) + geom_xy = pointcloud.PointCloud( + x, y, cost_fn=cost_fn, scale_cost=scale_cost + ) out = quadratic.solve(geom_xx, geom_yy, geom_xy, **kwargs) return out.matrix diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index d0bc11e7e..ede2d4dc4 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -99,7 +99,7 @@ def quad_dl(): rng = np.random.default_rng(11) src = _ot_data(rng, n=n, quad_dim=d) - tgt = _ot_data(rng, n=n, quad_dim=d, offset=1.0) + tgt = _ot_data(rng, n=n, quad_dim=d + 2, offset=1.0) ds = datasets.OTDataset(src, tgt) return DataLoader(ds, batch_size=16, shuffle=True) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 063b770db..b88864f6a 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -11,6 +11,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import functools +from typing import Literal import pytest @@ -24,47 +26,98 @@ def data_match_fn( src_lin: jnp.ndarray, tgt_lin: jnp.ndarray, src_quad: jnp.ndarray, - tgt_quad: jnp.ndarray + tgt_quad: jnp.ndarray, *, type: Literal["linear", "quadratic", "fused"] ): - # TODO(michalk8): extend for GW/FGW - return utils.match_linear(src_lin, tgt_lin) + if type == "linear": + return utils.match_linear(x=src_lin, y=tgt_lin) + if type == "quadratic": + return utils.match_quadratic(xx=src_quad, yy=tgt_quad) + if type == "fused": + return utils.match_quadratic(xx=src_quad, yy=tgt_quad, x=src_lin, y=tgt_lin) + raise NotImplementedError(f"Unknown type: {type}") class TestGENOT: # TODO(michalk8): test gw/fgw, k, etc. 
- @pytest.mark.parametrize(("cond_dim", "dl"), [(2, "lin_dl")]) - def test_genot2(self, rng: jax.Array, cond_dim: int, dl: str, request): + @pytest.mark.parametrize("dl", ["lin_dl", "conditional_lin_dl"]) + def test_genot_linear(self, rng: jax.Array, dl: str, request): rng_init, rng_call = jax.random.split(rng) - input_dim, hidden_dim = 2, 7 + hidden_dim = 7 dl = request.getfixturevalue(dl) + batch = next(iter(dl)) + src = jnp.asarray(batch["src_lin"]) + tgt = jnp.asarray(batch["tgt_lin"]) + src_cond = batch.get("src_condition") + if src_cond is not None: + src_cond = jnp.asarray(src_cond) + src_dim = src.shape[-1] + tgt_dim = tgt.shape[-1] + cond_dim = src_cond.shape[-1] if src_cond is not None else 0 + vf = models.VelocityField( hidden_dim=hidden_dim, - output_dim=input_dim, - # TODO(michalk8): the source is the condition - condition_dim=cond_dim, + output_dim=tgt_dim, + condition_dim=src_dim + cond_dim, ) + data_mfn = functools.partial(data_match_fn, type="linear") + model = genot.GENOT( vf, flow=flows.ConstantNoiseFlow(0.0), - data_match_fn=data_match_fn, - rng=rng_init, - optimizer=optax.adam(learning_rate=1e-3), - input_dim=input_dim, + data_match_fn=data_mfn, + source_dim=src_dim, + target_dim=tgt_dim, condition_dim=cond_dim, + rng=rng_init, + optimizer=optax.adam(learning_rate=1e-4), ) _logs = model(dl, n_iters=3, rng=rng_call) + res = model.transport(src, condition=src_cond) + + assert jnp.sum(jnp.isnan(res)) == 0 + assert res.shape[-1] == tgt_dim + + @pytest.mark.parametrize("dl", ["quad_dl", "conditional_quad_dl"]) + def test_genot_quad(self, rng: jax.Array, dl: str, request): + rng_init, rng_call = jax.random.split(rng) + hidden_dim = 7 + dl = request.getfixturevalue(dl) - # TODO(michalk8): generalize for gw/fgw batch = next(iter(dl)) - src = jnp.asarray(batch["src_lin"]) + src = jnp.asarray(batch["src_quad"]) + tgt = jnp.asarray(batch["tgt_quad"]) src_cond = batch.get("src_condition") if src_cond is not None: src_cond = jnp.asarray(src_cond) + src_dim 
= src.shape[-1] + tgt_dim = tgt.shape[-1] + cond_dim = src_cond.shape[-1] if src_cond is not None else 0 + + vf = models.VelocityField( + hidden_dim=hidden_dim, + output_dim=tgt_dim, + condition_dim=src_dim + cond_dim, + ) + data_mfn = functools.partial(data_match_fn, type="quadratic") + + model = genot.GENOT( + vf, + flow=flows.ConstantNoiseFlow(0.0), + data_match_fn=data_mfn, + source_dim=src_dim, + target_dim=tgt_dim, + condition_dim=cond_dim, + rng=rng_init, + optimizer=optax.adam(learning_rate=1e-4), + ) + + _logs = model(dl, n_iters=3, rng=rng_call) res = model.transport(src, condition=src_cond) assert jnp.sum(jnp.isnan(res)) == 0 + assert res.shape[-1] == tgt_dim From 08e24d81c99852c69f3ff81ea1f9d7e93b5eccda Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 19 Mar 2024 12:08:32 +0100 Subject: [PATCH 143/186] fix linting --- tests/geometry/geodesic_test.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/tests/geometry/geodesic_test.py b/tests/geometry/geodesic_test.py index aa1ac37c5..4cf7aa44b 100644 --- a/tests/geometry/geodesic_test.py +++ b/tests/geometry/geodesic_test.py @@ -13,16 +13,15 @@ # limitations under the License. 
from typing import Optional, Union - -import jax -import jax.experimental.sparse as jesp -import jax.numpy as jnp - import networkx as nx from networkx.algorithms import shortest_paths from networkx.generators import balanced_tree, random_graphs import pytest + +import jax +import jax.experimental.sparse as jesp +import jax.numpy as jnp import numpy as np from ott.geometry import geodesic, geometry, graph From 7d7da3a986fd9f79ed99f03f5c64e5df797569c4 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 19 Mar 2024 12:40:40 +0100 Subject: [PATCH 144/186] fix data loading and add genot fused tests --- tests/neural/conftest.py | 56 ++++++++++++++++++++++++++++++++++---- tests/neural/genot_test.py | 44 ++++++++++++++++++++++++++++++ 2 files changed, 95 insertions(+), 5 deletions(-) diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index ede2d4dc4..581796c0c 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -41,7 +41,8 @@ def _ot_data( ) if isinstance(condition, float): - cond_dim = lin_dim if cond_dim is None else cond_dim + _dim = lin_dim if lin_dim is not None else quad_dim + cond_dim = _dim if cond_dim is None else cond_dim condition = np.full((n, cond_dim), fill_value=condition) return datasets.OTData(lin=lin_data, quad=quad_data, condition=condition) @@ -80,9 +81,9 @@ def conditional_lin_dl() -> datasets.ConditionalLoader: rng = np.random.default_rng(42) src0 = _ot_data(rng, condition=0.0, lin_dim=d, cond_dim=cond_dim) - tgt0 = _ot_data(rng, offset=2.0) + tgt0 = _ot_data(rng, lin_dim=d, offset=2.0) src1 = _ot_data(rng, condition=1.0, lin_dim=d, cond_dim=cond_dim) - tgt1 = _ot_data(rng, offset=-2.0) + tgt1 = _ot_data(rng, lin_dim=d, offset=-2.0) src_ds = datasets.OTDataset(src0, tgt0) tgt_ds = datasets.OTDataset(src1, tgt1) @@ -106,5 +107,50 @@ def quad_dl(): @pytest.fixture() -def quad_dl_with_conds(): - pass +def conditional_quad_dl() -> datasets.ConditionalLoader: + n, d, cond_dim = 128, 2, 5 + rng = 
np.random.default_rng(11) + + src0 = _ot_data(rng, n=n, condition=0.0, cond_dim=cond_dim, quad_dim=d) + tgt0 = _ot_data(rng, n=n, quad_dim=d, cond_dim=cond_dim, offset=2.0) + src1 = _ot_data(rng, n=n, condition=1.0, quad_dim=d + 2) + tgt1 = _ot_data(rng, n=n, quad_dim=d + 2, offset=-2.0) + + src_ds = datasets.OTDataset(src0, tgt0) + tgt_ds = datasets.OTDataset(src1, tgt1) + + src_dl = DataLoader(src_ds, batch_size=16, shuffle=True) + tgt_dl = DataLoader(tgt_ds, batch_size=16, shuffle=True) + + return datasets.ConditionalLoader([src_dl, tgt_dl]) + + +@pytest.fixture() +def fused_dl(): + n, lin_dim, d = 128, 2 + rng = np.random.default_rng(11) + + src = _ot_data(rng, n=n, lin_dim=lin_dim, quad_dim=d) + tgt = _ot_data(rng, n=n, lin_dim=lin_dim, quad_dim=d + 2, offset=1.0) + ds = datasets.OTDataset(src, tgt) + + return DataLoader(ds, batch_size=16, shuffle=True) + + +@pytest.fixture() +def conditional_fused_dl() -> datasets.ConditionalLoader: + n, lin_dim, d = 128, 3, 2 + rng = np.random.default_rng(11) + + src0 = _ot_data(rng, n=n, condition=0.0, lin_dim=lin_dim, quad_dim=d) + tgt0 = _ot_data(rng, n=n, lin_dim=lin_dim, quad_dim=d + 2, offset=2.0) + src1 = _ot_data(rng, n=n, condition=1.0, lin_dim=lin_dim, quad_dim=d) + tgt1 = _ot_data(rng, n=n, lin_dim=lin_dim, quad_dim=d + 2, offset=-2.0) + + src_ds = datasets.OTDataset(src0, tgt0) + tgt_ds = datasets.OTDataset(src1, tgt1) + + src_dl = DataLoader(src_ds, batch_size=16, shuffle=True) + tgt_dl = DataLoader(tgt_ds, batch_size=16, shuffle=True) + + return datasets.ConditionalLoader([src_dl, tgt_dl]) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index b88864f6a..d68ae9511 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -121,3 +121,47 @@ def test_genot_quad(self, rng: jax.Array, dl: str, request): assert jnp.sum(jnp.isnan(res)) == 0 assert res.shape[-1] == tgt_dim + + @pytest.mark.parametrize("dl", ["fused_dl", "conditional_fused_dl"]) + def test_genot_fused(self, rng: 
jax.Array, dl: str, request): + rng_init, rng_call = jax.random.split(rng) + hidden_dim = 7 + dl = request.getfixturevalue(dl) + + batch = next(iter(dl)) + src_lin = jnp.asarray(batch["src_lin"]) + tgt_lin = jnp.asarray(batch["tgt_lin"]) + src_quad = jnp.asarray(batch["src_quad"]) + tgt_quad = jnp.asarray(batch["tgt_quad"]) + src_cond = batch.get("src_condition") + if src_cond is not None: + src_cond = jnp.asarray(src_cond) + src_dim = src_lin.shape[-1] + src_quad.shape[-1] + tgt_dim = tgt_lin.shape[-1] + tgt_quad.shape[-1] + cond_dim = src_cond.shape[-1] if src_cond is not None else 0 + + vf = models.VelocityField( + hidden_dim=hidden_dim, + output_dim=tgt_dim, + condition_dim=src_dim + cond_dim, + ) + + data_mfn = functools.partial(data_match_fn, type="fused") + + model = genot.GENOT( + vf, + flow=flows.ConstantNoiseFlow(0.0), + data_match_fn=data_mfn, + source_dim=src_dim, + target_dim=tgt_dim, + condition_dim=cond_dim, + rng=rng_init, + optimizer=optax.adam(learning_rate=1e-4), + ) + + _logs = model(dl, n_iters=3, rng=rng_call) + src = jnp.concatenate([src_lin, src_quad], axis=-1) + res = model.transport(src, condition=src_cond) + + assert jnp.sum(jnp.isnan(res)) == 0 + assert res.shape[-1] == tgt_dim From 4c8477a124b96f23dcaf95fcb24cff1b0d303cd3 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 19 Mar 2024 13:44:59 +0100 Subject: [PATCH 145/186] genot tests passing --- tests/neural/conftest.py | 56 ++++++++++++++++++++++++++++------------ 1 file changed, 40 insertions(+), 16 deletions(-) diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index 581796c0c..b19962608 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -96,11 +96,12 @@ def conditional_lin_dl() -> datasets.ConditionalLoader: @pytest.fixture() def quad_dl(): - n, d = 128, 2 + n = 128 + quad_dim_src, quad_dim_tgt = 2, 4 rng = np.random.default_rng(11) - src = _ot_data(rng, n=n, quad_dim=d) - tgt = _ot_data(rng, n=n, quad_dim=d + 2, offset=1.0) + src = 
_ot_data(rng, n=n, quad_dim=quad_dim_src) + tgt = _ot_data(rng, n=n, quad_dim=quad_dim_tgt, offset=1.0) ds = datasets.OTDataset(src, tgt) return DataLoader(ds, batch_size=16, shuffle=True) @@ -108,13 +109,20 @@ def quad_dl(): @pytest.fixture() def conditional_quad_dl() -> datasets.ConditionalLoader: - n, d, cond_dim = 128, 2, 5 + n, cond_dim = 128, 5 + quad_dim_src, quad_dim_tgt = 2, 4 rng = np.random.default_rng(11) - src0 = _ot_data(rng, n=n, condition=0.0, cond_dim=cond_dim, quad_dim=d) - tgt0 = _ot_data(rng, n=n, quad_dim=d, cond_dim=cond_dim, offset=2.0) - src1 = _ot_data(rng, n=n, condition=1.0, quad_dim=d + 2) - tgt1 = _ot_data(rng, n=n, quad_dim=d + 2, offset=-2.0) + src0 = _ot_data( + rng, n=n, condition=0.0, cond_dim=cond_dim, quad_dim=quad_dim_src + ) + tgt0 = _ot_data( + rng, n=n, quad_dim=quad_dim_tgt, cond_dim=cond_dim, offset=2.0 + ) + src1 = _ot_data( + rng, n=n, condition=1.0, cond_dim=cond_dim, quad_dim=quad_dim_src + ) + tgt1 = _ot_data(rng, n=n, quad_dim=quad_dim_tgt, offset=-2.0) src_ds = datasets.OTDataset(src0, tgt0) tgt_ds = datasets.OTDataset(src1, tgt1) @@ -127,11 +135,12 @@ def conditional_quad_dl() -> datasets.ConditionalLoader: @pytest.fixture() def fused_dl(): - n, lin_dim, d = 128, 2 + n, lin_dim = 128, 6 + quad_dim_src, quad_dim_tgt = 2, 4 rng = np.random.default_rng(11) - src = _ot_data(rng, n=n, lin_dim=lin_dim, quad_dim=d) - tgt = _ot_data(rng, n=n, lin_dim=lin_dim, quad_dim=d + 2, offset=1.0) + src = _ot_data(rng, n=n, lin_dim=lin_dim, quad_dim=quad_dim_src) + tgt = _ot_data(rng, n=n, lin_dim=lin_dim, quad_dim=quad_dim_tgt, offset=1.0) ds = datasets.OTDataset(src, tgt) return DataLoader(ds, batch_size=16, shuffle=True) @@ -139,13 +148,28 @@ def fused_dl(): @pytest.fixture() def conditional_fused_dl() -> datasets.ConditionalLoader: - n, lin_dim, d = 128, 3, 2 + n, lin_dim, cond_dim = 128, 3, 7 + quad_dim_src, quad_dim_tgt = 2, 4 rng = np.random.default_rng(11) - src0 = _ot_data(rng, n=n, condition=0.0, lin_dim=lin_dim, quad_dim=d) 
- tgt0 = _ot_data(rng, n=n, lin_dim=lin_dim, quad_dim=d + 2, offset=2.0) - src1 = _ot_data(rng, n=n, condition=1.0, lin_dim=lin_dim, quad_dim=d) - tgt1 = _ot_data(rng, n=n, lin_dim=lin_dim, quad_dim=d + 2, offset=-2.0) + src0 = _ot_data( + rng, + n=n, + condition=0.0, + cond_dim=cond_dim, + lin_dim=lin_dim, + quad_dim=quad_dim_src + ) + tgt0 = _ot_data(rng, n=n, lin_dim=lin_dim, quad_dim=quad_dim_tgt, offset=2.0) + src1 = _ot_data( + rng, + n=n, + condition=1.0, + cond_dim=cond_dim, + lin_dim=lin_dim, + quad_dim=quad_dim_src + ) + tgt1 = _ot_data(rng, n=n, lin_dim=lin_dim, quad_dim=quad_dim_tgt, offset=-2.0) src_ds = datasets.OTDataset(src0, tgt0) tgt_ds = datasets.OTDataset(src1, tgt1) From 001d21dcd675e9f6ea2532ef8dd2d1d04df1d719 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 19 Mar 2024 13:54:36 +0100 Subject: [PATCH 146/186] adapt docs --- docs/neural/flow_models.rst | 13 ++++++++----- src/ott/neural/flow_models/genot.py | 1 - src/ott/neural/flow_models/models.py | 5 +++-- src/ott/neural/flow_models/otfm.py | 3 ++- src/ott/neural/flow_models/utils.py | 14 +++++++++++++- 5 files changed, 26 insertions(+), 10 deletions(-) diff --git a/docs/neural/flow_models.rst b/docs/neural/flow_models.rst index 5f9799292..32fe6e7f3 100644 --- a/docs/neural/flow_models.rst +++ b/docs/neural/flow_models.rst @@ -28,9 +28,7 @@ GENOT .. autosummary:: :toctree: _autosummary - genot.GENOTBase - genot.GENOTLin - genot.GENOTQuad + genot.GENOT Models ------ @@ -44,5 +42,10 @@ Utils .. 
autosummary:: :toctree: _autosummary - layers.CyclicalTimeEncoder - samplers.uniform_sampler + utils.match_linear + utils.match_quadratic + utils.sample_joint + utils.sample_conditional + utils.cyclical_time_encoder + utils.uniform_sampler + utils.multivariate_normal diff --git a/src/ott/neural/flow_models/genot.py b/src/ott/neural/flow_models/genot.py index cd10784a2..51e218b8d 100644 --- a/src/ott/neural/flow_models/genot.py +++ b/src/ott/neural/flow_models/genot.py @@ -67,7 +67,6 @@ def __init__( self, velocity_field: models.VelocityField, flow: flows.BaseFlow, - # TODO(michalk8): all of these can be optional, explain in the docs data_match_fn: Callable[ [jnp.ndarray, jnp.ndarray, jnp.ndarray, jnp.ndarray], jnp.ndarray], source_dim: int, diff --git a/src/ott/neural/flow_models/models.py b/src/ott/neural/flow_models/models.py index eb164e12c..5cee1b05f 100644 --- a/src/ott/neural/flow_models/models.py +++ b/src/ott/neural/flow_models/models.py @@ -46,10 +46,11 @@ class VelocityField(nn.Module): output_dim: Dimensionality of the neural vector field. num_layers: Number of layers. condition_dim: Dimensionality of the embedding of the condition. - If :obj:`None`, TODO. + If :obj:`None`, the velocity field has no conditions. time_dim: Dimensionality of the time embedding. If :obj:`None`, set to ``hidden_dim``. - time_encoder: TODO. + time_encoder: Function to encode the time input to the time-dependent + velocity field. act_fn: Activation function. """ hidden_dim: int diff --git a/src/ott/neural/flow_models/otfm.py b/src/ott/neural/flow_models/otfm.py index e1ad5aaab..56fff2e2e 100644 --- a/src/ott/neural/flow_models/otfm.py +++ b/src/ott/neural/flow_models/otfm.py @@ -36,7 +36,8 @@ class OTFlowMatching: Args: velocity_field: Neural vector field parameterized by a neural network. flow: Flow between source and target distribution. - match_fn: TODO. + match_fn: Function to match data points from the source distribution and + the target distribution. 
time_sampler: Sampler for the time. # TODO(michalk8): expose all args for the train state? kwargs: TODO. diff --git a/src/ott/neural/flow_models/utils.py b/src/ott/neural/flow_models/utils.py index 00c246018..3149106bb 100644 --- a/src/ott/neural/flow_models/utils.py +++ b/src/ott/neural/flow_models/utils.py @@ -117,7 +117,19 @@ def sample_conditional( k: int = 1, uniform_marginals: bool = False, ) -> Tuple[jnp.ndarray, jnp.ndarray]: - """TODO.""" + """Sample indices from a transport matrix. + + Args: + rng: Random number generator. + tmat: Transport matrix. + k: Expected number of samples to sample per row. + uniform_marginals: If :obj:`True`, sample exactly `k` samples + per row, otherwise sample proportionally to the sums of the + rows of the transport matrix. + + Returns: + Source and target indices sampled from the transport matrix. + """ assert k > 0, "Number of samples per row must be positive." n, m = tmat.shape From 52d8466d7651b62cb9801ece391a09443321af1a Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 19 Mar 2024 13:58:56 +0100 Subject: [PATCH 147/186] adapt docs --- docs/neural/index.rst | 1 - docs/neural/models.rst | 25 ------------------------- src/ott/neural/flow_models/genot.py | 2 +- src/ott/neural/flow_models/otfm.py | 11 +++++++++++ 4 files changed, 12 insertions(+), 27 deletions(-) delete mode 100644 docs/neural/models.rst diff --git a/docs/neural/index.rst b/docs/neural/index.rst index 06d9fd97b..9de1781f6 100644 --- a/docs/neural/index.rst +++ b/docs/neural/index.rst @@ -17,4 +17,3 @@ and solvers to estimate such neural networks. duality flow_models gaps - models diff --git a/docs/neural/models.rst b/docs/neural/models.rst deleted file mode 100644 index af6d4e33a..000000000 --- a/docs/neural/models.rst +++ /dev/null @@ -1,25 +0,0 @@ -ott.neural.models -================= -.. module:: ott.neural.models -.. 
currentmodule:: ott.neural.models - -This module implements models, network architectures and helper -functions which apply to various neural optimal transport solvers. - -Utils ------ -.. autosummary:: - :toctree: _autosummary - - base_solver.BaseOTMatcher - base_solver.OTMatcherLinear - base_solver.OTMatcherQuad - - -Neural networks ---------------- -.. autosummary:: - :toctree: _autosummary - - layers.MLPBlock - nets.RescalingMLP diff --git a/src/ott/neural/flow_models/genot.py b/src/ott/neural/flow_models/genot.py index 51e218b8d..29f59f858 100644 --- a/src/ott/neural/flow_models/genot.py +++ b/src/ott/neural/flow_models/genot.py @@ -151,7 +151,7 @@ def __call__( loader: Data loader returning a dictionary with possible keys `src_lin`, `tgt_lin`, `src_quad`, `tgt_quad`, `src_conditions`. n_iters: Number of iterations to train the model. - rng: Random seed. + rng: Random number generator. Returns: Training logs. diff --git a/src/ott/neural/flow_models/otfm.py b/src/ott/neural/flow_models/otfm.py index 56fff2e2e..d048581b4 100644 --- a/src/ott/neural/flow_models/otfm.py +++ b/src/ott/neural/flow_models/otfm.py @@ -105,6 +105,17 @@ def __call__( # noqa: D102 n_iters: int, rng: Optional[jax.Array] = None, ) -> Dict[str, List[float]]: + """Train the OTFlowMatching model. + + Args: + loader: Data loader returning a dictionary with possible keys + `src_lin`, `tgt_lin`, `src_condition`. + n_iters: Number of iterations to train the model. + rng: Random number generator. + + Returns: + Training logs. 
+ """ rng = utils.default_prng_key(rng) training_logs = {"loss": []} for batch in loader: From 9f230c8530d5453ff8973c02d3f34850807d61a2 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 19 Mar 2024 14:03:36 +0100 Subject: [PATCH 148/186] add error message --- src/ott/neural/flow_models/genot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ott/neural/flow_models/genot.py b/src/ott/neural/flow_models/genot.py index 29f59f858..5c64a5481 100644 --- a/src/ott/neural/flow_models/genot.py +++ b/src/ott/neural/flow_models/genot.py @@ -173,7 +173,7 @@ def prepare_data( src = jnp.concatenate([src_lin, src_quad], axis=1) tgt = jnp.concatenate([tgt_lin, tgt_quad], axis=1) else: - raise RuntimeError("TODO") + raise RuntimeError("Cannot infer OT problem type from data.") # TODO(michalk8): filter `None` from the `arrs`? return (src, batch.get("src_condition"), tgt), arrs From 6c816788cd28edc33e17d78a106fb9736d91361b Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 19 Mar 2024 15:29:16 +0100 Subject: [PATCH 149/186] clean docs --- docs/spelling/technical.txt | 1 + src/ott/neural/flow_models/genot.py | 4 ++-- src/ott/neural/flow_models/models.py | 2 +- src/ott/neural/flow_models/otfm.py | 4 ++-- 4 files changed, 6 insertions(+), 5 deletions(-) diff --git a/docs/spelling/technical.txt b/docs/spelling/technical.txt index 7c7ba4ae9..f5a2ffb57 100644 --- a/docs/spelling/technical.txt +++ b/docs/spelling/technical.txt @@ -25,6 +25,7 @@ McCann Monge Moreau SGD +Schrödinger Schur Seidel Sinkhorn diff --git a/src/ott/neural/flow_models/genot.py b/src/ott/neural/flow_models/genot.py index 5c64a5481..f115fe7a4 100644 --- a/src/ott/neural/flow_models/genot.py +++ b/src/ott/neural/flow_models/genot.py @@ -256,7 +256,7 @@ def transport( rng: Optional[jax.Array] = None, **kwargs: Any, ) -> jnp.ndarray: - """Transport data with the learnt plan. + """Transport data with the learned plan. 
This method pushes-forward the `source` to its conditional distribution by solving the neural ODE parameterized by the @@ -271,7 +271,7 @@ def transport( kwargs: Keyword arguments for the ODE solver. Returns: - The push-forward or pull-back distribution defined by the learnt + The push-forward or pull-back distribution defined by the learned transport plan. """ diff --git a/src/ott/neural/flow_models/models.py b/src/ott/neural/flow_models/models.py index 5cee1b05f..abf3ec8f7 100644 --- a/src/ott/neural/flow_models/models.py +++ b/src/ott/neural/flow_models/models.py @@ -111,7 +111,7 @@ def create_train_state( rng: Random number generator. optimizer: Optimizer. input_dim: Dimensionality of the velocity field. - condition_dim: Dimensionsanilty of the condition + condition_dim: Dimensionality of the condition to the velocity field. Returns: diff --git a/src/ott/neural/flow_models/otfm.py b/src/ott/neural/flow_models/otfm.py index d048581b4..ad1a70522 100644 --- a/src/ott/neural/flow_models/otfm.py +++ b/src/ott/neural/flow_models/otfm.py @@ -154,7 +154,7 @@ def transport( t1: float = 1.0, **kwargs: Any, ) -> jnp.ndarray: - """Transport data with the learnt map. + """Transport data with the learned map. This method pushes-forward the data by solving the neural ODE parameterized by the velocity field. @@ -167,7 +167,7 @@ def transport( kwargs: Keyword arguments for the ODE solver. Returns: - The push-forward or pull-back distribution defined by the learnt + The push-forward or pull-back distribution defined by the learned transport plan. 
""" From e77cc34f523d74ff3390a27e788a91819e7ceda2 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 19 Mar 2024 15:46:59 +0100 Subject: [PATCH 150/186] comprise genot tests --- docs/spelling/misc.txt | 1 + docs/spelling/technical.txt | 2 + tests/neural/genot_test.py | 127 ++++++++++-------------------------- 3 files changed, 39 insertions(+), 91 deletions(-) diff --git a/docs/spelling/misc.txt b/docs/spelling/misc.txt index 26bc961ce..4be10fe05 100644 --- a/docs/spelling/misc.txt +++ b/docs/spelling/misc.txt @@ -1,4 +1,5 @@ Eulerian +Utils alg arg args diff --git a/docs/spelling/technical.txt b/docs/spelling/technical.txt index f5a2ffb57..f7997b48c 100644 --- a/docs/spelling/technical.txt +++ b/docs/spelling/technical.txt @@ -47,6 +47,7 @@ chromatin collinear covariance covariances +dataclass dataloaders dataset datasets @@ -111,6 +112,7 @@ preprocess preprocessing proteome prox +pytree quantile quantiles quantizes diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index d68ae9511..a5f061335 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -39,62 +39,40 @@ def data_match_fn( class TestGENOT: - # TODO(michalk8): test gw/fgw, k, etc. 
- @pytest.mark.parametrize("dl", ["lin_dl", "conditional_lin_dl"]) - def test_genot_linear(self, rng: jax.Array, dl: str, request): + @pytest.mark.parametrize( + "dl", [ + "lin_dl", "conditional_lin_dl", "quad_dl", "conditional_quad_dl", + "fused_dl", "conditional_fused_dl" + ] + ) + def test_genot(self, rng: jax.Array, dl: str, request): rng_init, rng_call = jax.random.split(rng) hidden_dim = 7 dl = request.getfixturevalue(dl) batch = next(iter(dl)) - src = jnp.asarray(batch["src_lin"]) - tgt = jnp.asarray(batch["tgt_lin"]) + src_lin = batch.get("src_lin") + if src_lin is not None: + src_lin = jnp.asarray(src_lin) + src_quad = batch.get("src_quad") + if src_quad is not None: + src_quad = jnp.asarray(src_quad) + tgt_lin = batch.get("tgt_lin") + if tgt_lin is not None: + tgt_lin = jnp.asarray(batch["tgt_lin"]) + tgt_quad = batch.get("tgt_quad") + if tgt_quad is not None: + tgt_quad = jnp.asarray(batch["tgt_quad"]) src_cond = batch.get("src_condition") if src_cond is not None: src_cond = jnp.asarray(src_cond) - src_dim = src.shape[-1] - tgt_dim = tgt.shape[-1] - cond_dim = src_cond.shape[-1] if src_cond is not None else 0 - vf = models.VelocityField( - hidden_dim=hidden_dim, - output_dim=tgt_dim, - condition_dim=src_dim + cond_dim, - ) - - data_mfn = functools.partial(data_match_fn, type="linear") - - model = genot.GENOT( - vf, - flow=flows.ConstantNoiseFlow(0.0), - data_match_fn=data_mfn, - source_dim=src_dim, - target_dim=tgt_dim, - condition_dim=cond_dim, - rng=rng_init, - optimizer=optax.adam(learning_rate=1e-4), - ) - - _logs = model(dl, n_iters=3, rng=rng_call) - res = model.transport(src, condition=src_cond) - - assert jnp.sum(jnp.isnan(res)) == 0 - assert res.shape[-1] == tgt_dim - - @pytest.mark.parametrize("dl", ["quad_dl", "conditional_quad_dl"]) - def test_genot_quad(self, rng: jax.Array, dl: str, request): - rng_init, rng_call = jax.random.split(rng) - hidden_dim = 7 - dl = request.getfixturevalue(dl) - - batch = next(iter(dl)) - src = 
jnp.asarray(batch["src_quad"]) - tgt = jnp.asarray(batch["tgt_quad"]) - src_cond = batch.get("src_condition") - if src_cond is not None: - src_cond = jnp.asarray(src_cond) - src_dim = src.shape[-1] - tgt_dim = tgt.shape[-1] + src_lin_dim = src_lin.shape[-1] if src_lin is not None else 0 + src_quad_dim = src_quad.shape[-1] if src_quad is not None else 0 + tgt_lin_shape = tgt_lin.shape[-1] if tgt_lin is not None else 0 + tgt_quad_shape = tgt_quad.shape[-1] if tgt_quad is not None else 0 + src_dim = src_lin_dim + src_quad_dim + tgt_dim = tgt_lin_shape + tgt_quad_shape cond_dim = src_cond.shape[-1] if src_cond is not None else 0 vf = models.VelocityField( @@ -103,50 +81,16 @@ def test_genot_quad(self, rng: jax.Array, dl: str, request): condition_dim=src_dim + cond_dim, ) - data_mfn = functools.partial(data_match_fn, type="quadratic") - - model = genot.GENOT( - vf, - flow=flows.ConstantNoiseFlow(0.0), - data_match_fn=data_mfn, - source_dim=src_dim, - target_dim=tgt_dim, - condition_dim=cond_dim, - rng=rng_init, - optimizer=optax.adam(learning_rate=1e-4), - ) - - _logs = model(dl, n_iters=3, rng=rng_call) - res = model.transport(src, condition=src_cond) - - assert jnp.sum(jnp.isnan(res)) == 0 - assert res.shape[-1] == tgt_dim - - @pytest.mark.parametrize("dl", ["fused_dl", "conditional_fused_dl"]) - def test_genot_fused(self, rng: jax.Array, dl: str, request): - rng_init, rng_call = jax.random.split(rng) - hidden_dim = 7 - dl = request.getfixturevalue(dl) - - batch = next(iter(dl)) - src_lin = jnp.asarray(batch["src_lin"]) - tgt_lin = jnp.asarray(batch["tgt_lin"]) - src_quad = jnp.asarray(batch["src_quad"]) - tgt_quad = jnp.asarray(batch["tgt_quad"]) - src_cond = batch.get("src_condition") - if src_cond is not None: - src_cond = jnp.asarray(src_cond) - src_dim = src_lin.shape[-1] + src_quad.shape[-1] - tgt_dim = tgt_lin.shape[-1] + tgt_quad.shape[-1] - cond_dim = src_cond.shape[-1] if src_cond is not None else 0 - - vf = models.VelocityField( - hidden_dim=hidden_dim, - 
output_dim=tgt_dim, - condition_dim=src_dim + cond_dim, - ) + if src_lin_dim > 0 and src_quad_dim == 0: + problem_type = "linear" + elif src_lin_dim == 0 and src_quad_dim > 0: + problem_type = "quadratic" + elif src_lin_dim > 0 and src_quad_dim > 0: + problem_type = "fused" + else: + raise ValueError("Unknown problem type") - data_mfn = functools.partial(data_match_fn, type="fused") + data_mfn = functools.partial(data_match_fn, type=problem_type) model = genot.GENOT( vf, @@ -160,7 +104,8 @@ def test_genot_fused(self, rng: jax.Array, dl: str, request): ) _logs = model(dl, n_iters=3, rng=rng_call) - src = jnp.concatenate([src_lin, src_quad], axis=-1) + src_terms = [term for term in [src_lin, src_quad] if term is not None] + src = jnp.concatenate(src_terms, axis=-1) res = model.transport(src, condition=src_cond) assert jnp.sum(jnp.isnan(res)) == 0 From d8603f76f55ce9f0e3240a9ee86f7d5d02e1c3d8 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Tue, 19 Mar 2024 16:36:00 +0100 Subject: [PATCH 151/186] change reference for GENOT --- docs/references.bib | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/references.bib b/docs/references.bib index a53f5a8a1..d07643e8c 100644 --- a/docs/references.bib +++ b/docs/references.bib @@ -842,7 +842,7 @@ @misc{klein_uscidda:23 eprint = {2310.09254}, eprintclass = {stat.ML}, eprinttype = {arXiv}, - title = {Generative Entropic Neural Optimal Transport To Map Within and Across Spaces}, + title = {Entropic (Gromov) Wasserstein Flow Matching with GENOT}, year = {2023}, } From 7813f833cfe9e22d32af9c9934ae87b1e1b7c1e6 Mon Sep 17 00:00:00 2001 From: Dominik Klein Date: Wed, 20 Mar 2024 16:04:51 +0100 Subject: [PATCH 152/186] add missing docstring --- src/ott/neural/flow_models/utils.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/src/ott/neural/flow_models/utils.py b/src/ott/neural/flow_models/utils.py index 3149106bb..a189b8f0f 100644 --- a/src/ott/neural/flow_models/utils.py +++ 
b/src/ott/neural/flow_models/utils.py @@ -99,7 +99,15 @@ def match_quadratic( def sample_joint(rng: jax.Array, tmat: jnp.ndarray) -> Tuple[jnp.ndarray, jnp.ndarray]: - """TODO.""" + """Sample from a transport matrix. + + Args: + rng: Random number generator. + tmat: Transport matrix. + + Returns: + Source and target indices sampled from the transport matrix. + """ n, m = tmat.shape tmat_flattened = tmat.flatten() indices = jax.random.choice( From 212ee012b830a1ad808fb1336dc90b74cdd3d5d2 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Mon, 25 Mar 2024 19:21:10 +0100 Subject: [PATCH 153/186] Modify behaviour of `ConditionalLoader` --- src/ott/neural/data/datasets.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/src/ott/neural/data/datasets.py b/src/ott/neural/data/datasets.py index 0ec8edfe9..9cf1c085f 100644 --- a/src/ott/neural/data/datasets.py +++ b/src/ott/neural/data/datasets.py @@ -142,22 +142,20 @@ def __init__( def __next__(self) -> Item_t: if self._it == len(self): raise StopIteration + self._it += 1 ix = self._rng.choice(len(self._iterators)) iterator = self._iterators[ix] try: - data = next(iterator) - # TODO(michalk8): improve the logic a bit - self._it += 1 - return data + return next(iterator) except StopIteration: - self._iterators[ix] = iter(self.datasets[ix]) - if not self._iterators: - raise + # reset the consumed iterator and return it's first element + self._iterators[ix] = iterator = iter(self.datasets[ix]) + return next(iterator) def __iter__(self) -> "ConditionalLoader": - self._iterators = [iter(ds) for ds in self.datasets] self._it = 0 + self._iterators = [iter(ds) for ds in self.datasets] return self def __len__(self) -> int: From 95c71420c40e7a70c0b55e8e304a561d02cca5a9 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Mon, 25 Mar 2024 19:22:49 +0100 Subject: [PATCH 154/186] Update docstring --- 
src/ott/neural/data/datasets.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/ott/neural/data/datasets.py b/src/ott/neural/data/datasets.py index 9cf1c085f..dcea1fd17 100644 --- a/src/ott/neural/data/datasets.py +++ b/src/ott/neural/data/datasets.py @@ -54,7 +54,8 @@ class OTDataset: tgt_data: Samples from the target distribution. src_conditions: Conditions for the source data. tgt_conditions: Conditions for the target data. - is_aligned: Whether the samples from `src_data` and `tgt_data` are paired. + is_aligned: Whether the samples from the source and the target data + are paired. If yes, the source and the target conditions must match. seed: Random seed. """ SRC_PREFIX = "src" @@ -67,7 +68,7 @@ def __init__( src_conditions: Optional[Sequence[Any]] = None, tgt_conditions: Optional[Sequence[Any]] = None, is_aligned: bool = False, - seed: Optional[int] = None + seed: Optional[int] = None, ): self.src_data = src_data self.tgt_data = tgt_data From 52a54d38b02192a4b8ffcad86ca67944f5579fe4 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Mon, 25 Mar 2024 20:08:43 +0100 Subject: [PATCH 155/186] Clean GENOT docs --- docs/neural/flow_models.rst | 1 - src/ott/neural/data/datasets.py | 2 +- src/ott/neural/flow_models/genot.py | 96 ++++++++++++++++------------ src/ott/neural/flow_models/models.py | 2 +- src/ott/neural/flow_models/otfm.py | 2 +- src/ott/neural/flow_models/utils.py | 20 +----- 6 files changed, 62 insertions(+), 61 deletions(-) diff --git a/docs/neural/flow_models.rst b/docs/neural/flow_models.rst index 32fe6e7f3..273f145f3 100644 --- a/docs/neural/flow_models.rst +++ b/docs/neural/flow_models.rst @@ -48,4 +48,3 @@ Utils utils.sample_conditional utils.cyclical_time_encoder utils.uniform_sampler - utils.multivariate_normal diff --git a/src/ott/neural/data/datasets.py b/src/ott/neural/data/datasets.py index dcea1fd17..28c49a5af 100644 --- a/src/ott/neural/data/datasets.py +++ 
b/src/ott/neural/data/datasets.py @@ -56,7 +56,7 @@ class OTDataset: tgt_conditions: Conditions for the target data. is_aligned: Whether the samples from the source and the target data are paired. If yes, the source and the target conditions must match. - seed: Random seed. + seed: Random seed used to match source and target when not aligned. """ SRC_PREFIX = "src" TGT_PREFIX = "tgt" diff --git a/src/ott/neural/flow_models/genot.py b/src/ott/neural/flow_models/genot.py index f115fe7a4..d2e579f23 100644 --- a/src/ott/neural/flow_models/genot.py +++ b/src/ott/neural/flow_models/genot.py @@ -28,37 +28,45 @@ __all__ = ["GENOT"] +# input: (src_lin, tgt_lin, src_quad, tgt_quad), output: (len(src), len(tgt)) +# all are optional because the problem can be linear/quadratic/fused +DataMatchFn_t = Callable[[ + Optional[jnp.ndarray], Optional[jnp.ndarray], Optional[jnp.ndarray], + Optional[jnp.ndarray] +], jnp.ndarray] -class GENOT: - """GENOT for entropic neural optimal transport :cite:`klein_uscidda:23`. - GENOT (Generative Entropic Neural Optimal Transport) is a framework for - learning neural optimal transport plans between two distributions. It - allows for learning linear and quadratic (Fused) Gromov-Wasserstein couplings, - in both the balanced and the unbalanced setting. +class GENOT: + """Generative Entropic Neural Optimal Transport :cite:`klein_uscidda:23`. + GENOT is a framework for learning neural optimal transport plans between + two distributions. It allows for learning linear and quadratic + (Fused) Gromov-Wasserstein couplings, in both the balanced and + the unbalanced setting. Args: - velocity_field: Neural vector field parameterized by a neural network. + velocity_field: Vector field parameterized by a neural network. flow: Flow between latent distribution and target distribution. - data_match_fn: OT solver to matching the source and the target distribution. + data_match_fn: Function to match source and target distributions. 
+ The function accepts a 4-tuple ``(src_lin, tgt_lin, src_quad, tgt_quad)`` + and return the transport matrix of shape ``(len(src), len(tgt))``. + Either linear, quadratic or both linear and quadratic source and target + arrays are passed, corresponding to the linear, quadratic and + fused GW couplings, respectively. source_dim: Dimension of the source space. target_dim: Dimension of the target space. - condition_dim: Dimension of the conditions. + condition_dim: Dimension of the conditions. If :obj:`None`, the underlying + velocity field has no conditions. + n_samples_per_src: Number of samples drawn from the conditional distribution + per one source sample. time_sampler: Sampler for the time to learn the neural ODE. If :obj:`None`, the time is uniformly sampled. - # TODO(michalk8): rename - k_samples_per_x: Number of samples drawn from the conditional distribution - per single source sample. - latent_match_fn: Linear OT matcher to optimally pair the latent - distribution with the `k_samples_per_x` samples of the conditional - distribution (corresponding to one sample). If :obj:`None`, samples - from the latent distribution are randomly paired with the samples from - the conditional distribution. latent_noise_fn: Function to sample from the latent distribution in the target space. If :obj:`None`, the latent distribution is sampled from a multivariate normal distribution. - # TODO(michalk8): expose all args for the train state? + latent_match_fn: Function to pair the latent distribution with + the ``n_samples_per_src`` samples of the conditional distribution. + If :obj:`None`, no matching is performed. kwargs: Keyword arguments for :meth:`ott.neural.flow_models.models.VelocityField.create_train_state`. 
""" @@ -67,36 +75,33 @@ def __init__( self, velocity_field: models.VelocityField, flow: flows.BaseFlow, - data_match_fn: Callable[ - [jnp.ndarray, jnp.ndarray, jnp.ndarray, jnp.ndarray], jnp.ndarray], + data_match_fn: DataMatchFn_t, + *, source_dim: int, target_dim: int, - condition_dim: int, + condition_dim: Optional[int] = None, + n_samples_per_src: int = 1, time_sampler: Callable[[jax.Array, int], jnp.ndarray] = flow_utils.uniform_sampler, - # TODO(michalk8): rename, too descriptive - k_samples_per_x: int = 1, - latent_match_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray], - jnp.ndarray]] = None, latent_noise_fn: Optional[Callable[[jax.Array, Tuple[int, ...]], jnp.ndarray]] = None, + latent_match_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray], + jnp.ndarray]] = None, **kwargs: Any, ): self.vf = velocity_field self.flow = flow self.data_match_fn = data_match_fn self.time_sampler = time_sampler - self.latent_match_fn = latent_match_fn if latent_noise_fn is None: - latent_noise_fn = functools.partial( - flow_utils.multivariate_normal, dim=target_dim - ) + latent_noise_fn = functools.partial(multivariate_normal, dim=target_dim) self.latent_noise_fn = latent_noise_fn - self.k_samples_per_x = k_samples_per_x + self.latent_match_fn = latent_match_fn + self.n_samples_per_src = n_samples_per_src self.vf_state = self.vf.create_train_state( input_dim=target_dim, - condition_dim=source_dim + condition_dim, + condition_dim=source_dim + (condition_dim or 0), **kwargs ) self.step_fn = self._get_step_fn() @@ -120,10 +125,10 @@ def loss_fn( source_conditions: Optional[jnp.ndarray], rng: jax.Array ): x_t = self.flow.compute_xt(rng, time, latent, target) - cond = ( - source if source_conditions is None else - jnp.concatenate([source, source_conditions], axis=-1) - ) + if source_conditions is None: + cond = source + else: + cond = jnp.concatenate([source, source_conditions], axis=-1) v_t = vf_state.apply_fn({"params": params}, time, x_t, cond) u_t = self.flow.compute_ut(time, 
latent, target) @@ -151,7 +156,7 @@ def __call__( loader: Data loader returning a dictionary with possible keys `src_lin`, `tgt_lin`, `src_quad`, `tgt_quad`, `src_conditions`. n_iters: Number of iterations to train the model. - rng: Random number generator. + rng: Random key for seeding. Returns: Training logs. @@ -175,7 +180,6 @@ def prepare_data( else: raise RuntimeError("Cannot infer OT problem type from data.") - # TODO(michalk8): filter `None` from the `arrs`? return (src, batch.get("src_condition"), tgt), arrs rng = utils.default_prng_key(rng) @@ -188,14 +192,14 @@ def prepare_data( (src, src_cond, tgt), matching_data = prepare_data(batch) n = src.shape[0] - time = self.time_sampler(rng_time, n * self.k_samples_per_x) - latent = self.latent_noise_fn(rng_noise, (n, self.k_samples_per_x)) + time = self.time_sampler(rng_time, n * self.n_samples_per_src) + latent = self.latent_noise_fn(rng_noise, (n, self.n_samples_per_src)) tmat = self.data_match_fn(*matching_data) # (n, m) src_ixs, tgt_ixs = flow_utils.sample_conditional( # (n, k), (m, k) rng_resample, tmat, - k=self.k_samples_per_x, + k=self.n_samples_per_src, uniform_marginals=True, # TODO(michalk8): expose ) @@ -304,3 +308,15 @@ def solve_ode(x: jnp.ndarray, cond: jnp.ndarray) -> jnp.ndarray: source = jnp.concatenate([source, condition], axis=-1) return jax.jit(jax.vmap(solve_ode))(latent, source) + + +def multivariate_normal( + rng: jax.Array, + shape: Tuple[int, ...], + dim: int, + mean: float = 0.0, + cov: float = 1.0 +) -> jnp.ndarray: + mean = jnp.full(dim, fill_value=mean) + cov = jnp.diag(jnp.full(dim, fill_value=cov)) + return jax.random.multivariate_normal(rng, mean=mean, cov=cov, shape=shape) diff --git a/src/ott/neural/flow_models/models.py b/src/ott/neural/flow_models/models.py index abf3ec8f7..f88bcbfdf 100644 --- a/src/ott/neural/flow_models/models.py +++ b/src/ott/neural/flow_models/models.py @@ -86,7 +86,7 @@ def __call__( t = self.act_fn(nn.Dense(time_dim)(t)) x = 
self.act_fn(nn.Dense(self.hidden_dim)(x)) if self.condition_dim is not None: - assert condition is not None, "TODO." + assert condition is not None, "No condition was specified." condition = self.act_fn(nn.Dense(self.condition_dim)(condition)) feats = [t, x] + ([] if condition is None else [condition]) diff --git a/src/ott/neural/flow_models/otfm.py b/src/ott/neural/flow_models/otfm.py index ad1a70522..3e4381b65 100644 --- a/src/ott/neural/flow_models/otfm.py +++ b/src/ott/neural/flow_models/otfm.py @@ -34,7 +34,7 @@ class OTFlowMatching: With an extension to OT-FM :cite:`tong:23`, :cite:`pooladian:23`. Args: - velocity_field: Neural vector field parameterized by a neural network. + velocity_field: Vector field parameterized by a neural network. flow: Flow between source and target distribution. match_fn: Function to match data points from the source distribution and the target distribution. diff --git a/src/ott/neural/flow_models/utils.py b/src/ott/neural/flow_models/utils.py index a189b8f0f..f85ef159a 100644 --- a/src/ott/neural/flow_models/utils.py +++ b/src/ott/neural/flow_models/utils.py @@ -26,7 +26,6 @@ "sample_conditional", "cyclical_time_encoder", "uniform_sampler", - "multivariate_normal", ] ScaleCost_t = Union[float, Literal["mean", "max_cost", "median"]] @@ -100,7 +99,7 @@ def match_quadratic( def sample_joint(rng: jax.Array, tmat: jnp.ndarray) -> Tuple[jnp.ndarray, jnp.ndarray]: """Sample from a transport matrix. - + Args: rng: Random number generator. tmat: Transport matrix. @@ -130,7 +129,7 @@ def sample_conditional( Args: rng: Random number generator. tmat: Transport matrix. - k: Expected number of samples to sample per row. + k: Expected number of samples to sample per source. uniform_marginals: If :obj:`True`, sample exactly `k` samples per row, otherwise sample proportionally to the sums of the rows of the transport matrix. @@ -138,7 +137,7 @@ def sample_conditional( Returns: Source and target indices sampled from the transport matrix. 
""" - assert k > 0, "Number of samples per row must be positive." + assert k > 0, "Number of samples per source must be positive." n, m = tmat.shape if uniform_marginals: @@ -208,16 +207,3 @@ def uniform_sampler( t = jax.random.uniform(rng, (1, 1), minval=low, maxval=high) mod_term = ((high - low) - offset) return (t + jnp.arange(num_samples)[:, None] / num_samples) % mod_term - - -def multivariate_normal( - rng: jax.Array, - shape: Tuple[int, ...], - dim: int, - mean: float = 0.0, - cov: float = 1.0 -) -> jnp.ndarray: - """TODO.""" - mean = jnp.full(dim, fill_value=mean) - cov = jnp.diag(jnp.full(dim, fill_value=cov)) - return jax.random.multivariate_normal(rng, mean=mean, cov=cov, shape=shape) From de2e4ac29e7efd5d452e74cab7362049c2d532f3 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Tue, 26 Mar 2024 18:03:40 +0100 Subject: [PATCH 156/186] Improve VF --- src/ott/neural/flow_models/models.py | 59 +++++++++++++++------------- tests/neural/genot_test.py | 9 ++--- tests/neural/otfm_test.py | 10 ++--- 3 files changed, 40 insertions(+), 38 deletions(-) diff --git a/src/ott/neural/flow_models/models.py b/src/ott/neural/flow_models/models.py index f88bcbfdf..cd182d20e 100644 --- a/src/ott/neural/flow_models/models.py +++ b/src/ott/neural/flow_models/models.py @@ -11,7 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import Callable, Optional +from typing import Callable, Optional, Sequence import jax import jax.numpy as jnp @@ -35,29 +35,20 @@ class VelocityField(nn.Module): a target distribution given at :math:`t_1` by integrating :math:`v(t, x)` from :math:`t=t_0` to :math:`t=t_1`. 
- Each of the input, condition, and time embeddings are passed through a block - consisting of ``num_layers`` layers of dimension - ``hidden_dim``, ``condition_dim``, and ``time_embed_dim``, - respectively. The output of each block is concatenated and passed through - a final block of dimension ``joint_hidden_dim``. - Args: - hidden_dim: Dimensionality of the embedding of the data. - output_dim: Dimensionality of the neural vector field. - num_layers: Number of layers. - condition_dim: Dimensionality of the embedding of the condition. + hidden_dims: Dimensionality of the embedding of the data. + condition_dims: Dimensionality of the embedding of the condition. If :obj:`None`, the velocity field has no conditions. - time_dim: Dimensionality of the time embedding. - If :obj:`None`, set to ``hidden_dim``. + time_dims: Dimensionality of the time embedding. + If :obj:`None`, ``hidden_dims`` will be used. time_encoder: Function to encode the time input to the time-dependent velocity field. act_fn: Activation function. """ - hidden_dim: int output_dim: int - num_layers: int = 3 - condition_dim: Optional[int] = None - time_dim: Optional[int] = None + hidden_dims: Sequence[int] = (128, 128, 128) + condition_dims: Optional[Sequence[int]] = None + time_dims: Optional[Sequence[int]] = None time_encoder: Callable[[jnp.ndarray], jnp.ndarray] = utils.cyclical_time_encoder act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.silu @@ -79,24 +70,33 @@ def __call__( Returns: Output of the neural vector field of shape ``[batch, output_dim]``. 
""" - time_dim = self.hidden_dim if self.time_dim is None else self.time_dim + if self.condition_dims is None: + cond_dims = [None] * len(self.hidden_dims) + else: + cond_dims = self.condition_dims + time_dims = self.hidden_dims if self.time_dims is None else self.time_dims + + assert len(self.hidden_dims) == len(cond_dims), "TODO" + assert len(self.hidden_dims) == len(time_dims), "TODO" t = self.time_encoder(t) - for _ in range(self.num_layers): + for time_dim, cond_dim, hidden_dim in zip( + time_dims, cond_dims, self.hidden_dims + ): t = self.act_fn(nn.Dense(time_dim)(t)) - x = self.act_fn(nn.Dense(self.hidden_dim)(x)) - if self.condition_dim is not None: + x = self.act_fn(nn.Dense(hidden_dim)(x)) + if self.condition_dims is not None: assert condition is not None, "No condition was specified." - condition = self.act_fn(nn.Dense(self.condition_dim)(condition)) + condition = self.act_fn(nn.Dense(cond_dim)(condition)) - feats = [t, x] + ([] if condition is None else [condition]) + feats = [t, x] + ([] if self.condition_dims is None else [condition]) feats = jnp.concatenate(feats, axis=-1) joint_dim = feats.shape[-1] - for _ in range(self.num_layers): + for _ in range(len(self.hidden_dims)): feats = self.act_fn(nn.Dense(joint_dim)(feats)) - return nn.Dense(self.output_dim, use_bias=True)(feats) + return nn.Dense(self.output_dim)(feats) def create_train_state( self, @@ -111,14 +111,17 @@ def create_train_state( rng: Random number generator. optimizer: Optimizer. input_dim: Dimensionality of the velocity field. - condition_dim: Dimensionality of the condition - to the velocity field. + condition_dim: Dimensionality of the condition of the velocity field. Returns: The training state. 
""" t, x = jnp.ones((1, 1)), jnp.ones((1, input_dim)) - cond = None if self.condition_dim is None else jnp.ones((1, condition_dim)) + if self.condition_dims is not None: + assert condition_dim is not None, "TODO" + cond = jnp.ones((1, condition_dim)) + else: + cond = None params = self.init(rng, t, x, cond)["params"] return train_state.TrainState.create( diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index a5f061335..22bce3e39 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -46,8 +46,7 @@ class TestGENOT: ] ) def test_genot(self, rng: jax.Array, dl: str, request): - rng_init, rng_call = jax.random.split(rng) - hidden_dim = 7 + rng_init, rng_call = jax.random.split(rng, 2) dl = request.getfixturevalue(dl) batch = next(iter(dl)) @@ -76,9 +75,9 @@ def test_genot(self, rng: jax.Array, dl: str, request): cond_dim = src_cond.shape[-1] if src_cond is not None else 0 vf = models.VelocityField( - hidden_dim=hidden_dim, - output_dim=tgt_dim, - condition_dim=src_dim + cond_dim, + tgt_dim, + hidden_dims=[7, 7, 7], + condition_dims=[7, 7, 7], ) if src_lin_dim > 0 and src_quad_dim == 0: diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index a4db65fa5..078b5c50a 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -27,13 +27,13 @@ class TestOTFlowMatching: (3, "lin_dl_with_conds"), (4, "conditional_lin_dl")]) def test_fm(self, rng: jax.Array, cond_dim: int, dl: str, request): - input_dim, hidden_dim = 2, 5 + dim = 2 # all dataloaders have this dim dl = request.getfixturevalue(dl) neural_vf = models.VelocityField( - hidden_dim=hidden_dim, - output_dim=input_dim, - condition_dim=hidden_dim if cond_dim > 0 else None, + dim, + hidden_dims=[5, 5, 5], + condition_dims=[5, 5, 5] if cond_dim > 0 else None, ) fm = otfm.OTFlowMatching( neural_vf, @@ -41,7 +41,7 @@ def test_fm(self, rng: jax.Array, cond_dim: int, dl: str, request): match_fn=jax.jit(utils.match_linear), rng=rng, 
optimizer=optax.adam(learning_rate=1e-3), - input_dim=input_dim, + input_dim=dim, condition_dim=cond_dim, ) From 9b89fd7f8a934135f09f8ffad658b26a10bb36a0 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Tue, 26 Mar 2024 18:23:53 +0100 Subject: [PATCH 157/186] Simplify GENOT test --- tests/neural/genot_test.py | 73 ++++++++++++-------------------------- 1 file changed, 22 insertions(+), 51 deletions(-) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 22bce3e39..bd857aef8 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -12,12 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. import functools -from typing import Literal +from typing import Literal, Optional import pytest import jax import jax.numpy as jnp +import jax.tree_util as jtu import optax @@ -25,54 +26,35 @@ def data_match_fn( - src_lin: jnp.ndarray, tgt_lin: jnp.ndarray, src_quad: jnp.ndarray, - tgt_quad: jnp.ndarray, *, type: Literal["linear", "quadratic", "fused"] -): - if type == "linear": + src_lin: Optional[jnp.ndarray], tgt_lin: Optional[jnp.ndarray], + src_quad: Optional[jnp.ndarray], tgt_quad: Optional[jnp.ndarray], *, + typ: Literal["lin", "quad", "fused"] +) -> jnp.ndarray: + if typ == "lin": return utils.match_linear(x=src_lin, y=tgt_lin) - if type == "quadratic": + if typ == "quad": return utils.match_quadratic(xx=src_quad, yy=tgt_quad) - if type == "fused": + if typ == "fused": return utils.match_quadratic(xx=src_quad, yy=tgt_quad, x=src_lin, y=tgt_lin) - raise NotImplementedError(f"Unknown type: {type}") + raise NotImplementedError(f"Unknown type: {typ}.") class TestGENOT: - @pytest.mark.parametrize( - "dl", [ - "lin_dl", "conditional_lin_dl", "quad_dl", "conditional_quad_dl", - "fused_dl", "conditional_fused_dl" - ] - ) + # TODO(michalk8): add conds + @pytest.mark.parametrize("dl", ["lin_dl", "quad_dl", "fused_dl"]) def 
test_genot(self, rng: jax.Array, dl: str, request): - rng_init, rng_call = jax.random.split(rng, 2) + rng_init, rng_call, rng_data = jax.random.split(rng, 3) + problem_type = dl.split("_")[0] dl = request.getfixturevalue(dl) batch = next(iter(dl)) - src_lin = batch.get("src_lin") - if src_lin is not None: - src_lin = jnp.asarray(src_lin) - src_quad = batch.get("src_quad") - if src_quad is not None: - src_quad = jnp.asarray(src_quad) - tgt_lin = batch.get("tgt_lin") - if tgt_lin is not None: - tgt_lin = jnp.asarray(batch["tgt_lin"]) - tgt_quad = batch.get("tgt_quad") - if tgt_quad is not None: - tgt_quad = jnp.asarray(batch["tgt_quad"]) + batch = jtu.tree_map(jnp.asarray, batch) src_cond = batch.get("src_condition") - if src_cond is not None: - src_cond = jnp.asarray(src_cond) - src_lin_dim = src_lin.shape[-1] if src_lin is not None else 0 - src_quad_dim = src_quad.shape[-1] if src_quad is not None else 0 - tgt_lin_shape = tgt_lin.shape[-1] if tgt_lin is not None else 0 - tgt_quad_shape = tgt_quad.shape[-1] if tgt_quad is not None else 0 - src_dim = src_lin_dim + src_quad_dim - tgt_dim = tgt_lin_shape + tgt_quad_shape - cond_dim = src_cond.shape[-1] if src_cond is not None else 0 + dims = jtu.tree_map(lambda x: x.shape[-1], batch) + src_dim = dims.get("src_lin", 0) + dims.get("src_quad", 0) + tgt_dim = dims.get("tgt_lin", 0) + dims.get("tgt_quad", 0) vf = models.VelocityField( tgt_dim, @@ -80,31 +62,20 @@ def test_genot(self, rng: jax.Array, dl: str, request): condition_dims=[7, 7, 7], ) - if src_lin_dim > 0 and src_quad_dim == 0: - problem_type = "linear" - elif src_lin_dim == 0 and src_quad_dim > 0: - problem_type = "quadratic" - elif src_lin_dim > 0 and src_quad_dim > 0: - problem_type = "fused" - else: - raise ValueError("Unknown problem type") - - data_mfn = functools.partial(data_match_fn, type=problem_type) - model = genot.GENOT( vf, flow=flows.ConstantNoiseFlow(0.0), - data_match_fn=data_mfn, + data_match_fn=functools.partial(data_match_fn, 
typ=problem_type), source_dim=src_dim, target_dim=tgt_dim, - condition_dim=cond_dim, + condition_dim=None if src_cond is None else src_cond.shape[-1], rng=rng_init, optimizer=optax.adam(learning_rate=1e-4), ) _logs = model(dl, n_iters=3, rng=rng_call) - src_terms = [term for term in [src_lin, src_quad] if term is not None] - src = jnp.concatenate(src_terms, axis=-1) + + src = jax.random.normal(rng_data, (3, src_dim)) res = model.transport(src, condition=src_cond) assert jnp.sum(jnp.isnan(res)) == 0 From 433da0cdd50039716c06895244377748b0253d41 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Tue, 26 Mar 2024 18:58:41 +0100 Subject: [PATCH 158/186] Better metadata wrapper in tests --- tests/neural/conftest.py | 81 ++++++++++++++++++++------------------ tests/neural/genot_test.py | 21 +++++----- tests/neural/otfm_test.py | 24 +++++------ 3 files changed, 62 insertions(+), 64 deletions(-) diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index b19962608..8653e7d63 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -11,7 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import Optional, Union +from typing import NamedTuple, Optional, Union import pytest @@ -21,6 +21,14 @@ from ott.neural.data import datasets +class OTLoader(NamedTuple): + loader: DataLoader + lin_dim: int = 0 + quad_src_dim: int = 0 + quad_tgt_dim: int = 0 + cond_dim: Optional[int] = None + + def _ot_data( rng: np.random.Generator, *, @@ -31,7 +39,8 @@ def _ot_data( cond_dim: Optional[int] = None, offset: float = 0.0 ) -> datasets.OTData: - assert lin_dim or quad_dim, "TODO" + assert lin_dim or quad_dim, \ + "Either linear or quadratic dimension has to be specified." 
lin_data = None if lin_dim is None else ( rng.normal(size=(n, lin_dim)) + offset @@ -50,7 +59,6 @@ def _ot_data( @pytest.fixture() def lin_dl() -> DataLoader: - """Returns a data loader for a simple Gaussian mixture.""" n, d = 128, 2 rng = np.random.default_rng(0) @@ -58,11 +66,14 @@ def lin_dl() -> DataLoader: tgt = _ot_data(rng, n=n, lin_dim=d, offset=1.0) ds = datasets.OTDataset(src, tgt) - return DataLoader(ds, batch_size=16, shuffle=True) + return OTLoader( + DataLoader(ds, batch_size=16, shuffle=True), + lin_dim=d, + ) @pytest.fixture() -def lin_dl_with_conds() -> DataLoader: +def lin_cond_dl() -> DataLoader: n, d, cond_dim = 128, 2, 3 rng = np.random.default_rng(13) @@ -72,39 +83,44 @@ def lin_dl_with_conds() -> DataLoader: tgt = _ot_data(rng, n=n, lin_dim=d, condition=tgt_cond) ds = datasets.OTDataset(src, tgt) - return DataLoader(ds, batch_size=16, shuffle=True) + return OTLoader( + DataLoader(ds, batch_size=16, shuffle=True), + lin_dim=d, + cond_dim=cond_dim, + ) @pytest.fixture() -def conditional_lin_dl() -> datasets.ConditionalLoader: - d, cond_dim = 2, 4 - rng = np.random.default_rng(42) - - src0 = _ot_data(rng, condition=0.0, lin_dim=d, cond_dim=cond_dim) - tgt0 = _ot_data(rng, lin_dim=d, offset=2.0) - src1 = _ot_data(rng, condition=1.0, lin_dim=d, cond_dim=cond_dim) - tgt1 = _ot_data(rng, lin_dim=d, offset=-2.0) - - src_ds = datasets.OTDataset(src0, tgt0) - tgt_ds = datasets.OTDataset(src1, tgt1) +def quad_dl(): + n, quad_src_dim, quad_tgt_dim = 128, 2, 4 + rng = np.random.default_rng(11) - src_dl = DataLoader(src_ds, batch_size=16, shuffle=True) - tgt_dl = DataLoader(tgt_ds, batch_size=16, shuffle=True) + src = _ot_data(rng, n=n, quad_dim=quad_src_dim) + tgt = _ot_data(rng, n=n, quad_dim=quad_tgt_dim, offset=1.0) + ds = datasets.OTDataset(src, tgt) - return datasets.ConditionalLoader([src_dl, tgt_dl]) + return OTLoader( + DataLoader(ds, batch_size=16, shuffle=True), + quad_src_dim=quad_src_dim, + quad_tgt_dim=quad_tgt_dim, + ) @pytest.fixture() -def 
quad_dl(): - n = 128 - quad_dim_src, quad_dim_tgt = 2, 4 +def fused_dl(): + n, lin_dim, quad_src_dim, quad_tgt_dim = 128, 6, 2, 4 rng = np.random.default_rng(11) - src = _ot_data(rng, n=n, quad_dim=quad_dim_src) - tgt = _ot_data(rng, n=n, quad_dim=quad_dim_tgt, offset=1.0) + src = _ot_data(rng, n=n, lin_dim=lin_dim, quad_dim=quad_src_dim) + tgt = _ot_data(rng, n=n, lin_dim=lin_dim, quad_dim=quad_tgt_dim, offset=1.0) ds = datasets.OTDataset(src, tgt) - return DataLoader(ds, batch_size=16, shuffle=True) + return OTLoader( + DataLoader(ds, batch_size=16, shuffle=True), + lin_dim=lin_dim, + quad_src_dim=quad_src_dim, + quad_tgt_dim=quad_tgt_dim, + ) @pytest.fixture() @@ -133,19 +149,6 @@ def conditional_quad_dl() -> datasets.ConditionalLoader: return datasets.ConditionalLoader([src_dl, tgt_dl]) -@pytest.fixture() -def fused_dl(): - n, lin_dim = 128, 6 - quad_dim_src, quad_dim_tgt = 2, 4 - rng = np.random.default_rng(11) - - src = _ot_data(rng, n=n, lin_dim=lin_dim, quad_dim=quad_dim_src) - tgt = _ot_data(rng, n=n, lin_dim=lin_dim, quad_dim=quad_dim_tgt, offset=1.0) - ds = datasets.OTDataset(src, tgt) - - return DataLoader(ds, batch_size=16, shuffle=True) - - @pytest.fixture() def conditional_fused_dl() -> datasets.ConditionalLoader: n, lin_dim, cond_dim = 128, 3, 7 diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index bd857aef8..35a2c5135 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -48,34 +48,33 @@ def test_genot(self, rng: jax.Array, dl: str, request): problem_type = dl.split("_")[0] dl = request.getfixturevalue(dl) - batch = next(iter(dl)) - batch = jtu.tree_map(jnp.asarray, batch) - src_cond = batch.get("src_condition") - - dims = jtu.tree_map(lambda x: x.shape[-1], batch) - src_dim = dims.get("src_lin", 0) + dims.get("src_quad", 0) - tgt_dim = dims.get("tgt_lin", 0) + dims.get("tgt_quad", 0) + src_dim = dl.lin_dim + dl.quad_src_dim + tgt_dim = dl.lin_dim + dl.quad_tgt_dim + cond_dim = dl.cond_dim vf = 
models.VelocityField( tgt_dim, hidden_dims=[7, 7, 7], - condition_dims=[7, 7, 7], + condition_dims=None if dl.cond_dim is None else [1, 3, 2], ) - model = genot.GENOT( vf, flow=flows.ConstantNoiseFlow(0.0), data_match_fn=functools.partial(data_match_fn, typ=problem_type), source_dim=src_dim, target_dim=tgt_dim, - condition_dim=None if src_cond is None else src_cond.shape[-1], + condition_dim=cond_dim, rng=rng_init, optimizer=optax.adam(learning_rate=1e-4), ) - _logs = model(dl, n_iters=3, rng=rng_call) + _logs = model(dl.loader, n_iters=3, rng=rng_call) + batch = next(iter(dl.loader)) + batch = jtu.tree_map(jnp.asarray, batch) src = jax.random.normal(rng_data, (3, src_dim)) + src_cond = batch.get("src_condition") + res = model.transport(src, condition=src_cond) assert jnp.sum(jnp.isnan(res)) == 0 diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index 078b5c50a..8e0b4aff7 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -15,6 +15,7 @@ import jax import jax.numpy as jnp +import jax.tree_util as jtu import optax @@ -23,17 +24,15 @@ class TestOTFlowMatching: - @pytest.mark.parametrize(("cond_dim", "dl"), [(0, "lin_dl"), - (3, "lin_dl_with_conds"), - (4, "conditional_lin_dl")]) - def test_fm(self, rng: jax.Array, cond_dim: int, dl: str, request): - dim = 2 # all dataloaders have this dim + @pytest.mark.parametrize("dl", ["lin_dl", "lin_cond_dl"]) + def test_fm(self, rng: jax.Array, dl: str, request): dl = request.getfixturevalue(dl) + dim, cond_dim = dl.lin_dim, dl.cond_dim neural_vf = models.VelocityField( dim, hidden_dims=[5, 5, 5], - condition_dims=[5, 5, 5] if cond_dim > 0 else None, + condition_dims=None if cond_dim is None else [4, 3, 2], ) fm = otfm.OTFlowMatching( neural_vf, @@ -45,17 +44,14 @@ def test_fm(self, rng: jax.Array, cond_dim: int, dl: str, request): condition_dim=cond_dim, ) - _logs = fm(dl, n_iters=3) + _logs = fm(dl.loader, n_iters=3) - batch = next(iter(dl)) - src = jnp.asarray(batch["src_lin"]) - tgt = 
jnp.asarray(batch["tgt_lin"]) + batch = next(iter(dl.loader)) + batch = jtu.tree_map(jnp.asarray, batch) src_cond = batch.get("src_condition") - if src_cond is not None: - src_cond = jnp.asarray(src_cond) - res_fwd = fm.transport(src, condition=src_cond) - res_bwd = fm.transport(tgt, t0=1.0, t1=0.0, condition=src_cond) + res_fwd = fm.transport(batch["src_lin"], condition=src_cond) + res_bwd = fm.transport(batch["tgt_lin"], t0=1.0, t1=0.0, condition=src_cond) # TODO(michalk8): better assertions assert jnp.sum(jnp.isnan(res_fwd)) == 0 From f8fcba7e8edcb65a3b66fa2099f71eae1db2e4af Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Tue, 26 Mar 2024 19:04:06 +0100 Subject: [PATCH 159/186] Fix condition in GENOT test --- tests/neural/genot_test.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 35a2c5135..3f4de227f 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -41,8 +41,9 @@ def data_match_fn( class TestGENOT: - # TODO(michalk8): add conds - @pytest.mark.parametrize("dl", ["lin_dl", "quad_dl", "fused_dl"]) + @pytest.mark.parametrize( + "dl", ["lin_dl", "quad_dl", "fused_dl", "lin_cond_dl"] + ) def test_genot(self, rng: jax.Array, dl: str, request): rng_init, rng_call, rng_data = jax.random.split(rng, 3) problem_type = dl.split("_")[0] @@ -50,12 +51,12 @@ def test_genot(self, rng: jax.Array, dl: str, request): src_dim = dl.lin_dim + dl.quad_src_dim tgt_dim = dl.lin_dim + dl.quad_tgt_dim - cond_dim = dl.cond_dim + cond_dim = dl.cnd_dim vf = models.VelocityField( tgt_dim, hidden_dims=[7, 7, 7], - condition_dims=None if dl.cond_dim is None else [1, 3, 2], + condition_dims=None if cond_dim is None else [1, 3, 2], ) model = genot.GENOT( vf, @@ -72,8 +73,9 @@ def test_genot(self, rng: jax.Array, dl: str, request): batch = next(iter(dl.loader)) batch = jtu.tree_map(jnp.asarray, batch) - src = 
jax.random.normal(rng_data, (3, src_dim)) src_cond = batch.get("src_condition") + batch_size = 4 if src_cond is None else src_cond.shape[0] + src = jax.random.normal(rng_data, (batch_size, src_dim)) res = model.transport(src, condition=src_cond) From 49a07a00ad544bbf25e9f3f3fe235f486fbf6eaa Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Tue, 26 Mar 2024 19:09:10 +0100 Subject: [PATCH 160/186] Add quad cond dl --- tests/neural/conftest.py | 19 +++++++++++++++++++ tests/neural/genot_test.py | 4 ++-- 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index 8653e7d63..3493115c5 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -106,6 +106,25 @@ def quad_dl(): ) +@pytest.fixture() +def quad_cond_dl(): + n, quad_src_dim, quad_tgt_dim, cond_dim = 128, 2, 4, 5 + rng = np.random.default_rng(414) + + src_cond = rng.normal(size=(n, cond_dim)) + tgt_cond = rng.normal(size=(n, cond_dim)) + src = _ot_data(rng, n=n, quad_dim=quad_src_dim, condition=src_cond) + tgt = _ot_data(rng, n=n, quad_dim=quad_tgt_dim, offset=1.0, cond_dim=tgt_cond) + ds = datasets.OTDataset(src, tgt) + + return OTLoader( + DataLoader(ds, batch_size=16, shuffle=True), + quad_src_dim=quad_src_dim, + quad_tgt_dim=quad_tgt_dim, + cond_dim=cond_dim, + ) + + @pytest.fixture() def fused_dl(): n, lin_dim, quad_src_dim, quad_tgt_dim = 128, 6, 2, 4 diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 3f4de227f..59a738e0a 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -42,7 +42,7 @@ def data_match_fn( class TestGENOT: @pytest.mark.parametrize( - "dl", ["lin_dl", "quad_dl", "fused_dl", "lin_cond_dl"] + "dl", ["lin_dl", "quad_dl", "fused_dl", "lin_cond_dl", "quad_cond_dl"] ) def test_genot(self, rng: jax.Array, dl: str, request): rng_init, rng_call, rng_data = jax.random.split(rng, 3) @@ -51,7 +51,7 @@ def test_genot(self, rng: jax.Array, 
dl: str, request): src_dim = dl.lin_dim + dl.quad_src_dim tgt_dim = dl.lin_dim + dl.quad_tgt_dim - cond_dim = dl.cnd_dim + cond_dim = dl.cond_dim vf = models.VelocityField( tgt_dim, From d1ae1de237dac8edece99fbcb58394378a3d108b Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Tue, 26 Mar 2024 22:49:07 +0100 Subject: [PATCH 161/186] Add conf fused DL --- tests/neural/conftest.py | 64 ++++++++++---------------------------- tests/neural/genot_test.py | 5 ++- 2 files changed, 20 insertions(+), 49 deletions(-) diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index 3493115c5..92c23f6a6 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -143,60 +143,28 @@ def fused_dl(): @pytest.fixture() -def conditional_quad_dl() -> datasets.ConditionalLoader: - n, cond_dim = 128, 5 - quad_dim_src, quad_dim_tgt = 2, 4 +def fused_cond_dl(): + n, lin_dim, quad_src_dim, quad_tgt_dim, cond_dim = 128, 6, 2, 4, 7 rng = np.random.default_rng(11) - src0 = _ot_data( - rng, n=n, condition=0.0, cond_dim=cond_dim, quad_dim=quad_dim_src - ) - tgt0 = _ot_data( - rng, n=n, quad_dim=quad_dim_tgt, cond_dim=cond_dim, offset=2.0 - ) - src1 = _ot_data( - rng, n=n, condition=1.0, cond_dim=cond_dim, quad_dim=quad_dim_src + src_cond = rng.normal(size=(n, cond_dim)) + tgt_cond = rng.normal(size=(n, cond_dim)) + src = _ot_data( + rng, n=n, lin_dim=lin_dim, quad_dim=quad_src_dim, condition=src_cond ) - tgt1 = _ot_data(rng, n=n, quad_dim=quad_dim_tgt, offset=-2.0) - - src_ds = datasets.OTDataset(src0, tgt0) - tgt_ds = datasets.OTDataset(src1, tgt1) - - src_dl = DataLoader(src_ds, batch_size=16, shuffle=True) - tgt_dl = DataLoader(tgt_ds, batch_size=16, shuffle=True) - - return datasets.ConditionalLoader([src_dl, tgt_dl]) - - -@pytest.fixture() -def conditional_fused_dl() -> datasets.ConditionalLoader: - n, lin_dim, cond_dim = 128, 3, 7 - quad_dim_src, quad_dim_tgt = 2, 4 - rng = np.random.default_rng(11) - - src0 = _ot_data( + tgt 
= _ot_data( rng, n=n, - condition=0.0, - cond_dim=cond_dim, lin_dim=lin_dim, - quad_dim=quad_dim_src + quad_dim=quad_tgt_dim, + offset=1.0, + condition=tgt_cond ) - tgt0 = _ot_data(rng, n=n, lin_dim=lin_dim, quad_dim=quad_dim_tgt, offset=2.0) - src1 = _ot_data( - rng, - n=n, - condition=1.0, - cond_dim=cond_dim, + ds = datasets.OTDataset(src, tgt) + + return OTLoader( + DataLoader(ds, batch_size=16, shuffle=True), lin_dim=lin_dim, - quad_dim=quad_dim_src + quad_src_dim=quad_src_dim, + quad_tgt_dim=quad_tgt_dim, ) - tgt1 = _ot_data(rng, n=n, lin_dim=lin_dim, quad_dim=quad_dim_tgt, offset=-2.0) - - src_ds = datasets.OTDataset(src0, tgt0) - tgt_ds = datasets.OTDataset(src1, tgt1) - - src_dl = DataLoader(src_ds, batch_size=16, shuffle=True) - tgt_dl = DataLoader(tgt_ds, batch_size=16, shuffle=True) - - return datasets.ConditionalLoader([src_dl, tgt_dl]) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 59a738e0a..086cc82ea 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -42,7 +42,10 @@ def data_match_fn( class TestGENOT: @pytest.mark.parametrize( - "dl", ["lin_dl", "quad_dl", "fused_dl", "lin_cond_dl", "quad_cond_dl"] + "dl", [ + "lin_dl", "quad_dl", "fused_dl", "lin_cond_dl", "quad_cond_dl", + "fused_cond_dl" + ] ) def test_genot(self, rng: jax.Array, dl: str, request): rng_init, rng_call, rng_data = jax.random.split(rng, 3) From f6c69bdf4d26a49830eb36dddb9cea4520a77d92 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Wed, 27 Mar 2024 00:18:36 +0100 Subject: [PATCH 162/186] Polish docs --- src/ott/neural/flow_models/genot.py | 35 +++++++++++++---------------- src/ott/neural/flow_models/otfm.py | 21 +++++++++-------- tests/neural/otfm_test.py | 3 +-- 3 files changed, 27 insertions(+), 32 deletions(-) diff --git a/src/ott/neural/flow_models/genot.py b/src/ott/neural/flow_models/genot.py index d2e579f23..08e011c04 100644 --- a/src/ott/neural/flow_models/genot.py +++ 
b/src/ott/neural/flow_models/genot.py @@ -46,30 +46,27 @@ class GENOT: Args: velocity_field: Vector field parameterized by a neural network. - flow: Flow between latent distribution and target distribution. - data_match_fn: Function to match source and target distributions. - The function accepts a 4-tuple ``(src_lin, tgt_lin, src_quad, tgt_quad)`` - and return the transport matrix of shape ``(len(src), len(tgt))``. - Either linear, quadratic or both linear and quadratic source and target - arrays are passed, corresponding to the linear, quadratic and - fused GW couplings, respectively. - source_dim: Dimension of the source space. - target_dim: Dimension of the target space. + flow: Flow between the latent and the target distributions. + data_match_fn: Function to match samples from the source and the target + distributions with a ``(src_lin, tgt_lin, src_quad, tgt_quad) -> matching`` + signature. + source_dim: Dimensionality of the source distribution. + target_dim: Dimensionality of the target distribution. condition_dim: Dimension of the conditions. If :obj:`None`, the underlying velocity field has no conditions. + time_sampler: Time sampler with a ``(rng, n_samples) -> time`` signature. + latent_noise_fn: Function to sample from the latent distribution in the + target space with a ``(rng, shape) -> noise`` signature. + If :obj:`None`, multivariate normal distribution is used. + latent_match_fn: Function to match samples from the latent distribution + and the samples from the conditional distribution with a + ``(latent, samples) -> matching`` signature. If :obj:`None`, no matching + is performed. n_samples_per_src: Number of samples drawn from the conditional distribution per one source sample. - time_sampler: Sampler for the time to learn the neural ODE. If :obj:`None`, - the time is uniformly sampled. - latent_noise_fn: Function to sample from the latent distribution in the - target space. 
If :obj:`None`, the latent distribution is sampled from a - multivariate normal distribution. - latent_match_fn: Function to pair the latent distribution with - the ``n_samples_per_src`` samples of the conditional distribution. - If :obj:`None`, no matching is performed. kwargs: Keyword arguments for :meth:`ott.neural.flow_models.models.VelocityField.create_train_state`. - """ + """ # noqa: E501 def __init__( self, @@ -80,13 +77,13 @@ def __init__( source_dim: int, target_dim: int, condition_dim: Optional[int] = None, - n_samples_per_src: int = 1, time_sampler: Callable[[jax.Array, int], jnp.ndarray] = flow_utils.uniform_sampler, latent_noise_fn: Optional[Callable[[jax.Array, Tuple[int, ...]], jnp.ndarray]] = None, latent_match_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray], jnp.ndarray]] = None, + n_samples_per_src: int = 1, **kwargs: Any, ): self.vf = velocity_field diff --git a/src/ott/neural/flow_models/otfm.py b/src/ott/neural/flow_models/otfm.py index 3e4381b65..2c6be25a4 100644 --- a/src/ott/neural/flow_models/otfm.py +++ b/src/ott/neural/flow_models/otfm.py @@ -31,20 +31,18 @@ class OTFlowMatching: """(Optimal transport) flow matching :cite:`lipman:22`. - With an extension to OT-FM :cite:`tong:23`, :cite:`pooladian:23`. + With an extension to OT-FM :cite:`tong:23,pooladian:23`. Args: velocity_field: Vector field parameterized by a neural network. - flow: Flow between source and target distribution. - match_fn: Function to match data points from the source distribution and - the target distribution. - time_sampler: Sampler for the time. - # TODO(michalk8): expose all args for the train state? - kwargs: TODO. + flow: Flow between the source and the target distributions. + match_fn: Function to match samples from the source and the target + distributions. It has a ``(src, tgt) -> matching`` signature. + time_sampler: Time sampler with a ``(rng, n_samples) -> time`` signature. 
+ kwargs: Keyword arguments for + :meth:`~ott.neural.flow_models.models.VelocityField.create_train_state`. """ - # TODO(michalk8): in the future, `input_dim`, `optimizer` and `rng` will be - # in a separate function def __init__( self, velocity_field: models.VelocityField, @@ -60,7 +58,9 @@ def __init__( self.time_sampler = time_sampler self.match_fn = match_fn - self.vf_state = self.vf.create_train_state(**kwargs) + self.vf_state = self.vf.create_train_state( + input_dim=self.vf.output_dim, **kwargs + ) self.step_fn = self._get_step_fn() def _get_step_fn(self) -> Callable: @@ -97,7 +97,6 @@ def loss_fn( return step_fn - # TODO(michalk8): refactor in the future PR to just do one step def __call__( # noqa: D102 self, loader: Iterable[Dict[str, np.ndarray]], diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index 8e0b4aff7..7f08c55dd 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -25,7 +25,7 @@ class TestOTFlowMatching: @pytest.mark.parametrize("dl", ["lin_dl", "lin_cond_dl"]) - def test_fm(self, rng: jax.Array, dl: str, request): + def test_otfm(self, rng: jax.Array, dl: str, request): dl = request.getfixturevalue(dl) dim, cond_dim = dl.lin_dim, dl.cond_dim @@ -40,7 +40,6 @@ def test_fm(self, rng: jax.Array, dl: str, request): match_fn=jax.jit(utils.match_linear), rng=rng, optimizer=optax.adam(learning_rate=1e-3), - input_dim=dim, condition_dim=cond_dim, ) From 3b69c0f1ac56d9fdea2c015317d0ce422bb7d561 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Wed, 27 Mar 2024 00:22:51 +0100 Subject: [PATCH 163/186] Remove conditional loader --- docs/neural/data.rst | 2 +- src/ott/neural/data/datasets.py | 49 ++------------------------------- 2 files changed, 4 insertions(+), 47 deletions(-) diff --git a/docs/neural/data.rst b/docs/neural/data.rst index 25172dcd3..79a602746 100644 --- a/docs/neural/data.rst +++ b/docs/neural/data.rst @@ -11,5 +11,5 @@ Datasets .. 
autosummary:: :toctree: _autosummary + datasets.OTData datasets.OTDataset - datasets.ConditionalLoader diff --git a/src/ott/neural/data/datasets.py b/src/ott/neural/data/datasets.py index 28c49a5af..c5661aecc 100644 --- a/src/ott/neural/data/datasets.py +++ b/src/ott/neural/data/datasets.py @@ -13,11 +13,11 @@ # limitations under the License. import collections import dataclasses -from typing import Any, Dict, Iterable, Optional, Sequence +from typing import Any, Dict, Optional, Sequence import numpy as np -__all__ = ["OTData", "OTDataset", "ConditionalLoader"] +__all__ = ["OTData", "OTDataset"] Item_t = Dict[str, np.ndarray] @@ -47,7 +47,7 @@ def __len__(self) -> int: class OTDataset: - """Dataset for (conditional) optimal transport problems. + """Dataset for optimal transport problems. Args: src_data: Samples from the source distribution. @@ -118,46 +118,3 @@ def __getitem__(self, ix: int) -> Item_t: def __len__(self) -> int: return len(self.src_data) - - -class ConditionalLoader: - """Dataset for OT problems with conditions. - - This data loader wraps several data loaders and samples from them. - - Args: - datasets: Datasets to sample from. - seed: Random seed. 
- """ - - def __init__( - self, - datasets: Iterable[OTDataset], - seed: Optional[int] = None, - ): - self.datasets = tuple(datasets) - self._rng = np.random.default_rng(seed) - self._iterators = [] - self._it = 0 - - def __next__(self) -> Item_t: - if self._it == len(self): - raise StopIteration - self._it += 1 - - ix = self._rng.choice(len(self._iterators)) - iterator = self._iterators[ix] - try: - return next(iterator) - except StopIteration: - # reset the consumed iterator and return it's first element - self._iterators[ix] = iterator = iter(self.datasets[ix]) - return next(iterator) - - def __iter__(self) -> "ConditionalLoader": - self._it = 0 - self._iterators = [iter(ds) for ds in self.datasets] - return self - - def __len__(self) -> int: - return max((len(ds) for ds in self.datasets), default=0) From 0ff3ad64dd3cc50502124210b10ea8b956b0722b Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Wed, 27 Mar 2024 00:28:47 +0100 Subject: [PATCH 164/186] Fix link in the docs --- docs/neural/data.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/neural/data.rst b/docs/neural/data.rst index 79a602746..3ae0e4c53 100644 --- a/docs/neural/data.rst +++ b/docs/neural/data.rst @@ -3,7 +3,7 @@ ott.neural.data .. module:: ott.neural.data .. currentmodule:: ott.neural.data -The :mod:`ott.problems.data` contains data sets and data loaders needed +The :mod:`ott.neural.data` contains data sets and data loaders needed for solving (conditional) neural optimal transport problems. 
Datasets From c3ce78649f0ad550bfa964d52e68f66f9ae8bb8a Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Wed, 27 Mar 2024 00:42:28 +0100 Subject: [PATCH 165/186] Improve VF --- src/ott/neural/flow_models/models.py | 48 +++++++++++----------------- src/ott/neural/flow_models/otfm.py | 2 +- tests/neural/otfm_test.py | 2 +- 3 files changed, 21 insertions(+), 31 deletions(-) diff --git a/src/ott/neural/flow_models/models.py b/src/ott/neural/flow_models/models.py index cd182d20e..a1b8a5291 100644 --- a/src/ott/neural/flow_models/models.py +++ b/src/ott/neural/flow_models/models.py @@ -26,7 +26,7 @@ class VelocityField(nn.Module): - r"""Parameterized neural vector field. + r"""Neural vector field. This class learns a map :math:`v: \mathbb{R}\times \mathbb{R}^d \rightarrow \mathbb{R}^d` solving the ODE :math:`\frac{dx}{dt} = v(t, x)`. @@ -36,16 +36,16 @@ class VelocityField(nn.Module): from :math:`t=t_0` to :math:`t=t_1`. Args: + output_dims: TODO. hidden_dims: Dimensionality of the embedding of the data. condition_dims: Dimensionality of the embedding of the condition. If :obj:`None`, the velocity field has no conditions. time_dims: Dimensionality of the time embedding. - If :obj:`None`, ``hidden_dims`` will be used. - time_encoder: Function to encode the time input to the time-dependent - velocity field. + If :obj:`None`, ``hidden_dims`` is used. + time_encoder: Time encoder for the velocity field. act_fn: Activation function. """ - output_dim: int + output_dims: Sequence[int] hidden_dims: Sequence[int] = (128, 128, 128) condition_dims: Optional[Sequence[int]] = None time_dims: Optional[Sequence[int]] = None @@ -70,33 +70,27 @@ def __call__( Returns: Output of the neural vector field of shape ``[batch, output_dim]``. 
""" - if self.condition_dims is None: - cond_dims = [None] * len(self.hidden_dims) - else: - cond_dims = self.condition_dims time_dims = self.hidden_dims if self.time_dims is None else self.time_dims - assert len(self.hidden_dims) == len(cond_dims), "TODO" - assert len(self.hidden_dims) == len(time_dims), "TODO" - t = self.time_encoder(t) - for time_dim, cond_dim, hidden_dim in zip( - time_dims, cond_dims, self.hidden_dims - ): + for time_dim in time_dims: t = self.act_fn(nn.Dense(time_dim)(t)) + + for hidden_dim in self.hidden_dims: x = self.act_fn(nn.Dense(hidden_dim)(x)) - if self.condition_dims is not None: - assert condition is not None, "No condition was specified." - condition = self.act_fn(nn.Dense(cond_dim)(condition)) - feats = [t, x] + ([] if self.condition_dims is None else [condition]) - feats = jnp.concatenate(feats, axis=-1) - joint_dim = feats.shape[-1] + if self.condition_dims is not None: + assert condition is not None, "No condition was passed." + for cond_dim in self.condition_dims: + condition = self.act_fn(nn.Dense(cond_dim)(condition)) + feats = jnp.concatenate([t, x, condition], axis=-1) + else: + feats = jnp.concatenate([t, x], axis=-1) - for _ in range(len(self.hidden_dims)): - feats = self.act_fn(nn.Dense(joint_dim)(feats)) + for output_dim in self.output_dims: + feats = self.act_fn(nn.Dense(output_dim)(feats)) - return nn.Dense(self.output_dim)(feats) + return feats def create_train_state( self, @@ -117,11 +111,7 @@ def create_train_state( The training state. 
""" t, x = jnp.ones((1, 1)), jnp.ones((1, input_dim)) - if self.condition_dims is not None: - assert condition_dim is not None, "TODO" - cond = jnp.ones((1, condition_dim)) - else: - cond = None + cond = None if self.condition_dims is None else jnp.ones((1, condition_dim)) params = self.init(rng, t, x, cond)["params"] return train_state.TrainState.create( diff --git a/src/ott/neural/flow_models/otfm.py b/src/ott/neural/flow_models/otfm.py index 2c6be25a4..f6ccd6e1b 100644 --- a/src/ott/neural/flow_models/otfm.py +++ b/src/ott/neural/flow_models/otfm.py @@ -59,7 +59,7 @@ def __init__( self.match_fn = match_fn self.vf_state = self.vf.create_train_state( - input_dim=self.vf.output_dim, **kwargs + input_dim=self.vf.output_dims[-1], **kwargs ) self.step_fn = self._get_step_fn() diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index 7f08c55dd..8d746dd88 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -30,7 +30,7 @@ def test_otfm(self, rng: jax.Array, dl: str, request): dim, cond_dim = dl.lin_dim, dl.cond_dim neural_vf = models.VelocityField( - dim, + output_dims=[7, dim], hidden_dims=[5, 5, 5], condition_dims=None if cond_dim is None else [4, 3, 2], ) From 161dd4a498f4c525b9eb844edf0ce1acb469f479 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Wed, 27 Mar 2024 00:42:56 +0100 Subject: [PATCH 166/186] Fix GENOT test --- tests/neural/genot_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 086cc82ea..0005d56ba 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -57,7 +57,7 @@ def test_genot(self, rng: jax.Array, dl: str, request): cond_dim = dl.cond_dim vf = models.VelocityField( - tgt_dim, + output_dims=[15, tgt_dim], hidden_dims=[7, 7, 7], condition_dims=None if cond_dim is None else [1, 3, 2], ) From 69c3a4d474bd1ba93a879af39498c5c2d381f855 Mon Sep 17 00:00:00 2001 From: 
Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Wed, 27 Mar 2024 00:50:20 +0100 Subject: [PATCH 167/186] Polish docs --- src/ott/neural/flow_models/models.py | 4 ++-- src/ott/neural/flow_models/utils.py | 19 +++++++++---------- tests/neural/genot_test.py | 2 +- tests/neural/otfm_test.py | 2 +- 4 files changed, 13 insertions(+), 14 deletions(-) diff --git a/src/ott/neural/flow_models/models.py b/src/ott/neural/flow_models/models.py index a1b8a5291..a770b1fdd 100644 --- a/src/ott/neural/flow_models/models.py +++ b/src/ott/neural/flow_models/models.py @@ -36,8 +36,8 @@ class VelocityField(nn.Module): from :math:`t=t_0` to :math:`t=t_1`. Args: - output_dims: TODO. hidden_dims: Dimensionality of the embedding of the data. + output_dims: Dimensionality of the embedding of the output. condition_dims: Dimensionality of the embedding of the condition. If :obj:`None`, the velocity field has no conditions. time_dims: Dimensionality of the time embedding. @@ -45,8 +45,8 @@ class VelocityField(nn.Module): time_encoder: Time encoder for the velocity field. act_fn: Activation function. """ + hidden_dims: Sequence[int] output_dims: Sequence[int] - hidden_dims: Sequence[int] = (128, 128, 128) condition_dims: Optional[Sequence[int]] = None time_dims: Optional[Sequence[int]] = None time_encoder: Callable[[jnp.ndarray], diff --git a/src/ott/neural/flow_models/utils.py b/src/ott/neural/flow_models/utils.py index f85ef159a..dfbbe5c76 100644 --- a/src/ott/neural/flow_models/utils.py +++ b/src/ott/neural/flow_models/utils.py @@ -64,7 +64,6 @@ def match_quadratic( yy: jnp.ndarray, x: Optional[jnp.ndarray] = None, y: Optional[jnp.ndarray] = None, - # TODO(michalk8): expose for all the costs scale_cost: ScaleCost_t = 1.0, cost_fn: Optional[costs.CostFn] = None, **kwargs: Any @@ -98,14 +97,14 @@ def match_quadratic( def sample_joint(rng: jax.Array, tmat: jnp.ndarray) -> Tuple[jnp.ndarray, jnp.ndarray]: - """Sample from a transport matrix. 
+ """Sample jointly from a transport matrix. Args: rng: Random number generator. - tmat: Transport matrix. + tmat: Transport matrix of shape ``[n, m]``. Returns: - Source and target indices sampled from the transport matrix. + Source and target indices of shape ``[n,]`` and ``[m,]``, respectively. """ n, m = tmat.shape tmat_flattened = tmat.flatten() @@ -124,18 +123,18 @@ def sample_conditional( k: int = 1, uniform_marginals: bool = False, ) -> Tuple[jnp.ndarray, jnp.ndarray]: - """Sample indices from a transport matrix. + """Sample conditionally from a transport matrix. Args: rng: Random number generator. - tmat: Transport matrix. + tmat: Transport matrix of shape ``[n, m]``. k: Expected number of samples to sample per source. uniform_marginals: If :obj:`True`, sample exactly `k` samples per row, otherwise sample proportionally to the sums of the rows of the transport matrix. Returns: - Source and target indices sampled from the transport matrix. + Source and target indices of shape ``[n, k]`` and ``[m, k]``, respectively. """ assert k > 0, "Number of samples per source must be positive." n, m = tmat.shape @@ -195,11 +194,11 @@ def uniform_sampler( num_samples: Number of samples to generate. low: Lower bound of the uniform distribution. high: Upper bound of the uniform distribution. - offset: Offset of the uniform distribution. If :obj:`None`, no offset is - used. + offset: Offset of the uniform distribution. + If :obj:`None`, no offset is used. Returns: - An array with `num_samples` samples of the time :math:`t`. + An array of shape ``[num_samples, 1]``. 
""" if offset is None: return jax.random.uniform(rng, (num_samples, 1), minval=low, maxval=high) diff --git a/tests/neural/genot_test.py b/tests/neural/genot_test.py index 0005d56ba..c37d91563 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/genot_test.py @@ -57,8 +57,8 @@ def test_genot(self, rng: jax.Array, dl: str, request): cond_dim = dl.cond_dim vf = models.VelocityField( - output_dims=[15, tgt_dim], hidden_dims=[7, 7, 7], + output_dims=[15, tgt_dim], condition_dims=None if cond_dim is None else [1, 3, 2], ) model = genot.GENOT( diff --git a/tests/neural/otfm_test.py b/tests/neural/otfm_test.py index 8d746dd88..00619dc57 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/otfm_test.py @@ -30,8 +30,8 @@ def test_otfm(self, rng: jax.Array, dl: str, request): dim, cond_dim = dl.lin_dim, dl.cond_dim neural_vf = models.VelocityField( - output_dims=[7, dim], hidden_dims=[5, 5, 5], + output_dims=[7, dim], condition_dims=None if cond_dim is None else [4, 3, 2], ) fm = otfm.OTFlowMatching( From 65f2ab3d6e144e4dd60451308f9090eb7df8ce1e Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Wed, 27 Mar 2024 17:20:11 +0100 Subject: [PATCH 168/186] Remove `uniform_marginals` argument --- src/ott/neural/flow_models/genot.py | 1 - src/ott/neural/flow_models/utils.py | 25 +++++++++---------------- 2 files changed, 9 insertions(+), 17 deletions(-) diff --git a/src/ott/neural/flow_models/genot.py b/src/ott/neural/flow_models/genot.py index 08e011c04..3f2929e71 100644 --- a/src/ott/neural/flow_models/genot.py +++ b/src/ott/neural/flow_models/genot.py @@ -197,7 +197,6 @@ def prepare_data( rng_resample, tmat, k=self.n_samples_per_src, - uniform_marginals=True, # TODO(michalk8): expose ) src, tgt = src[src_ixs], tgt[tgt_ixs] # (n, k, ...), # (m, k, ...) 
diff --git a/src/ott/neural/flow_models/utils.py b/src/ott/neural/flow_models/utils.py index dfbbe5c76..6c67e45f0 100644 --- a/src/ott/neural/flow_models/utils.py +++ b/src/ott/neural/flow_models/utils.py @@ -121,17 +121,13 @@ def sample_conditional( tmat: jnp.ndarray, *, k: int = 1, - uniform_marginals: bool = False, ) -> Tuple[jnp.ndarray, jnp.ndarray]: """Sample conditionally from a transport matrix. Args: rng: Random number generator. tmat: Transport matrix of shape ``[n, m]``. - k: Expected number of samples to sample per source. - uniform_marginals: If :obj:`True`, sample exactly `k` samples - per row, otherwise sample proportionally to the sums of the - rows of the transport matrix. + k: Expected number of samples to sample per source sample. Returns: Source and target indices of shape ``[n, k]`` and ``[m, k]``, respectively. @@ -139,19 +135,16 @@ def sample_conditional( assert k > 0, "Number of samples per source must be positive." n, m = tmat.shape - if uniform_marginals: - indices = jnp.arange(n) - else: - src_marginals = tmat.sum(axis=1) - rng, rng_ixs = jax.random.split(rng, 2) - indices = jax.random.choice( - rng_ixs, a=n, p=src_marginals, shape=(len(src_marginals),) - ) - tmat = tmat[indices] + src_marginals = tmat.sum(axis=1) + rng, rng_ixs = jax.random.split(rng, 2) + indices = jax.random.choice(rng_ixs, a=n, p=src_marginals, shape=(n,)) + tmat = tmat[indices] + rngs = jax.random.split(rng, n) tgt_ixs = jax.vmap( - lambda row: jax.random.choice(rng, a=m, p=row, shape=(k,)) - )(tmat) # (m, k) + lambda rng, row: jax.random.choice(rng, a=m, p=row, shape=(k,)), + in_axes=[0, 0], + )(rngs, tmat) # (m, k) src_ixs = jnp.repeat(indices[:, None], k, axis=1) # (n, k) return src_ixs, tgt_ixs From ba64056baeca5e3dc68395f0f6ff7a66037ceae4 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Wed, 27 Mar 2024 17:28:35 +0100 Subject: [PATCH 169/186] Fix undefined variable --- src/ott/neural/flow_models/genot.py | 6 +++--- 1 
file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/ott/neural/flow_models/genot.py b/src/ott/neural/flow_models/genot.py index 3f2929e71..712d26ebb 100644 --- a/src/ott/neural/flow_models/genot.py +++ b/src/ott/neural/flow_models/genot.py @@ -120,7 +120,7 @@ def loss_fn( params: jnp.ndarray, time: jnp.ndarray, source: jnp.ndarray, target: jnp.ndarray, latent: jnp.ndarray, source_conditions: Optional[jnp.ndarray], rng: jax.Array - ): + ) -> jnp.ndarray: x_t = self.flow.compute_xt(rng, time, latent, target) if source_conditions is None: cond = source @@ -132,7 +132,7 @@ def loss_fn( return jnp.mean((v_t - u_t) ** 2) - grad_fn = jax.value_and_grad(loss_fn, has_aux=False) + grad_fn = jax.value_and_grad(loss_fn) loss, grads = grad_fn( vf_state.params, time, source, target, latent, source_conditions, rng ) @@ -244,7 +244,7 @@ def resample( in_axes, out_axes = (0, 1, cond_axis, 1, 1), (1, cond_axis, 1) resample_fn = jax.jit(jax.vmap(resample, in_axes, out_axes)) - rngs = jax.random.split(rng, self.k_samples_per_x) + rngs = jax.random.split(rng, self.n_samples_per_src) return resample_fn(rngs, src, src_cond, tgt, latent) def transport( From 80d292413cb1a6c1b731de0e8e7e930628574350 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Wed, 27 Mar 2024 17:30:14 +0100 Subject: [PATCH 170/186] Update `GENOT.transport` docs --- src/ott/neural/flow_models/genot.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/src/ott/neural/flow_models/genot.py b/src/ott/neural/flow_models/genot.py index 712d26ebb..a6fb30c91 100644 --- a/src/ott/neural/flow_models/genot.py +++ b/src/ott/neural/flow_models/genot.py @@ -258,21 +258,19 @@ def transport( ) -> jnp.ndarray: """Transport data with the learned plan. 
- This method pushes-forward the `source` to its conditional distribution by - solving the neural ODE parameterized by the - :attr:`~ott.neural.flows.genot.velocity_field` + This function pushes forward the source distribution to its conditional + distribution by solving the neural ODE. Args: source: Data to transport. condition: Condition of the input data. t0: Starting time of integration of neural ODE. t1: End time of integration of neural ODE. - rng: random seed for sampling from the latent distribution. - kwargs: Keyword arguments for the ODE solver. + rng: Random generate used to sample from the latent distribution. + kwargs: Keyword arguments for :func:`~diffrax.odesolve`. Returns: - The push-forward or pull-back distribution defined by the learned - transport plan. + The push-forward defined by the learned transport plan. """ def vf(t: jnp.ndarray, x: jnp.ndarray, cond: jnp.ndarray) -> jnp.ndarray: From e4aae7f5177e2c886e45ad947aafcc2fff0780b5 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Wed, 27 Mar 2024 17:34:27 +0100 Subject: [PATCH 171/186] Add `diffrax` to `conf.py` --- docs/conf.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 5832efbda..571fc0cfd 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -63,13 +63,13 @@ "python": ("https://docs.python.org/3", None), "numpy": ("https://numpy.org/doc/stable/", None), "jax": ("https://jax.readthedocs.io/en/latest/", None), + "jaxopt": ("https://jaxopt.github.io/stable", None), "lineax": ("https://docs.kidger.site/lineax/", None), "flax": ("https://flax.readthedocs.io/en/latest/", None), - "scikit-sparse": ("https://scikit-sparse.readthedocs.io/en/latest/", None), + "optax": ("https://optax.readthedocs.io/en/latest/", None), + "diffrax": ("https://docs.kidger.site/diffrax/", None), "scipy": ("https://docs.scipy.org/doc/scipy/", None), "pot": ("https://pythonot.github.io/", None), - "jaxopt": 
("https://jaxopt.github.io/stable", None), - "optax": ("https://optax.readthedocs.io/en/latest/", None), "matplotlib": ("https://matplotlib.org/stable/", None), } From 1d96fac1c7ceb683d52ba450c65375a1ddbc68a1 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Fri, 29 Mar 2024 19:05:56 +0100 Subject: [PATCH 172/186] Restructure files --- src/ott/__init__.py | 1 - src/ott/initializers/__init__.py | 7 + .../data => initializers/neural}/__init__.py | 2 +- .../neural/meta_initializer.py} | 195 +----------------- src/ott/neural/__init__.py | 2 +- src/ott/neural/{data => }/datasets.py | 0 src/ott/neural/gaps/monge_gap.py | 148 ------------- src/ott/neural/{gaps => methods}/__init__.py | 2 +- .../flows}/__init__.py | 2 +- .../flows.py => methods/flows/dynamics.py} | 6 +- .../{flow_models => methods/flows}/genot.py | 23 ++- .../{flow_models => methods/flows}/otfm.py | 17 +- .../map_estimator.py => methods/monge_gap.py} | 139 ++++++++++++- .../neural/{duality => methods}/neuraldual.py | 158 ++------------ src/ott/neural/networks/__init__.py | 14 ++ src/ott/neural/networks/icnn.py | 160 ++++++++++++++ .../{duality => networks/layers}/__init__.py | 2 +- .../{duality => networks/layers}/conjugate.py | 0 .../layers.py => networks/layers/posdef.py} | 3 +- .../neural/networks/layers/time_encoder.py | 34 +++ src/ott/neural/networks/potentials.py | 185 +++++++++++++++++ .../models.py => networks/velocity_field.py} | 6 +- src/ott/solvers/__init__.py | 2 +- .../{neural/flow_models => solvers}/utils.py | 19 -- tests/__init__.py | 0 tests/geometry/graph_test.py | 6 +- .../neural/meta_initializer_test.py | 6 +- tests/neural/__init__.py | 15 +- tests/neural/conftest.py | 2 +- tests/neural/map_estimator_test.py | 88 -------- tests/neural/{ => methods}/genot_test.py | 16 +- .../monge_gap_test.py} | 77 ++++++- tests/neural/{ => methods}/neuraldual_test.py | 22 +- tests/neural/{ => methods}/otfm_test.py | 12 +- tests/neural/{ => 
networks}/icnn_test.py | 6 +- tests/tools/plot_test.py | 5 +- 36 files changed, 721 insertions(+), 661 deletions(-) rename src/ott/{neural/data => initializers/neural}/__init__.py (94%) rename src/ott/{neural/duality/models.py => initializers/neural/meta_initializer.py} (51%) rename src/ott/neural/{data => }/datasets.py (100%) delete mode 100644 src/ott/neural/gaps/monge_gap.py rename src/ott/neural/{gaps => methods}/__init__.py (93%) rename src/ott/neural/{flow_models => methods/flows}/__init__.py (92%) rename src/ott/neural/{flow_models/flows.py => methods/flows/dynamics.py} (98%) rename src/ott/neural/{flow_models => methods/flows}/genot.py (94%) rename src/ott/neural/{flow_models => methods/flows}/otfm.py (93%) rename src/ott/neural/{gaps/map_estimator.py => methods/monge_gap.py} (63%) rename src/ott/neural/{duality => methods}/neuraldual.py (80%) create mode 100644 src/ott/neural/networks/__init__.py create mode 100644 src/ott/neural/networks/icnn.py rename src/ott/neural/{duality => networks/layers}/__init__.py (91%) rename src/ott/neural/{duality => networks/layers}/conjugate.py (100%) rename src/ott/neural/{duality/layers.py => networks/layers/posdef.py} (99%) create mode 100644 src/ott/neural/networks/layers/time_encoder.py create mode 100644 src/ott/neural/networks/potentials.py rename src/ott/neural/{flow_models/models.py => networks/velocity_field.py} (96%) rename src/ott/{neural/flow_models => solvers}/utils.py (90%) create mode 100644 tests/__init__.py rename tests/{ => initializers}/neural/meta_initializer_test.py (95%) delete mode 100644 tests/neural/map_estimator_test.py rename tests/neural/{ => methods}/genot_test.py (83%) rename tests/neural/{losses_test.py => methods/monge_gap_test.py} (64%) rename tests/neural/{ => methods}/neuraldual_test.py (86%) rename tests/neural/{ => methods}/otfm_test.py (85%) rename tests/neural/{ => networks}/icnn_test.py (93%) diff --git a/src/ott/__init__.py b/src/ott/__init__.py index dac0eb854..c40402511 100644 --- 
a/src/ott/__init__.py +++ b/src/ott/__init__.py @@ -25,7 +25,6 @@ ) with contextlib.suppress(ImportError): - # TODO(michalk8): add warning that neural module is not imported from . import neural from ._version import __version__ diff --git a/src/ott/initializers/__init__.py b/src/ott/initializers/__init__.py index 5406247dc..0fad8c3ff 100644 --- a/src/ott/initializers/__init__.py +++ b/src/ott/initializers/__init__.py @@ -11,4 +11,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import contextlib + from . import linear, quadratic + +with contextlib.suppress(ImportError): + from . import neural + +del contextlib diff --git a/src/ott/neural/data/__init__.py b/src/ott/initializers/neural/__init__.py similarity index 94% rename from src/ott/neural/data/__init__.py rename to src/ott/initializers/neural/__init__.py index 785604b21..77e74d166 100644 --- a/src/ott/neural/data/__init__.py +++ b/src/ott/initializers/neural/__init__.py @@ -11,4 +11,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from . import datasets +from . import meta_initializer diff --git a/src/ott/neural/duality/models.py b/src/ott/initializers/neural/meta_initializer.py similarity index 51% rename from src/ott/neural/duality/models.py rename to src/ott/initializers/neural/meta_initializer.py index b3ce94c35..be1f87909 100644 --- a/src/ott/neural/duality/models.py +++ b/src/ott/initializers/neural/meta_initializer.py @@ -12,206 +12,27 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import functools -from typing import Any, Callable, Dict, Optional, Sequence, Tuple, Union +from typing import Any, Dict, Optional, Sequence, Tuple import jax import jax.numpy as jnp -import flax.linen as nn import optax +from flax import linen as nn from flax.core import frozen_dict from flax.training import train_state from ott import utils from ott.geometry import geometry -from ott.initializers.linear import initializers as lin_init -from ott.neural.duality import layers, neuraldual +from ott.initializers.linear import initializers from ott.problems.linear import linear_problem +from ott.solvers.linear import sinkhorn -__all__ = ["ICNN", "PotentialMLP", "MetaInitializer"] - -# wrap to silence docs linter -DEFAULT_KERNEL_INIT = lambda *a, **k: nn.initializers.normal()(*a, **k) -DEFAULT_RECTIFIER = nn.activation.relu -DEFAULT_ACTIVATION = nn.activation.relu - - -class ICNN(neuraldual.BaseW2NeuralDual): - """Input convex neural network (ICNN). - - Implementation of input convex neural networks as introduced in - :cite:`amos:17` with initialization schemes proposed by :cite:`bunne:22`. - - Args: - dim_data: data dimensionality. - dim_hidden: sequence specifying size of hidden dimensions. The - output dimension of the last layer is 1 by default. - ranks: ranks of the matrices :math:`A_i` used as low-rank factors - for the quadratic potentials. If a sequence is passed, it must contain - ``len(dim_hidden) + 2`` elements, where the last 2 elements correspond - to the ranks of the final layer with dimension 1 and the potentials, - respectively. - init_fn: Initializer for the kernel weight matrices. - The default is :func:`~flax.linen.initializers.normal`. - act_fn: choice of activation function used in network architecture, - needs to be convex. The default is :func:`~flax.linen.activation.relu`. - pos_weights: Enforce positive weights with a projection. - If :obj:`False`, the positive weights should be enforced with clipping - or regularization in the loss. 
- rectifier_fn: function to ensure the non negativity of the weights. - The default is :func:`~flax.linen.activation.relu`. - gaussian_map_samples: Tuple of source and target points, used to initialize - the ICNN to mimic the linear Bures map that morphs the (Gaussian - approximation) of the input measure to that of the target measure. If - :obj:`None`, the identity initialization is used, and ICNN mimics half the - squared Euclidean norm. - """ - - dim_data: int - dim_hidden: Sequence[int] - ranks: Union[int, Tuple[int, ...]] = 1 - init_fn: Callable[[jax.Array, Tuple[int, ...], Any], - jnp.ndarray] = DEFAULT_KERNEL_INIT - act_fn: Callable[[jnp.ndarray], jnp.ndarray] = DEFAULT_ACTIVATION - pos_weights: bool = False - rectifier_fn: Callable[[jnp.ndarray], jnp.ndarray] = DEFAULT_RECTIFIER - gaussian_map_samples: Optional[Tuple[jnp.ndarray, jnp.ndarray]] = None - - def setup(self) -> None: # noqa: D102 - dim_hidden = list(self.dim_hidden) + [1] - *ranks, pos_def_rank = self._normalize_ranks() - - # final layer computes average, still with normalized rescaling - self.w_zs = [self._get_wz(dim) for dim in dim_hidden[1:]] - # subsequent layers re-injected into convex functions - self.w_xs = [ - self._get_wx(dim, rank) for dim, rank in zip(dim_hidden, ranks) - ] - self.pos_def_potentials = self._get_pos_def_potentials(pos_def_rank) - - @nn.compact - def __call__(self, x: jnp.ndarray) -> float: # noqa: D102 - w_x, *w_xs = self.w_xs - assert len(self.w_zs) == len(w_xs), (len(self.w_zs), len(w_xs)) - - z = self.act_fn(w_x(x)) - for w_z, w_x in zip(self.w_zs, w_xs): - z = self.act_fn(w_z(z) + w_x(x)) - z = z + self.pos_def_potentials(x) - - return z.squeeze() - - def _get_wz(self, dim: int) -> nn.Module: - if self.pos_weights: - return layers.PositiveDense( - dim, - kernel_init=self.init_fn, - use_bias=False, - rectifier_fn=self.rectifier_fn, - ) - - return nn.Dense( - dim, - kernel_init=self.init_fn, - use_bias=False, - ) - - def _get_wx(self, dim: int, rank: int) -> 
nn.Module: - return layers.PosDefPotentials( - rank=rank, - num_potentials=dim, - use_linear=True, - use_bias=True, - kernel_diag_init=nn.initializers.zeros, - kernel_lr_init=self.init_fn, - kernel_linear_init=self.init_fn, - bias_init=nn.initializers.zeros, - ) - - def _get_pos_def_potentials(self, rank: int) -> layers.PosDefPotentials: - kwargs = { - "num_potentials": 1, - "use_linear": True, - "use_bias": True, - "bias_init": nn.initializers.zeros - } - - if self.gaussian_map_samples is None: - return layers.PosDefPotentials( - rank=rank, - kernel_diag_init=nn.initializers.ones, - kernel_lr_init=nn.initializers.zeros, - kernel_linear_init=nn.initializers.zeros, - **kwargs, - ) - - source, target = self.gaussian_map_samples - return layers.PosDefPotentials.init_from_samples( - source, - target, - rank=self.dim_data, - kernel_diag_init=nn.initializers.zeros, - **kwargs, - ) - - def _normalize_ranks(self) -> Tuple[int, ...]: - # +2 for the newly added layer with 1 + the final potentials - n_ranks = len(self.dim_hidden) + 2 - if isinstance(self.ranks, int): - return (self.ranks,) * n_ranks - - assert len(self.ranks) == n_ranks, (len(self.ranks), n_ranks) - return tuple(self.ranks) - - @property - def is_potential(self) -> bool: # noqa: D102 - return True - - -class PotentialMLP(neuraldual.BaseW2NeuralDual): - """A generic, not-convex MLP. - - Args: - dim_hidden: sequence specifying size of hidden dimensions. 
The output - dimension of the last layer is automatically set to 1 if - :attr:`is_potential` is ``True``, or the dimension of the input otherwise - is_potential: Model the potential if ``True``, otherwise - model the gradient of the potential - act_fn: Activation function - """ - - dim_hidden: Sequence[int] - is_potential: bool = True - act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.leaky_relu - - @nn.compact - def __call__(self, x: jnp.ndarray) -> jnp.ndarray: # noqa: D102 - squeeze = x.ndim == 1 - if squeeze: - x = jnp.expand_dims(x, 0) - assert x.ndim == 2, x.ndim - n_input = x.shape[-1] - - z = x - for n_hidden in self.dim_hidden: - Wx = nn.Dense(n_hidden, use_bias=True) - z = self.act_fn(Wx(z)) - - if self.is_potential: - Wx = nn.Dense(1, use_bias=True) - z = Wx(z).squeeze(-1) - - quad_term = 0.5 * jax.vmap(jnp.dot)(x, x) - z += quad_term - else: - Wx = nn.Dense(n_input, use_bias=True) - z = x + Wx(z) - - return z.squeeze(0) if squeeze else z +__all__ = ["MetaInitializer"] @jax.tree_util.register_pytree_node_class -class MetaInitializer(lin_init.DefaultInitializer): +class MetaInitializer(initializers.DefaultInitializer): """Meta OT Initializer with a fixed geometry :cite:`amos:22`. 
This initializer consists of a predictive model that outputs the @@ -314,7 +135,7 @@ def update( def init_dual_a( # noqa: D102 self, - ot_prob: "linear_problem.LinearProblem", + ot_prob: linear_problem.LinearProblem, lse_mode: bool, rng: Optional[jax.Array] = None, ) -> jnp.ndarray: @@ -337,8 +158,6 @@ def init_dual_a( # noqa: D102 def _get_update_fn(self): """Return the implementation (and jitted) update function.""" - from ott.problems.linear import linear_problem - from ott.solvers.linear import sinkhorn def dual_obj_loss_single(params, a, b): f_pred = self._compute_f(a, b, params) diff --git a/src/ott/neural/__init__.py b/src/ott/neural/__init__.py index 10dac222c..3af88e56b 100644 --- a/src/ott/neural/__init__.py +++ b/src/ott/neural/__init__.py @@ -11,4 +11,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from . import data, duality, flow_models, gaps +from . import datasets, methods, networks diff --git a/src/ott/neural/data/datasets.py b/src/ott/neural/datasets.py similarity index 100% rename from src/ott/neural/data/datasets.py rename to src/ott/neural/datasets.py diff --git a/src/ott/neural/gaps/monge_gap.py b/src/ott/neural/gaps/monge_gap.py deleted file mode 100644 index f6136bf07..000000000 --- a/src/ott/neural/gaps/monge_gap.py +++ /dev/null @@ -1,148 +0,0 @@ -# Copyright OTT-JAX -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -from typing import Any, Callable, Literal, Optional, Tuple, Union - -import jax -import jax.numpy as jnp - -from ott.geometry import costs, pointcloud -from ott.solvers import linear -from ott.solvers.linear import sinkhorn - -__all__ = ["monge_gap", "monge_gap_from_samples"] - - -def monge_gap( - map_fn: Callable[[jnp.ndarray], jnp.ndarray], - reference_points: jnp.ndarray, - cost_fn: Optional[costs.CostFn] = None, - epsilon: Optional[float] = None, - relative_epsilon: Optional[bool] = None, - scale_cost: Union[int, float, Literal["mean", "max_cost", "median"]] = 1.0, - return_output: bool = False, - **kwargs: Any -) -> Union[float, Tuple[float, sinkhorn.SinkhornOutput]]: - r"""Monge gap regularizer :cite:`uscidda:23`. - - For a cost function :math:`c` and empirical reference measure - :math:`\hat{\rho}_n=\frac{1}{n}\sum_{i=1}^n \delta_{x_i}`, the - (entropic) Monge gap of a map function - :math:`T:\mathbb{R}^d\rightarrow\mathbb{R}^d` is defined as: - - .. math:: - \mathcal{M}^c_{\hat{\rho}_n, \varepsilon} (T) - = \frac{1}{n} \sum_{i=1}^n c(x_i, T(x_i)) - - W_{c, \varepsilon}(\hat{\rho}_n, T \sharp \hat{\rho}_n) - - See :cite:`uscidda:23` Eq. (8). This function is a thin wrapper that calls - :func:`~ott.neural.losses.monge_gap_from_samples`. - - Args: - map_fn: Callable corresponding to map :math:`T` in definition above. The - callable should be vectorized (e.g. using :func:`jax.vmap`), i.e, - able to process a *batch* of vectors of size `d`, namely - ``map_fn`` applied to an array returns an array of the same shape. - reference_points: Array of `[n,d]` points, :math:`\hat\rho_n` in paper - cost_fn: An object of class :class:`~ott.geometry.costs.CostFn`. - epsilon: Regularization parameter. 
See - :class:`~ott.geometry.pointcloud.PointCloud` - relative_epsilon: when `False`, the parameter ``epsilon`` specifies the - value of the entropic regularization parameter. When `True`, ``epsilon`` - refers to a fraction of the - :attr:`~ott.geometry.pointcloud.PointCloud.mean_cost_matrix`, which is - computed adaptively using ``source`` and ``target`` points. - scale_cost: option to rescale the cost matrix. Implemented scalings are - 'median', 'mean' and 'max_cost'. Alternatively, a float factor can be - given to rescale the cost such that ``cost_matrix /= scale_cost``. - return_output: boolean to also return the - :class:`~ott.solvers.linear.sinkhorn.SinkhornOutput`. - kwargs: holds the kwargs to instantiate the or - :class:`~ott.solvers.linear.sinkhorn.Sinkhorn` solver to - compute the regularized OT cost. - - Returns: - The Monge gap value and optionally the - :class:`~ott.solvers.linear.sinkhorn.SinkhornOutput` - """ - target = map_fn(reference_points) - return monge_gap_from_samples( - source=reference_points, - target=target, - cost_fn=cost_fn, - epsilon=epsilon, - relative_epsilon=relative_epsilon, - scale_cost=scale_cost, - return_output=return_output, - **kwargs - ) - - -def monge_gap_from_samples( - source: jnp.ndarray, - target: jnp.ndarray, - cost_fn: Optional[costs.CostFn] = None, - epsilon: Optional[float] = None, - relative_epsilon: Optional[bool] = None, - scale_cost: Union[int, float, Literal["mean", "max_cost", "median"]] = 1.0, - return_output: bool = False, - **kwargs: Any -) -> Union[float, Tuple[float, sinkhorn.SinkhornOutput]]: - r"""Monge gap, instantiated in terms of samples before / after applying map. - - .. math:: - \frac{1}{n} \sum_{i=1}^n c(x_i, y_i)) - - W_{c, \varepsilon}(\frac{1}{n}\sum_i \delta_{x_i}, - \frac{1}{n}\sum_i \delta_{y_i}) - - where :math:`W_{c, \varepsilon}` is an entropy-regularized optimal transport - cost, the :attr:`~ott.solvers.linear.sinkhorn.SinkhornOutput.ent_reg_cost`. 
- - Args: - source: samples from first measure, array of shape ``[n, d]``. - target: samples from second measure, array of shape ``[n, d]``. - cost_fn: a cost function between two points in dimension :math:`d`. - If :obj:`None`, :class:`~ott.geometry.costs.SqEuclidean` is used. - epsilon: Regularization parameter. See - :class:`~ott.geometry.pointcloud.PointCloud` - relative_epsilon: when `False`, the parameter ``epsilon`` specifies the - value of the entropic regularization parameter. When `True`, ``epsilon`` - refers to a fraction of the - :attr:`~ott.geometry.pointcloud.PointCloud.mean_cost_matrix`, which is - computed adaptively using ``source`` and ``target`` points. - scale_cost: option to rescale the cost matrix. Implemented scalings are - 'median', 'mean' and 'max_cost'. Alternatively, a float factor can be - given to rescale the cost such that ``cost_matrix /= scale_cost``. - return_output: boolean to also return the - :class:`~ott.solvers.linear.sinkhorn.SinkhornOutput`. - kwargs: holds the kwargs to instantiate the or - :class:`~ott.solvers.linear.sinkhorn.Sinkhorn` solver to - compute the regularized OT cost. 
- - Returns: - The Monge gap value and optionally the - :class:`~ott.solvers.linear.sinkhorn.SinkhornOutput` - """ - cost_fn = costs.SqEuclidean() if cost_fn is None else cost_fn - geom = pointcloud.PointCloud( - x=source, - y=target, - cost_fn=cost_fn, - epsilon=epsilon, - relative_epsilon=relative_epsilon, - scale_cost=scale_cost, - ) - gt_displacement_cost = jnp.mean(jax.vmap(cost_fn)(source, target)) - out = linear.solve(geom=geom, **kwargs) - loss = gt_displacement_cost - out.ent_reg_cost - return (loss, out) if return_output else loss diff --git a/src/ott/neural/gaps/__init__.py b/src/ott/neural/methods/__init__.py similarity index 93% rename from src/ott/neural/gaps/__init__.py rename to src/ott/neural/methods/__init__.py index 0ba36da05..a5836f921 100644 --- a/src/ott/neural/gaps/__init__.py +++ b/src/ott/neural/methods/__init__.py @@ -11,4 +11,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from . import map_estimator, monge_gap +from . import monge_gap, neuraldual diff --git a/src/ott/neural/flow_models/__init__.py b/src/ott/neural/methods/flows/__init__.py similarity index 92% rename from src/ott/neural/flow_models/__init__.py rename to src/ott/neural/methods/flows/__init__.py index a6239fa06..f5bba4cc5 100644 --- a/src/ott/neural/flow_models/__init__.py +++ b/src/ott/neural/methods/flows/__init__.py @@ -11,4 +11,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from . import flows, genot, models, otfm, utils +from . 
import dynamics, genot, otfm diff --git a/src/ott/neural/flow_models/flows.py b/src/ott/neural/methods/flows/dynamics.py similarity index 98% rename from src/ott/neural/flow_models/flows.py rename to src/ott/neural/methods/flows/dynamics.py index 2cde34833..3ca60168c 100644 --- a/src/ott/neural/flow_models/flows.py +++ b/src/ott/neural/methods/flows/dynamics.py @@ -20,7 +20,7 @@ "BaseFlow", "StraightFlow", "ConstantNoiseFlow", - "BrownianNoiseFlow", + "BrownianBridge", ] @@ -140,8 +140,8 @@ def compute_sigma_t(self, t: jnp.ndarray) -> jnp.ndarray: return jnp.full_like(t, fill_value=self.sigma) -class BrownianNoiseFlow(StraightFlow): - r"""Brownian Bridge Flow. +class BrownianBridge(StraightFlow): + r"""Brownian Bridge. Sampler for sampling noise implicitly defined by a Schrödinger Bridge problem with parameter :math:`\sigma` such that diff --git a/src/ott/neural/flow_models/genot.py b/src/ott/neural/methods/flows/genot.py similarity index 94% rename from src/ott/neural/flow_models/genot.py rename to src/ott/neural/methods/flows/genot.py index a6fb30c91..a3bad5902 100644 --- a/src/ott/neural/flow_models/genot.py +++ b/src/ott/neural/methods/flows/genot.py @@ -23,8 +23,9 @@ from flax.training import train_state from ott import utils -from ott.neural.flow_models import flows, models -from ott.neural.flow_models import utils as flow_utils +from ott.neural.methods.flows import dynamics +from ott.neural.networks import velocity_field +from ott.solvers import utils as solver_utils __all__ = ["GENOT"] @@ -45,7 +46,7 @@ class GENOT: the unbalanced setting. Args: - velocity_field: Vector field parameterized by a neural network. + vf: Vector field parameterized by a neural network. flow: Flow between the latent and the target distributions. 
data_match_fn: Function to match samples from the source and the target distributions with a ``(src_lin, tgt_lin, src_quad, tgt_quad) -> matching`` @@ -70,15 +71,15 @@ class GENOT: def __init__( self, - velocity_field: models.VelocityField, - flow: flows.BaseFlow, + vf: velocity_field.VelocityField, + flow: dynamics.BaseFlow, data_match_fn: DataMatchFn_t, *, source_dim: int, target_dim: int, condition_dim: Optional[int] = None, time_sampler: Callable[[jax.Array, int], - jnp.ndarray] = flow_utils.uniform_sampler, + jnp.ndarray] = solver_utils.uniform_sampler, latent_noise_fn: Optional[Callable[[jax.Array, Tuple[int, ...]], jnp.ndarray]] = None, latent_match_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray], @@ -86,12 +87,12 @@ def __init__( n_samples_per_src: int = 1, **kwargs: Any, ): - self.vf = velocity_field + self.vf = vf self.flow = flow self.data_match_fn = data_match_fn self.time_sampler = time_sampler if latent_noise_fn is None: - latent_noise_fn = functools.partial(multivariate_normal, dim=target_dim) + latent_noise_fn = functools.partial(_multivariate_normal, dim=target_dim) self.latent_noise_fn = latent_noise_fn self.latent_match_fn = latent_match_fn self.n_samples_per_src = n_samples_per_src @@ -193,7 +194,7 @@ def prepare_data( latent = self.latent_noise_fn(rng_noise, (n, self.n_samples_per_src)) tmat = self.data_match_fn(*matching_data) # (n, m) - src_ixs, tgt_ixs = flow_utils.sample_conditional( # (n, k), (m, k) + src_ixs, tgt_ixs = solver_utils.sample_conditional( # (n, k), (m, k) rng_resample, tmat, k=self.n_samples_per_src, @@ -233,7 +234,7 @@ def resample( ) -> Tuple[jnp.ndarray, Optional[jnp.ndarray], jnp.ndarray]: tmat = self.latent_match_fn(latent, tgt) # (n, k) - src_ixs, tgt_ixs = flow_utils.sample_joint(rng, tmat) # (n,), (m,) + src_ixs, tgt_ixs = solver_utils.sample_joint(rng, tmat) # (n,), (m,) src, tgt = src[src_ixs], tgt[tgt_ixs] if src_cond is not None: src_cond = src_cond[src_ixs] @@ -304,7 +305,7 @@ def solve_ode(x: jnp.ndarray, cond: 
jnp.ndarray) -> jnp.ndarray: return jax.jit(jax.vmap(solve_ode))(latent, source) -def multivariate_normal( +def _multivariate_normal( rng: jax.Array, shape: Tuple[int, ...], dim: int, diff --git a/src/ott/neural/flow_models/otfm.py b/src/ott/neural/methods/flows/otfm.py similarity index 93% rename from src/ott/neural/flow_models/otfm.py rename to src/ott/neural/methods/flows/otfm.py index f6ccd6e1b..ebeb138b5 100644 --- a/src/ott/neural/flow_models/otfm.py +++ b/src/ott/neural/methods/flows/otfm.py @@ -22,8 +22,9 @@ from flax.training import train_state from ott import utils -from ott.neural.flow_models import flows, models -from ott.neural.flow_models import utils as flow_utils +from ott.neural.methods.flows import dynamics +from ott.neural.networks import velocity_field +from ott.solvers import utils as solver_utils __all__ = ["OTFlowMatching"] @@ -34,7 +35,7 @@ class OTFlowMatching: With an extension to OT-FM :cite:`tong:23,pooladian:23`. Args: - velocity_field: Vector field parameterized by a neural network. + vf: Vector field parameterized by a neural network. flow: Flow between the source and the target distributions. match_fn: Function to match samples from the source and the target distributions. It has a ``(src, tgt) -> matching`` signature. 
@@ -45,15 +46,15 @@ class OTFlowMatching: def __init__( self, - velocity_field: models.VelocityField, - flow: flows.BaseFlow, + vf: velocity_field.VelocityField, + flow: dynamics.BaseFlow, match_fn: Optional[Callable[[jnp.ndarray, jnp.ndarray], jnp.ndarray]] = None, time_sampler: Callable[[jax.Array, int], - jnp.ndarray] = flow_utils.uniform_sampler, + jnp.ndarray] = solver_utils.uniform_sampler, **kwargs: Any, ): - self.vf = velocity_field + self.vf = vf self.flow = flow self.time_sampler = time_sampler self.match_fn = match_fn @@ -127,7 +128,7 @@ def __call__( # noqa: D102 if self.match_fn is not None: tmat = self.match_fn(src, tgt) - src_ixs, tgt_ixs = flow_utils.sample_joint(rng_resample, tmat) + src_ixs, tgt_ixs = solver_utils.sample_joint(rng_resample, tmat) src, tgt = src[src_ixs], tgt[tgt_ixs] src_cond = None if src_cond is None else src_cond[src_ixs] diff --git a/src/ott/neural/gaps/map_estimator.py b/src/ott/neural/methods/monge_gap.py similarity index 63% rename from src/ott/neural/gaps/map_estimator.py rename to src/ott/neural/methods/monge_gap.py index 61c24f0c3..c108a3509 100644 --- a/src/ott/neural/gaps/map_estimator.py +++ b/src/ott/neural/methods/monge_gap.py @@ -18,6 +18,7 @@ Callable, Dict, Iterator, + Literal, Optional, Sequence, Tuple, @@ -32,12 +33,140 @@ from flax.training import train_state from ott import utils -from ott.neural.duality import neuraldual +from ott.geometry import costs, pointcloud +from ott.neural.networks import potentials +from ott.solvers import linear +from ott.solvers.linear import sinkhorn + +__all__ = ["monge_gap", "monge_gap_from_samples", "MongeGapEstimator"] + + +def monge_gap( + map_fn: Callable[[jnp.ndarray], jnp.ndarray], + reference_points: jnp.ndarray, + cost_fn: Optional[costs.CostFn] = None, + epsilon: Optional[float] = None, + relative_epsilon: Optional[bool] = None, + scale_cost: Union[int, float, Literal["mean", "max_cost", "median"]] = 1.0, + return_output: bool = False, + **kwargs: Any +) -> 
Union[float, Tuple[float, sinkhorn.SinkhornOutput]]: + r"""Monge gap regularizer :cite:`uscidda:23`. + + For a cost function :math:`c` and empirical reference measure + :math:`\hat{\rho}_n=\frac{1}{n}\sum_{i=1}^n \delta_{x_i}`, the + (entropic) Monge gap of a map function + :math:`T:\mathbb{R}^d\rightarrow\mathbb{R}^d` is defined as: -__all__ = ["MapEstimator"] + .. math:: + \mathcal{M}^c_{\hat{\rho}_n, \varepsilon} (T) + = \frac{1}{n} \sum_{i=1}^n c(x_i, T(x_i)) - + W_{c, \varepsilon}(\hat{\rho}_n, T \sharp \hat{\rho}_n) + See :cite:`uscidda:23` Eq. (8). This function is a thin wrapper that calls + :func:`~ott.neural.losses.monge_gap_from_samples`. -class MapEstimator: + Args: + map_fn: Callable corresponding to map :math:`T` in definition above. The + callable should be vectorized (e.g. using :func:`jax.vmap`), i.e, + able to process a *batch* of vectors of size `d`, namely + ``map_fn`` applied to an array returns an array of the same shape. + reference_points: Array of `[n,d]` points, :math:`\hat\rho_n` in paper + cost_fn: An object of class :class:`~ott.geometry.costs.CostFn`. + epsilon: Regularization parameter. See + :class:`~ott.geometry.pointcloud.PointCloud` + relative_epsilon: when `False`, the parameter ``epsilon`` specifies the + value of the entropic regularization parameter. When `True`, ``epsilon`` + refers to a fraction of the + :attr:`~ott.geometry.pointcloud.PointCloud.mean_cost_matrix`, which is + computed adaptively using ``source`` and ``target`` points. + scale_cost: option to rescale the cost matrix. Implemented scalings are + 'median', 'mean' and 'max_cost'. Alternatively, a float factor can be + given to rescale the cost such that ``cost_matrix /= scale_cost``. + return_output: boolean to also return the + :class:`~ott.solvers.linear.sinkhorn.SinkhornOutput`. + kwargs: holds the kwargs to instantiate the or + :class:`~ott.solvers.linear.sinkhorn.Sinkhorn` solver to + compute the regularized OT cost. 
+ + Returns: + The Monge gap value and optionally the + :class:`~ott.solvers.linear.sinkhorn.SinkhornOutput` + """ + target = map_fn(reference_points) + return monge_gap_from_samples( + source=reference_points, + target=target, + cost_fn=cost_fn, + epsilon=epsilon, + relative_epsilon=relative_epsilon, + scale_cost=scale_cost, + return_output=return_output, + **kwargs + ) + + +def monge_gap_from_samples( + source: jnp.ndarray, + target: jnp.ndarray, + cost_fn: Optional[costs.CostFn] = None, + epsilon: Optional[float] = None, + relative_epsilon: Optional[bool] = None, + scale_cost: Union[int, float, Literal["mean", "max_cost", "median"]] = 1.0, + return_output: bool = False, + **kwargs: Any +) -> Union[float, Tuple[float, sinkhorn.SinkhornOutput]]: + r"""Monge gap, instantiated in terms of samples before / after applying map. + + .. math:: + \frac{1}{n} \sum_{i=1}^n c(x_i, y_i)) - + W_{c, \varepsilon}(\frac{1}{n}\sum_i \delta_{x_i}, + \frac{1}{n}\sum_i \delta_{y_i}) + + where :math:`W_{c, \varepsilon}` is an entropy-regularized optimal transport + cost, the :attr:`~ott.solvers.linear.sinkhorn.SinkhornOutput.ent_reg_cost`. + + Args: + source: samples from first measure, array of shape ``[n, d]``. + target: samples from second measure, array of shape ``[n, d]``. + cost_fn: a cost function between two points in dimension :math:`d`. + If :obj:`None`, :class:`~ott.geometry.costs.SqEuclidean` is used. + epsilon: Regularization parameter. See + :class:`~ott.geometry.pointcloud.PointCloud` + relative_epsilon: when `False`, the parameter ``epsilon`` specifies the + value of the entropic regularization parameter. When `True`, ``epsilon`` + refers to a fraction of the + :attr:`~ott.geometry.pointcloud.PointCloud.mean_cost_matrix`, which is + computed adaptively using ``source`` and ``target`` points. + scale_cost: option to rescale the cost matrix. Implemented scalings are + 'median', 'mean' and 'max_cost'. 
Alternatively, a float factor can be + given to rescale the cost such that ``cost_matrix /= scale_cost``. + return_output: boolean to also return the + :class:`~ott.solvers.linear.sinkhorn.SinkhornOutput`. + kwargs: holds the kwargs to instantiate the or + :class:`~ott.solvers.linear.sinkhorn.Sinkhorn` solver to + compute the regularized OT cost. + + Returns: + The Monge gap value and optionally the + :class:`~ott.solvers.linear.sinkhorn.SinkhornOutput` + """ + cost_fn = costs.SqEuclidean() if cost_fn is None else cost_fn + geom = pointcloud.PointCloud( + x=source, + y=target, + cost_fn=cost_fn, + epsilon=epsilon, + relative_epsilon=relative_epsilon, + scale_cost=scale_cost, + ) + gt_displacement_cost = jnp.mean(jax.vmap(cost_fn)(source, target)) + out = linear.solve(geom=geom, **kwargs) + loss = gt_displacement_cost - out.ent_reg_cost + return (loss, out) if return_output else loss + + +class MongeGapEstimator: r"""Mapping estimator between probability measures. It estimates a map :math:`T` by minimizing the loss: @@ -78,7 +207,7 @@ class MapEstimator: def __init__( self, dim_data: int, - model: neuraldual.BaseW2NeuralDual, + model: potentials.BasePotential, optimizer: Optional[optax.OptState] = None, fitting_loss: Optional[Callable[[jnp.ndarray, jnp.ndarray], Tuple[float, Optional[Any]]]] = None, @@ -114,7 +243,7 @@ def __init__( def setup( self, dim_data: int, - neural_net: neuraldual.BaseW2NeuralDual, + neural_net: potentials.BasePotential, optimizer: optax.OptState, ): """Setup all components required to train the network.""" diff --git a/src/ott/neural/duality/neuraldual.py b/src/ott/neural/methods/neuraldual.py similarity index 80% rename from src/ott/neural/duality/neuraldual.py rename to src/ott/neural/methods/neuraldual.py index c00acb76c..6845224f4 100644 --- a/src/ott/neural/duality/neuraldual.py +++ b/src/ott/neural/methods/neuraldual.py @@ -11,10 +11,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License. -import abc import warnings from typing import ( - Any, Callable, Dict, Iterator, @@ -28,138 +26,18 @@ import jax import jax.numpy as jnp -import flax.linen as nn import optax -from flax import struct -from flax.core import frozen_dict -from flax.training import train_state from ott import utils from ott.geometry import costs -from ott.neural.duality import conjugate, models -from ott.problems.linear import potentials +from ott.neural.networks import icnn, potentials +from ott.neural.networks.layers import conjugate +from ott.problems.linear import potentials as dual_potentials -__all__ = ["W2NeuralTrainState", "BaseW2NeuralDual", "W2NeuralDual"] +__all__ = ["W2NeuralDual"] Train_t = Dict[Literal["train_logs", "valid_logs"], Dict[str, List[float]]] -Callback_t = Callable[[int, potentials.DualPotentials], None] - -PotentialValueFn_t = Callable[[jnp.ndarray], jnp.ndarray] -PotentialGradientFn_t = Callable[[jnp.ndarray], jnp.ndarray] - - -class W2NeuralTrainState(train_state.TrainState): - """Adds information about the model's value and gradient to the state. - - This extends :class:`~flax.training.train_state.TrainState` to include - the potential methods from the - :class:`~ott.neural.duality.neuraldual.BaseW2NeuralDual` used during training. 
- - Args: - potential_value_fn: the potential's value function - potential_gradient_fn: the potential's gradient function - """ - potential_value_fn: Callable[ - [frozen_dict.FrozenDict[str, jnp.ndarray], Optional[PotentialValueFn_t]], - PotentialValueFn_t] = struct.field(pytree_node=False) - potential_gradient_fn: Callable[[frozen_dict.FrozenDict[str, jnp.ndarray]], - PotentialGradientFn_t] = struct.field( - pytree_node=False - ) - - -class BaseW2NeuralDual(abc.ABC, nn.Module): - """Base class for the neural solver models.""" - - @property - @abc.abstractmethod - def is_potential(self) -> bool: - """Indicates if the module implements a potential value or a vector field. - - Returns: - ``True`` if the module defines a potential, ``False`` if it defines a - vector field. - """ - - def potential_value_fn( - self, - params: frozen_dict.FrozenDict[str, jnp.ndarray], - other_potential_value_fn: Optional[PotentialValueFn_t] = None, - ) -> PotentialValueFn_t: - r"""Return a function giving the value of the potential. - - Applies the module if :attr:`is_potential` is ``True``, otherwise - constructs the value of the potential from the gradient with - - .. math:: - - g(y) = -f(\nabla_y g(y)) + y^T \nabla_y g(y) - - where :math:`\nabla_y g(y)` is detached for the envelope theorem - :cite:`danskin:67,bertsekas:71` - to give the appropriate first derivatives of this construction. - - Args: - params: parameters of the module - other_potential_value_fn: function giving the value of the other - potential. Only needed when :attr:`is_potential` is ``False``. - - Returns: - A function that can be evaluated to obtain a potential value, or a linear - interpolation of a potential. - """ - if self.is_potential: - return lambda x: self.apply({"params": params}, x) - - assert other_potential_value_fn is not None, \ - "The value of the gradient-based potential depends " \ - "on the value of the other potential." 
- - def value_fn(x: jnp.ndarray) -> jnp.ndarray: - squeeze = x.ndim == 1 - if squeeze: - x = jnp.expand_dims(x, 0) - grad_g_x = jax.lax.stop_gradient(self.apply({"params": params}, x)) - value = -other_potential_value_fn(grad_g_x) + \ - jax.vmap(jnp.dot)(grad_g_x, x) - return value.squeeze(0) if squeeze else value - - return value_fn - - def potential_gradient_fn( - self, - params: frozen_dict.FrozenDict[str, jnp.ndarray], - ) -> PotentialGradientFn_t: - """Return a function returning a vector or the gradient of the potential. - - Args: - params: parameters of the module - - Returns: - A function that can be evaluated to obtain the potential's gradient - """ - if self.is_potential: - return jax.vmap(jax.grad(self.potential_value_fn(params))) - return lambda x: self.apply({"params": params}, x) - - def create_train_state( - self, - rng: jax.Array, - optimizer: optax.OptState, - input: Union[int, Tuple[int, ...]], - **kwargs: Any, - ) -> W2NeuralTrainState: - """Create initial training state.""" - params = self.init(rng, jnp.ones(input))["params"] - - return W2NeuralTrainState.create( - apply_fn=self.apply, - params=params, - tx=optimizer, - potential_value_fn=self.potential_value_fn, - potential_gradient_fn=self.potential_gradient_fn, - **kwargs, - ) +Callback_t = Callable[[int, dual_potentials.DualPotentials], None] class W2NeuralDual: @@ -228,8 +106,8 @@ class W2NeuralDual: def __init__( self, dim_data: int, - neural_f: Optional[BaseW2NeuralDual] = None, - neural_g: Optional[BaseW2NeuralDual] = None, + neural_f: Optional[potentials.BasePotential] = None, + neural_g: Optional[potentials.BasePotential] = None, optimizer_f: Optional[optax.OptState] = None, optimizer_g: Optional[optax.OptState] = None, num_train_iters: int = 20000, @@ -266,9 +144,9 @@ def __init__( # set default neural architectures if neural_f is None: - neural_f = models.ICNN(dim_data=dim_data, dim_hidden=[64, 64, 64, 64]) + neural_f = icnn.ICNN(dim_data=dim_data, dim_hidden=[64, 64, 64, 64]) if 
neural_g is None: - neural_g = models.ICNN(dim_data=dim_data, dim_hidden=[64, 64, 64, 64]) + neural_g = icnn.ICNN(dim_data=dim_data, dim_hidden=[64, 64, 64, 64]) self.neural_f = neural_f self.neural_g = neural_g @@ -285,8 +163,8 @@ def __init__( def setup( self, rng: jax.Array, - neural_f: BaseW2NeuralDual, - neural_g: BaseW2NeuralDual, + neural_f: potentials.BasePotential, + neural_g: potentials.BasePotential, dim_data: int, optimizer_f: optax.OptState, optimizer_g: optax.OptState, @@ -301,13 +179,13 @@ def setup( f"the `W2NeuralDual` setting, with positive weights " \ f"being {self.pos_weights}." if isinstance( - neural_f, models.ICNN + neural_f, icnn.ICNN ) and neural_f.pos_weights is not self.pos_weights: warnings.warn(warn_str, stacklevel=2) neural_f.pos_weights = self.pos_weights if isinstance( - neural_g, models.ICNN + neural_g, icnn.ICNN ) and neural_g.pos_weights is not self.pos_weights: warnings.warn(warn_str, stacklevel=2) neural_g.pos_weights = self.pos_weights @@ -325,7 +203,7 @@ def setup( # default to using back_and_forth with the non-convex models if self.back_and_forth is None: - self.back_and_forth = isinstance(neural_f, models.PotentialMLP) + self.back_and_forth = isinstance(neural_f, potentials.PotentialMLP) if self.num_inner_iters == 1 and self.parallel_updates: self.train_step_parallel = self.get_step_fn( @@ -359,8 +237,8 @@ def __call__( # noqa: D102 validloader_source: Iterator[jnp.ndarray], validloader_target: Iterator[jnp.ndarray], callback: Optional[Callback_t] = None, - ) -> Union[potentials.DualPotentials, Tuple[potentials.DualPotentials, - Train_t]]: + ) -> Union[dual_potentials.DualPotentials, + Tuple[dual_potentials.DualPotentials, Train_t]]: logs = self.train_fn( trainloader_source, trainloader_target, @@ -643,7 +521,7 @@ def step_fn(state_f, state_g, batch): def to_dual_potentials( self, finetune_g: bool = True - ) -> potentials.DualPotentials: + ) -> dual_potentials.DualPotentials: r"""Return the Kantorovich dual potentials from 
the trained potentials. Args: @@ -664,7 +542,7 @@ def g_value_finetuned(y: jnp.ndarray) -> jnp.ndarray: ) return -f_value(grad_g_y) + jnp.dot(grad_g_y, y) - return potentials.DualPotentials( + return dual_potentials.DualPotentials( f=f_value, g=g_value_prediction if not finetune_g or self.conjugate_solver is None else g_value_finetuned, diff --git a/src/ott/neural/networks/__init__.py b/src/ott/neural/networks/__init__.py new file mode 100644 index 000000000..5f2fd8636 --- /dev/null +++ b/src/ott/neural/networks/__init__.py @@ -0,0 +1,14 @@ +# Copyright OTT-JAX +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from . import icnn, layers, potentials, velocity_field diff --git a/src/ott/neural/networks/icnn.py b/src/ott/neural/networks/icnn.py new file mode 100644 index 000000000..c6896dac4 --- /dev/null +++ b/src/ott/neural/networks/icnn.py @@ -0,0 +1,160 @@ +# Copyright OTT-JAX +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from typing import Any, Callable, Optional, Sequence, Tuple, Union + +import jax +import jax.numpy as jnp + +from flax import linen as nn + +from ott.neural.networks import potentials +from ott.neural.networks.layers import posdef + +__all__ = ["ICNN"] + +DEFAULT_KERNEL_INIT = lambda *a, **k: nn.initializers.normal()(*a, **k) +DEFAULT_RECTIFIER = nn.activation.relu +DEFAULT_ACTIVATION = nn.activation.relu + + +class ICNN(potentials.BasePotential): + """Input convex neural network (ICNN). + + Implementation of input convex neural networks as introduced in + :cite:`amos:17` with initialization schemes proposed by :cite:`bunne:22`. + + Args: + dim_data: data dimensionality. + dim_hidden: sequence specifying size of hidden dimensions. The + output dimension of the last layer is 1 by default. + ranks: ranks of the matrices :math:`A_i` used as low-rank factors + for the quadratic potentials. If a sequence is passed, it must contain + ``len(dim_hidden) + 2`` elements, where the last 2 elements correspond + to the ranks of the final layer with dimension 1 and the potentials, + respectively. + init_fn: Initializer for the kernel weight matrices. + The default is :func:`~flax.linen.initializers.normal`. + act_fn: choice of activation function used in network architecture, + needs to be convex. The default is :func:`~flax.linen.activation.relu`. + pos_weights: Enforce positive weights with a projection. + If :obj:`False`, the positive weights should be enforced with clipping + or regularization in the loss. + rectifier_fn: function to ensure the non negativity of the weights. + The default is :func:`~flax.linen.activation.relu`. + gaussian_map_samples: Tuple of source and target points, used to initialize + the ICNN to mimic the linear Bures map that morphs the (Gaussian + approximation) of the input measure to that of the target measure. If + :obj:`None`, the identity initialization is used, and ICNN mimics half the + squared Euclidean norm. 
+ """ + + dim_data: int + dim_hidden: Sequence[int] + ranks: Union[int, Tuple[int, ...]] = 1 + init_fn: Callable[[jax.Array, Tuple[int, ...], Any], + jnp.ndarray] = DEFAULT_KERNEL_INIT + act_fn: Callable[[jnp.ndarray], jnp.ndarray] = DEFAULT_ACTIVATION + pos_weights: bool = False + rectifier_fn: Callable[[jnp.ndarray], jnp.ndarray] = DEFAULT_RECTIFIER + gaussian_map_samples: Optional[Tuple[jnp.ndarray, jnp.ndarray]] = None + + def setup(self) -> None: # noqa: D102 + dim_hidden = list(self.dim_hidden) + [1] + *ranks, pos_def_rank = self._normalize_ranks() + + # final layer computes average, still with normalized rescaling + self.w_zs = [self._get_wz(dim) for dim in dim_hidden[1:]] + # subsequent layers re-injected into convex functions + self.w_xs = [ + self._get_wx(dim, rank) for dim, rank in zip(dim_hidden, ranks) + ] + self.pos_def_potentials = self._get_pos_def_potentials(pos_def_rank) + + @nn.compact + def __call__(self, x: jnp.ndarray) -> float: # noqa: D102 + w_x, *w_xs = self.w_xs + assert len(self.w_zs) == len(w_xs), (len(self.w_zs), len(w_xs)) + + z = self.act_fn(w_x(x)) + for w_z, w_x in zip(self.w_zs, w_xs): + z = self.act_fn(w_z(z) + w_x(x)) + z = z + self.pos_def_potentials(x) + + return z.squeeze() + + def _get_wz(self, dim: int) -> nn.Module: + if self.pos_weights: + return posdef.PositiveDense( + dim, + kernel_init=self.init_fn, + use_bias=False, + rectifier_fn=self.rectifier_fn, + ) + + return nn.Dense( + dim, + kernel_init=self.init_fn, + use_bias=False, + ) + + def _get_wx(self, dim: int, rank: int) -> nn.Module: + return posdef.PosDefPotentials( + rank=rank, + num_potentials=dim, + use_linear=True, + use_bias=True, + kernel_diag_init=nn.initializers.zeros, + kernel_lr_init=self.init_fn, + kernel_linear_init=self.init_fn, + bias_init=nn.initializers.zeros, + ) + + def _get_pos_def_potentials(self, rank: int) -> posdef.PosDefPotentials: + kwargs = { + "num_potentials": 1, + "use_linear": True, + "use_bias": True, + "bias_init": 
nn.initializers.zeros + } + + if self.gaussian_map_samples is None: + return posdef.PosDefPotentials( + rank=rank, + kernel_diag_init=nn.initializers.ones, + kernel_lr_init=nn.initializers.zeros, + kernel_linear_init=nn.initializers.zeros, + **kwargs, + ) + + source, target = self.gaussian_map_samples + return posdef.PosDefPotentials.init_from_samples( + source, + target, + rank=self.dim_data, + kernel_diag_init=nn.initializers.zeros, + **kwargs, + ) + + def _normalize_ranks(self) -> Tuple[int, ...]: + # +2 for the newly added layer with 1 + the final potentials + n_ranks = len(self.dim_hidden) + 2 + if isinstance(self.ranks, int): + return (self.ranks,) * n_ranks + + assert len(self.ranks) == n_ranks, (len(self.ranks), n_ranks) + return tuple(self.ranks) + + @property + def is_potential(self) -> bool: # noqa: D102 + return True diff --git a/src/ott/neural/duality/__init__.py b/src/ott/neural/networks/layers/__init__.py similarity index 91% rename from src/ott/neural/duality/__init__.py rename to src/ott/neural/networks/layers/__init__.py index ef76b42fa..237c5f275 100644 --- a/src/ott/neural/duality/__init__.py +++ b/src/ott/neural/networks/layers/__init__.py @@ -11,4 +11,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from . import conjugate, layers, models, neuraldual +from . 
import conjugate, posdef, time_encoder diff --git a/src/ott/neural/duality/conjugate.py b/src/ott/neural/networks/layers/conjugate.py similarity index 100% rename from src/ott/neural/duality/conjugate.py rename to src/ott/neural/networks/layers/conjugate.py diff --git a/src/ott/neural/duality/layers.py b/src/ott/neural/networks/layers/posdef.py similarity index 99% rename from src/ott/neural/duality/layers.py rename to src/ott/neural/networks/layers/posdef.py index 6ed857452..41663ffe3 100644 --- a/src/ott/neural/duality/layers.py +++ b/src/ott/neural/networks/layers/posdef.py @@ -16,7 +16,7 @@ import jax import jax.numpy as jnp -import flax.linen as nn +from flax import linen as nn __all__ = ["PositiveDense", "PosDefPotentials"] @@ -25,7 +25,6 @@ Dtype = Any Array = jnp.ndarray -# wrap to silence docs linter DEFAULT_KERNEL_INIT = lambda *a, **k: nn.initializers.lecun_normal()(*a, **k) DEFAULT_BIAS_INIT = nn.initializers.zeros DEFAULT_RECTIFIER = nn.activation.relu diff --git a/src/ott/neural/networks/layers/time_encoder.py b/src/ott/neural/networks/layers/time_encoder.py new file mode 100644 index 000000000..b02bd125c --- /dev/null +++ b/src/ott/neural/networks/layers/time_encoder.py @@ -0,0 +1,34 @@ +# Copyright OTT-JAX +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import jax.numpy as jnp + +__all__ = ["cyclical_time_encoder"] + + +def cyclical_time_encoder(t: jnp.ndarray, n_freqs: int = 128) -> jnp.ndarray: + r"""Encode time :math:`t` into a cyclical representation. + + Time :math:`t` is encoded as :math:`cos(\hat{t})` and :math:`sin(\hat{t})` + where :math:`\hat{t} = [2\pi t, 2\pi 2 t,\dots, 2\pi n_f t]`. + + Args: + t: Time of shape ``[n, 1]``. + n_freqs: Frequency :math:`n_f` of the cyclical encoding. + + Returns: + Encoded time of shape ``[n, 2 * n_freqs]``. + """ + freq = 2 * jnp.arange(n_freqs) * jnp.pi + t = freq * t + return jnp.concatenate([jnp.cos(t), jnp.sin(t)], axis=-1) diff --git a/src/ott/neural/networks/potentials.py b/src/ott/neural/networks/potentials.py new file mode 100644 index 000000000..6a08e0048 --- /dev/null +++ b/src/ott/neural/networks/potentials.py @@ -0,0 +1,185 @@ +# Copyright OTT-JAX +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import abc +from typing import Any, Callable, Optional, Sequence, Tuple, Union + +import jax +import jax.numpy as jnp + +import optax +from flax import linen as nn +from flax import struct +from flax.core import frozen_dict +from flax.training import train_state + +__all__ = ["PotentialTrainState", "BasePotential", "PotentialMLP"] + +PotentialValueFn_t = Callable[[jnp.ndarray], jnp.ndarray] +PotentialGradientFn_t = Callable[[jnp.ndarray], jnp.ndarray] + + +class PotentialTrainState(train_state.TrainState): + """Adds information about the model's value and gradient to the state. + + This extends :class:`~flax.training.train_state.TrainState` to include + the potential methods from the + :class:`~ott.neural.duality.neuraldual.BaseW2NeuralDual` used during training. + + Args: + potential_value_fn: the potential's value function + potential_gradient_fn: the potential's gradient function + """ + potential_value_fn: Callable[ + [frozen_dict.FrozenDict[str, jnp.ndarray], Optional[PotentialValueFn_t]], + PotentialValueFn_t] = struct.field(pytree_node=False) + potential_gradient_fn: Callable[[frozen_dict.FrozenDict[str, jnp.ndarray]], + PotentialGradientFn_t] = struct.field( + pytree_node=False + ) + + +class BasePotential(abc.ABC, nn.Module): + """Base class for the neural solver models.""" + + @property + @abc.abstractmethod + def is_potential(self) -> bool: + """Indicates if the module implements a potential value or a vector field. + + Returns: + ``True`` if the module defines a potential, ``False`` if it defines a + vector field. + """ + + def potential_value_fn( + self, + params: frozen_dict.FrozenDict[str, jnp.ndarray], + other_potential_value_fn: Optional[PotentialValueFn_t] = None, + ) -> PotentialValueFn_t: + r"""Return a function giving the value of the potential. + + Applies the module if :attr:`is_potential` is ``True``, otherwise + constructs the value of the potential from the gradient with + + .. 
math:: + + g(y) = -f(\nabla_y g(y)) + y^T \nabla_y g(y) + + where :math:`\nabla_y g(y)` is detached for the envelope theorem + :cite:`danskin:67,bertsekas:71` + to give the appropriate first derivatives of this construction. + + Args: + params: parameters of the module + other_potential_value_fn: function giving the value of the other + potential. Only needed when :attr:`is_potential` is ``False``. + + Returns: + A function that can be evaluated to obtain a potential value, or a linear + interpolation of a potential. + """ + if self.is_potential: + return lambda x: self.apply({"params": params}, x) + + assert other_potential_value_fn is not None, \ + "The value of the gradient-based potential depends " \ + "on the value of the other potential." + + def value_fn(x: jnp.ndarray) -> jnp.ndarray: + squeeze = x.ndim == 1 + if squeeze: + x = jnp.expand_dims(x, 0) + grad_g_x = jax.lax.stop_gradient(self.apply({"params": params}, x)) + value = -other_potential_value_fn(grad_g_x) + \ + jax.vmap(jnp.dot)(grad_g_x, x) + return value.squeeze(0) if squeeze else value + + return value_fn + + def potential_gradient_fn( + self, + params: frozen_dict.FrozenDict[str, jnp.ndarray], + ) -> PotentialGradientFn_t: + """Return a function returning a vector or the gradient of the potential. 
+ + Args: + params: parameters of the module + + Returns: + A function that can be evaluated to obtain the potential's gradient + """ + if self.is_potential: + return jax.vmap(jax.grad(self.potential_value_fn(params))) + return lambda x: self.apply({"params": params}, x) + + def create_train_state( + self, + rng: jax.Array, + optimizer: optax.OptState, + input: Union[int, Tuple[int, ...]], + **kwargs: Any, + ) -> PotentialTrainState: + """Create initial training state.""" + params = self.init(rng, jnp.ones(input))["params"] + + return PotentialTrainState.create( + apply_fn=self.apply, + params=params, + tx=optimizer, + potential_value_fn=self.potential_value_fn, + potential_gradient_fn=self.potential_gradient_fn, + **kwargs, + ) + + +class PotentialMLP(BasePotential): + """Potential MLP. + + Args: + dim_hidden: sequence specifying size of hidden dimensions. The output + dimension of the last layer is automatically set to 1 if + :attr:`is_potential` is ``True``, or the dimension of the input otherwise. + is_potential: Model the potential if ``True``, otherwise + model the gradient of the potential. + act_fn: Activation function. 
+ """ + + dim_hidden: Sequence[int] + is_potential: bool = True + act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.leaky_relu + + @nn.compact + def __call__(self, x: jnp.ndarray) -> jnp.ndarray: # noqa: D102 + squeeze = x.ndim == 1 + if squeeze: + x = jnp.expand_dims(x, 0) + assert x.ndim == 2, x.ndim + n_input = x.shape[-1] + + z = x + for n_hidden in self.dim_hidden: + Wx = nn.Dense(n_hidden, use_bias=True) + z = self.act_fn(Wx(z)) + + if self.is_potential: + Wx = nn.Dense(1, use_bias=True) + z = Wx(z).squeeze(-1) + + quad_term = 0.5 * jax.vmap(jnp.dot)(x, x) + z += quad_term + else: + Wx = nn.Dense(n_input, use_bias=True) + z = x + Wx(z) + + return z.squeeze(0) if squeeze else z diff --git a/src/ott/neural/flow_models/models.py b/src/ott/neural/networks/velocity_field.py similarity index 96% rename from src/ott/neural/flow_models/models.py rename to src/ott/neural/networks/velocity_field.py index a770b1fdd..55bbfabfc 100644 --- a/src/ott/neural/flow_models/models.py +++ b/src/ott/neural/networks/velocity_field.py @@ -16,11 +16,11 @@ import jax import jax.numpy as jnp -import flax.linen as nn import optax +from flax import linen as nn from flax.training import train_state -from ott.neural.flow_models import utils +from ott.neural.networks.layers import time_encoder __all__ = ["VelocityField"] @@ -50,7 +50,7 @@ class VelocityField(nn.Module): condition_dims: Optional[Sequence[int]] = None time_dims: Optional[Sequence[int]] = None time_encoder: Callable[[jnp.ndarray], - jnp.ndarray] = utils.cyclical_time_encoder + jnp.ndarray] = time_encoder.cyclical_time_encoder act_fn: Callable[[jnp.ndarray], jnp.ndarray] = nn.silu @nn.compact diff --git a/src/ott/solvers/__init__.py b/src/ott/solvers/__init__.py index 1303312f9..283fca465 100644 --- a/src/ott/solvers/__init__.py +++ b/src/ott/solvers/__init__.py @@ -11,4 +11,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License. -from . import linear, quadratic, was_solver +from . import linear, quadratic, utils, was_solver diff --git a/src/ott/neural/flow_models/utils.py b/src/ott/solvers/utils.py similarity index 90% rename from src/ott/neural/flow_models/utils.py rename to src/ott/solvers/utils.py index 6c67e45f0..6c48a2577 100644 --- a/src/ott/neural/flow_models/utils.py +++ b/src/ott/solvers/utils.py @@ -24,7 +24,6 @@ "match_quadratic", "sample_joint", "sample_conditional", - "cyclical_time_encoder", "uniform_sampler", ] @@ -150,24 +149,6 @@ def sample_conditional( return src_ixs, tgt_ixs -def cyclical_time_encoder(t: jnp.ndarray, n_freqs: int = 128) -> jnp.ndarray: - r"""Encode time :math:`t` into a cyclical representation. - - Time :math:`t` is encoded as :math:`cos(\hat{t})` and :math:`sin(\hat{t})` - where :math:`\hat{t} = [2\pi t, 2\pi 2 t,\dots, 2\pi n_f t]`. - - Args: - t: Time of shape ``[n, 1]``. - n_freqs: Frequency :math:`n_f` of the cyclical encoding. - - Returns: - Encoded time of shape ``[n, 2 * n_freqs]``. 
- """ - freq = 2 * jnp.arange(n_freqs) * jnp.pi - t = freq * t - return jnp.concatenate([jnp.cos(t), jnp.sin(t)], axis=-1) - - def uniform_sampler( rng: jax.Array, num_samples: int, diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/geometry/graph_test.py b/tests/geometry/graph_test.py index 46e4f825b..14485c3b6 100644 --- a/tests/geometry/graph_test.py +++ b/tests/geometry/graph_test.py @@ -20,9 +20,9 @@ import pytest import jax +import jax.experimental.sparse as jesp import jax.numpy as jnp import numpy as np -from jax.experimental import sparse from ott.geometry import geometry, graph from ott.problems.linear import linear_problem @@ -259,7 +259,7 @@ def callback( data: jnp.ndarray, rows: jnp.ndarray, cols: jnp.ndarray, shape: Tuple[int, int] ) -> float: - G = sparse.BCOO((data, jnp.c_[rows, cols]), shape=shape).todense() + G = jesp.BCOO((data, jnp.c_[rows, cols]), shape=shape).todense() geom = graph.Graph.from_graph(G, t=1.0) solver = sinkhorn.Sinkhorn(lse_mode=False, **kwargs) @@ -274,7 +274,7 @@ def callback( eps = 1e-3 G = random_graph(20, p=0.5) - G = sparse.BCOO.fromdense(G) + G = jesp.BCOO.fromdense(G) w, rows, cols = G.data, G.indices[:, 0], G.indices[:, 1] v_w = jax.random.normal(rng, shape=w.shape) diff --git a/tests/neural/meta_initializer_test.py b/tests/initializers/neural/meta_initializer_test.py similarity index 95% rename from tests/neural/meta_initializer_test.py rename to tests/initializers/neural/meta_initializer_test.py index 117ca6b22..3e04556f9 100644 --- a/tests/neural/meta_initializer_test.py +++ b/tests/initializers/neural/meta_initializer_test.py @@ -18,11 +18,11 @@ import jax import jax.numpy as jnp -import flax.linen as nn +from flax import linen as nn from ott.geometry import pointcloud from ott.initializers.linear import initializers as linear_init -from ott.neural.duality import models as nn_init +from ott.initializers.neural import meta_initializer as meta_init 
from ott.problems.linear import linear_problem from ott.solvers.linear import sinkhorn @@ -109,7 +109,7 @@ def test_meta_initializer(self, rng: jax.Array, lse_mode: bool): # overfit the initializer to the problem. meta_model = MetaMLP(n) - meta_initializer = nn_init.MetaInitializer(geom, meta_model) + meta_initializer = meta_init.MetaInitializer(geom, meta_model) for _ in range(50): _, _, meta_initializer.state = meta_initializer.update( meta_initializer.state, a=a, b=b diff --git a/tests/neural/__init__.py b/tests/neural/__init__.py index f642d8b21..278074b14 100644 --- a/tests/neural/__init__.py +++ b/tests/neural/__init__.py @@ -1,3 +1,16 @@ +# Copyright OTT-JAX +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
import pytest -_ = pytest.importorskip("flax") +_ = pytest.importorskip("ott.neural") diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index 92c23f6a6..f4c25c514 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -18,7 +18,7 @@ import numpy as np from torch.utils.data import DataLoader -from ott.neural.data import datasets +from ott.neural import datasets class OTLoader(NamedTuple): diff --git a/tests/neural/map_estimator_test.py b/tests/neural/map_estimator_test.py deleted file mode 100644 index cee66e40e..000000000 --- a/tests/neural/map_estimator_test.py +++ /dev/null @@ -1,88 +0,0 @@ -# Copyright OTT-JAX -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from typing import Optional - -import pytest - -import jax.numpy as jnp - -from ott import datasets -from ott.geometry import pointcloud -from ott.neural.duality import models -from ott.neural.gaps import map_estimator, monge_gap -from ott.tools import sinkhorn_divergence - - -@pytest.mark.fast() -class TestMapEstimator: - - def test_map_estimator_convergence(self): - """Tests convergence of a simple - map estimator with Sinkhorn divergence fitting loss - and Monge (coupling) gap regularizer. 
- """ - - # define the fitting loss and the regularizer - def fitting_loss( - samples: jnp.ndarray, - mapped_samples: jnp.ndarray, - ) -> Optional[float]: - r"""Sinkhorn divergence fitting loss.""" - div = sinkhorn_divergence.sinkhorn_divergence( - pointcloud.PointCloud, - x=samples, - y=mapped_samples, - ).divergence - return div, None - - def regularizer(x, y): - gap, out = monge_gap.monge_gap_from_samples(x, y, return_output=True) - return gap, out.n_iters - - # define the model - model = models.PotentialMLP(dim_hidden=[16, 8], is_potential=False) - - # generate data - train_dataset, valid_dataset, dim_data = ( - datasets.create_gaussian_mixture_samplers( - name_source="simple", - name_target="circle", - train_batch_size=30, - valid_batch_size=30, - ) - ) - - # fit the map - solver = map_estimator.MapEstimator( - dim_data=dim_data, - fitting_loss=fitting_loss, - regularizer=regularizer, - model=model, - regularizer_strength=1.0, - num_train_iters=15, - logging=True, - valid_freq=5, - ) - neural_state, logs = solver.train_map_estimator( - *train_dataset, *valid_dataset - ) - - # check if the loss has decreased during training - assert logs["train"]["total_loss"][0] > logs["train"]["total_loss"][-1] - - # check dimensionality of the mapped source - source = next(train_dataset.source_iter) - mapped_source = neural_state.apply_fn({"params": neural_state.params}, - source) - assert mapped_source.shape[1] == dim_data diff --git a/tests/neural/genot_test.py b/tests/neural/methods/genot_test.py similarity index 83% rename from tests/neural/genot_test.py rename to tests/neural/methods/genot_test.py index c37d91563..086ea7a80 100644 --- a/tests/neural/genot_test.py +++ b/tests/neural/methods/genot_test.py @@ -22,7 +22,9 @@ import optax -from ott.neural.flow_models import flows, genot, models, utils +from ott.neural.methods.flows import dynamics, genot +from ott.neural.networks import velocity_field +from ott.solvers import utils as solver_utils def data_match_fn( @@ 
-31,11 +33,13 @@ def data_match_fn( typ: Literal["lin", "quad", "fused"] ) -> jnp.ndarray: if typ == "lin": - return utils.match_linear(x=src_lin, y=tgt_lin) + return solver_utils.match_linear(x=src_lin, y=tgt_lin) if typ == "quad": - return utils.match_quadratic(xx=src_quad, yy=tgt_quad) + return solver_utils.match_quadratic(xx=src_quad, yy=tgt_quad) if typ == "fused": - return utils.match_quadratic(xx=src_quad, yy=tgt_quad, x=src_lin, y=tgt_lin) + return solver_utils.match_quadratic( + xx=src_quad, yy=tgt_quad, x=src_lin, y=tgt_lin + ) raise NotImplementedError(f"Unknown type: {typ}.") @@ -56,14 +60,14 @@ def test_genot(self, rng: jax.Array, dl: str, request): tgt_dim = dl.lin_dim + dl.quad_tgt_dim cond_dim = dl.cond_dim - vf = models.VelocityField( + vf = velocity_field.VelocityField( hidden_dims=[7, 7, 7], output_dims=[15, tgt_dim], condition_dims=None if cond_dim is None else [1, 3, 2], ) model = genot.GENOT( vf, - flow=flows.ConstantNoiseFlow(0.0), + flow=dynamics.ConstantNoiseFlow(0.0), data_match_fn=functools.partial(data_match_fn, typ=problem_type), source_dim=src_dim, target_dim=tgt_dim, diff --git a/tests/neural/losses_test.py b/tests/neural/methods/monge_gap_test.py similarity index 64% rename from tests/neural/losses_test.py rename to tests/neural/methods/monge_gap_test.py index e1e13f193..68d885537 100644 --- a/tests/neural/losses_test.py +++ b/tests/neural/methods/monge_gap_test.py @@ -11,14 +11,19 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+from typing import Optional + import pytest import jax +import jax.numpy as jnp import numpy as np -from ott.geometry import costs -from ott.neural.duality import models -from ott.neural.gaps import monge_gap +from ott import datasets +from ott.geometry import costs, pointcloud +from ott.neural.methods import monge_gap +from ott.neural.networks import potentials +from ott.tools import sinkhorn_divergence @pytest.mark.fast() @@ -34,7 +39,7 @@ def test_monge_gap_non_negativity( rng1, rng2 = jax.random.split(rng, 2) reference_points = jax.random.normal(rng1, (n_samples, n_features)) - model = models.PotentialMLP(dim_hidden=[8, 8], is_potential=False) + model = potentials.PotentialMLP(dim_hidden=[8, 8], is_potential=False) params = model.init(rng2, x=reference_points[0]) target = model.apply(params, reference_points) @@ -122,3 +127,67 @@ def test_monge_gap_from_samples_different_cost( np.testing.assert_array_equal( np.isfinite(monge_gap_from_samples_value_cost_fn), True ) + + +@pytest.mark.fast() +class TestMongeGapEstimator: + + def test_map_estimator_convergence(self): + """Tests convergence of a simple + map estimator with Sinkhorn divergence fitting loss + and Monge (coupling) gap regularizer. 
+ """ + + # define the fitting loss and the regularizer + def fitting_loss( + samples: jnp.ndarray, + mapped_samples: jnp.ndarray, + ) -> Optional[float]: + r"""Sinkhorn divergence fitting loss.""" + div = sinkhorn_divergence.sinkhorn_divergence( + pointcloud.PointCloud, + x=samples, + y=mapped_samples, + ).divergence + return div, None + + def regularizer(x, y): + gap, out = monge_gap.monge_gap_from_samples(x, y, return_output=True) + return gap, out.n_iters + + # define the model + model = potentials.PotentialMLP(dim_hidden=[16, 8], is_potential=False) + + # generate data + train_dataset, valid_dataset, dim_data = ( + datasets.create_gaussian_mixture_samplers( + name_source="simple", + name_target="circle", + train_batch_size=30, + valid_batch_size=30, + ) + ) + + # fit the map + solver = monge_gap.MongeGapEstimator( + dim_data=dim_data, + fitting_loss=fitting_loss, + regularizer=regularizer, + model=model, + regularizer_strength=1.0, + num_train_iters=15, + logging=True, + valid_freq=5, + ) + neural_state, logs = solver.train_map_estimator( + *train_dataset, *valid_dataset + ) + + # check if the loss has decreased during training + assert logs["train"]["total_loss"][0] > logs["train"]["total_loss"][-1] + + # check dimensionality of the mapped source + source = next(train_dataset.source_iter) + mapped_source = neural_state.apply_fn({"params": neural_state.params}, + source) + assert mapped_source.shape[1] == dim_data diff --git a/tests/neural/neuraldual_test.py b/tests/neural/methods/neuraldual_test.py similarity index 86% rename from tests/neural/neuraldual_test.py rename to tests/neural/methods/neuraldual_test.py index 5aef77aba..b0d847abb 100644 --- a/tests/neural/neuraldual_test.py +++ b/tests/neural/methods/neuraldual_test.py @@ -19,9 +19,11 @@ import numpy as np from ott import datasets -from ott.neural.duality import conjugate, models, neuraldual +from ott.neural.methods import neuraldual +from ott.neural.networks import icnn, potentials +from 
ott.neural.networks.layers import conjugate -ModelPair_t = Tuple[neuraldual.BaseW2NeuralDual, neuraldual.BaseW2NeuralDual] +ModelPair_t = Tuple[potentials.BasePotential, potentials.BasePotential] DatasetPair_t = Tuple[datasets.Dataset, datasets.Dataset] @@ -37,16 +39,16 @@ def ds(request: Tuple[str, str]) -> DatasetPair_t: def neural_models(request: str) -> ModelPair_t: if request.param == "icnns": return ( - models.ICNN(dim_data=2, - dim_hidden=[32]), models.ICNN(dim_data=2, dim_hidden=[32]) + icnn.ICNN(dim_data=2, + dim_hidden=[32]), icnn.ICNN(dim_data=2, dim_hidden=[32]) ) if request.param == "mlps": - return models.PotentialMLP(dim_hidden=[32] - ), models.PotentialMLP(dim_hidden=[32]), + return potentials.PotentialMLP(dim_hidden=[32] + ), potentials.PotentialMLP(dim_hidden=[32]), if request.param == "mlps-grad": return ( - models.PotentialMLP(dim_hidden=[32]), - models.PotentialMLP(is_potential=False, dim_hidden=[128]) + potentials.PotentialMLP(dim_hidden=[32]), + potentials.PotentialMLP(is_potential=False, dim_hidden=[128]) ) raise ValueError(f"Invalid request: {request.param}") @@ -82,7 +84,7 @@ def decreasing(losses: Sequence[float]) -> bool: train_dataset, valid_dataset = ds if test_gaussian_init: - neural_f = models.ICNN( + neural_f = icnn.ICNN( dim_data=2, dim_hidden=[32], gaussian_map_samples=[ @@ -90,7 +92,7 @@ def decreasing(losses: Sequence[float]) -> bool: next(train_dataset.target_iter) ] ) - neural_g = models.ICNN( + neural_g = icnn.ICNN( dim_data=2, dim_hidden=[32], gaussian_map_samples=[ diff --git a/tests/neural/otfm_test.py b/tests/neural/methods/otfm_test.py similarity index 85% rename from tests/neural/otfm_test.py rename to tests/neural/methods/otfm_test.py index 00619dc57..0eb311fa6 100644 --- a/tests/neural/otfm_test.py +++ b/tests/neural/methods/otfm_test.py @@ -19,7 +19,9 @@ import optax -from ott.neural.flow_models import flows, models, otfm, utils +from ott.neural.methods.flows import dynamics, otfm +from ott.neural.networks import 
velocity_field +from ott.solvers import utils as solver_utils class TestOTFlowMatching: @@ -29,15 +31,15 @@ def test_otfm(self, rng: jax.Array, dl: str, request): dl = request.getfixturevalue(dl) dim, cond_dim = dl.lin_dim, dl.cond_dim - neural_vf = models.VelocityField( + vf = velocity_field.VelocityField( hidden_dims=[5, 5, 5], output_dims=[7, dim], condition_dims=None if cond_dim is None else [4, 3, 2], ) fm = otfm.OTFlowMatching( - neural_vf, - flows.ConstantNoiseFlow(0.0), - match_fn=jax.jit(utils.match_linear), + vf, + dynamics.ConstantNoiseFlow(0.0), + match_fn=jax.jit(solver_utils.match_linear), rng=rng, optimizer=optax.adam(learning_rate=1e-3), condition_dim=cond_dim, diff --git a/tests/neural/icnn_test.py b/tests/neural/networks/icnn_test.py similarity index 93% rename from tests/neural/icnn_test.py rename to tests/neural/networks/icnn_test.py index a60682a06..b07e4994f 100644 --- a/tests/neural/icnn_test.py +++ b/tests/neural/networks/icnn_test.py @@ -17,7 +17,7 @@ import jax.numpy as jnp import numpy as np -from ott.neural.duality import models +from ott.neural.networks import icnn @pytest.mark.fast() @@ -29,7 +29,7 @@ def test_icnn_convexity(self, rng: jax.Array): dim_hidden = (64, 64) # define icnn model - model = models.ICNN(n_features, dim_hidden=dim_hidden) + model = icnn.ICNN(n_features, dim_hidden=dim_hidden) # initialize model rng1, rng2 = jax.random.split(rng, 2) @@ -55,7 +55,7 @@ def test_icnn_hessian(self, rng: jax.Array): # define icnn model n_features = 2 dim_hidden = (64, 64) - model = models.ICNN(n_features, dim_hidden=dim_hidden) + model = icnn.ICNN(n_features, dim_hidden=dim_hidden) # initialize model rng1, rng2 = jax.random.split(rng) diff --git a/tests/tools/plot_test.py b/tests/tools/plot_test.py index 1f9f9ba01..2d8ba55ac 100644 --- a/tests/tools/plot_test.py +++ b/tests/tools/plot_test.py @@ -16,14 +16,13 @@ import matplotlib.pyplot as plt -import ott from ott.geometry import pointcloud from ott.problems.linear import 
linear_problem from ott.solvers.linear import sinkhorn from ott.tools import plot -class TestSoftSort: +class TestPlotting: def test_plot(self, monkeypatch): monkeypatch.setattr(plt, "show", lambda: None) @@ -44,5 +43,5 @@ def test_plot(self, monkeypatch): plott = plot.Plot() _ = plott(ots[0]) fig = plt.figure(figsize=(8, 5)) - plott = ott.tools.plot.Plot(fig=fig, title="test") + plott = plot.Plot(fig=fig, title="test") plott.animate(ots, frame_rate=2, titles=["test1", "test2"]) From ef6afd1f779ca03e6c6c70f49e33de6cf1cc21e5 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Fri, 29 Mar 2024 19:15:41 +0100 Subject: [PATCH 173/186] Fix neural init tests import --- tests/initializers/neural/__init__.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 tests/initializers/neural/__init__.py diff --git a/tests/initializers/neural/__init__.py b/tests/initializers/neural/__init__.py new file mode 100644 index 000000000..8c23e4ba8 --- /dev/null +++ b/tests/initializers/neural/__init__.py @@ -0,0 +1,16 @@ +# Copyright OTT-JAX +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import pytest + +_ = pytest.importorskip("ott.initializers.neural") From 73c2527dfd2288164eea266002a90bfe62eff931 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Fri, 29 Mar 2024 20:12:20 +0100 Subject: [PATCH 174/186] Update `docs/` --- docs/neural/data.rst | 15 ----------- docs/neural/datasets.rst | 15 +++++++++++ docs/neural/duality.rst | 37 --------------------------- docs/neural/flow_models.rst | 50 ------------------------------------- docs/neural/gaps.rst | 15 ----------- docs/neural/index.rst | 8 +++--- docs/neural/methods.rst | 37 +++++++++++++++++++++++++++ docs/neural/networks.rst | 33 ++++++++++++++++++++++++ docs/solvers/index.rst | 11 ++++++++ 9 files changed, 99 insertions(+), 122 deletions(-) delete mode 100644 docs/neural/data.rst create mode 100644 docs/neural/datasets.rst delete mode 100644 docs/neural/duality.rst delete mode 100644 docs/neural/flow_models.rst delete mode 100644 docs/neural/gaps.rst create mode 100644 docs/neural/methods.rst create mode 100644 docs/neural/networks.rst diff --git a/docs/neural/data.rst b/docs/neural/data.rst deleted file mode 100644 index 3ae0e4c53..000000000 --- a/docs/neural/data.rst +++ /dev/null @@ -1,15 +0,0 @@ -ott.neural.data -=============== -.. module:: ott.neural.data -.. currentmodule:: ott.neural.data - -The :mod:`ott.neural.data` contains data sets and data loaders needed -for solving (conditional) neural optimal transport problems. - -Datasets --------- -.. autosummary:: - :toctree: _autosummary - - datasets.OTData - datasets.OTDataset diff --git a/docs/neural/datasets.rst b/docs/neural/datasets.rst new file mode 100644 index 000000000..67d5e3b6b --- /dev/null +++ b/docs/neural/datasets.rst @@ -0,0 +1,15 @@ +ott.neural.datasets +=================== +.. module:: ott.neural.datasets +.. currentmodule:: ott.neural + +The :mod:`ott.neural.datasets` contains datasets needed for solving +(conditional) neural optimal transport problems. 
+ +Datasets +-------- +.. autosummary:: + :toctree: _autosummary + + datasets.OTData + datasets.OTDataset diff --git a/docs/neural/duality.rst b/docs/neural/duality.rst deleted file mode 100644 index 25dc89daa..000000000 --- a/docs/neural/duality.rst +++ /dev/null @@ -1,37 +0,0 @@ -ott.neural.duality -================== -.. module:: ott.neural.duality -.. currentmodule:: ott.neural.duality - -This module implements various solvers to estimate optimal transport between -two probability measures, through samples, parameterized as neural networks. -These solvers build upon dual formulation of the optimal transport problem. - -Solvers -------- -.. autosummary:: - :toctree: _autosummary - - neuraldual.W2NeuralDual - neuraldual.BaseW2NeuralDual - -Conjugate Solvers ------------------ -.. autosummary:: - :toctree: _autosummary - - conjugate.FenchelConjugateLBFGS - conjugate.FenchelConjugateSolver - conjugate.ConjugateResults - -Models ------- -.. autosummary:: - :toctree: _autosummary - - neuraldual.W2NeuralTrainState - neuraldual.BaseW2NeuralDual - neuraldual.W2NeuralDual - models.ICNN - models.PotentialMLP - models.MetaInitializer diff --git a/docs/neural/flow_models.rst b/docs/neural/flow_models.rst deleted file mode 100644 index 273f145f3..000000000 --- a/docs/neural/flow_models.rst +++ /dev/null @@ -1,50 +0,0 @@ -ott.neural.flow_models -====================== -.. module:: ott.neural.flow_models -.. currentmodule:: ott.neural.flow_models - -This module implements various solvers building upon flow matching -:cite:`lipman:22` to match distributions. - -Flows ------ -.. autosummary:: - :toctree: _autosummary - - flows.BaseFlow - flows.StraightFlow - flows.ConstantNoiseFlow - flows.BrownianNoiseFlow - -OT Flow Matching ----------------- -.. autosummary:: - :toctree: _autosummary - - otfm.OTFlowMatching - -GENOT ------ -.. autosummary:: - :toctree: _autosummary - - genot.GENOT - -Models ------- -.. 
autosummary:: - :toctree: _autosummary - - models.VelocityField - -Utils ------ -.. autosummary:: - :toctree: _autosummary - - utils.match_linear - utils.match_quadratic - utils.sample_joint - utils.sample_conditional - utils.cyclical_time_encoder - utils.uniform_sampler diff --git a/docs/neural/gaps.rst b/docs/neural/gaps.rst deleted file mode 100644 index abf621e24..000000000 --- a/docs/neural/gaps.rst +++ /dev/null @@ -1,15 +0,0 @@ -ott.neural.gaps -=============== -.. module:: ott.neural.gaps -.. currentmodule:: ott.neural.gaps - -This module implements gap models. - -Monge gap ---------- -.. autosummary:: - :toctree: _autosummary - - map_estimator.MapEstimator - monge_gap.monge_gap - monge_gap.monge_gap_from_samples diff --git a/docs/neural/index.rst b/docs/neural/index.rst index 9de1781f6..5cf025cdc 100644 --- a/docs/neural/index.rst +++ b/docs/neural/index.rst @@ -1,7 +1,6 @@ ott.neural ========== .. module:: ott.neural -.. currentmodule:: ott.neural In contrast to most methods presented in :mod:`ott.solvers`, which output vectors or matrices, the goal of the :mod:`ott.neural` module is to parameterize @@ -13,7 +12,6 @@ and solvers to estimate such neural networks. .. toctree:: :maxdepth: 2 - data - duality - flow_models - gaps + datasets + methods + networks diff --git a/docs/neural/methods.rst b/docs/neural/methods.rst new file mode 100644 index 000000000..028651a34 --- /dev/null +++ b/docs/neural/methods.rst @@ -0,0 +1,37 @@ +ott.neural.methods +================== +.. module:: ott.neural.methods +.. currentmodule:: ott.neural.methods + +Monge Gap +--------- +.. autosummary:: + :toctree: _autosummary + + monge_gap.monge_gap + monge_gap.monge_gap_from_samples + monge_gap.MongeGapEstimator + +Neural Dual +----------- +.. autosummary:: + :toctree: _autosummary + + neuraldual.W2NeuralDual + +ott.neural.methods.flows +======================== +.. module:: ott.neural.methods.flows +.. currentmodule:: ott.neural.methods.flows + +Flows +----- +.. 
autosummary:: + :toctree: _autosummary + + otfm.OTFlowMatching + genot.GENOT + dynamics.BaseFlow + dynamics.StraightFlow + dynamics.ConstantNoiseFlow + dynamics.BrownianBridge diff --git a/docs/neural/networks.rst b/docs/neural/networks.rst new file mode 100644 index 000000000..647243192 --- /dev/null +++ b/docs/neural/networks.rst @@ -0,0 +1,33 @@ +ott.neural.networks +=================== +.. module:: ott.neural.networks +.. currentmodule:: ott.neural.networks + +Networks +-------- +.. autosummary:: + :toctree: _autosummary + + icnn.ICNN + velocity_field.VelocityField + potentials.BasePotential + potentials.PotentialMLP + potentials.PotentialTrainState + + +ott.neural.networks.layers +========================== +.. module:: ott.neural.networks.layers +.. currentmodule:: ott.neural.networks.layers + +Layers +------ +.. autosummary:: + :toctree: _autosummary + + conjugate.FenchelConjugateSolver + conjugate.FenchelConjugateLBFGS + conjugate.ConjugateResults + posdef.PositiveDense + posdef.PosDefPotentials + time_encoder.cyclical_time_encoder diff --git a/docs/solvers/index.rst b/docs/solvers/index.rst index ddfbc9230..d23b4cdac 100644 --- a/docs/solvers/index.rst +++ b/docs/solvers/index.rst @@ -23,3 +23,14 @@ Wasserstein Solver :toctree: _autosummary was_solver.WassersteinSolver + +Utilities +--------- +.. 
autosummary:: + :toctree: _autosummary + + utils.match_linear + utils.match_quadratic + utils.sample_joint + utils.sample_conditional + utils.uniform_sampler From 0418e788bfa1bb7b8e2cb4db8f9b08b55dde425f Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Tue, 2 Apr 2024 09:43:19 +0200 Subject: [PATCH 175/186] Update Monge Gap --- docs/tutorials/Monge_Gap.ipynb | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/docs/tutorials/Monge_Gap.ipynb b/docs/tutorials/Monge_Gap.ipynb index 99fbc17ca..be9098a09 100644 --- a/docs/tutorials/Monge_Gap.ipynb +++ b/docs/tutorials/Monge_Gap.ipynb @@ -40,8 +40,8 @@ "\n", "from ott import datasets\n", "from ott.geometry import costs, pointcloud\n", - "from ott.neural import losses, models\n", - "from ott.neural.solvers import map_estimator\n", + "from ott.neural.methods import monge_gap\n", + "from ott.neural.networks import potentials\n", "from ott.solvers.linear import acceleration\n", "from ott.tools import sinkhorn_divergence" ] @@ -58,7 +58,7 @@ "T^\\star \\in \\arg\\min_{\\substack{T:\\mathbb{R}^d \\rightarrow \\mathbb{R}^d \\\\ T \\sharp \\mu = \\nu}} \\int c(x,T(x)) \\mathrm{d}\\mu(x)\n", "$$\n", "\n", - "We show how to use the {func}`~ott.neural.losses.monge_gap`, a regularizer proposed by {cite}`uscidda:23` to do so. Computing an OT map can be split into two goals: move mass efficiently from $\\mu$ to $T\\sharp\\mu$ (this is the objective), while, at the same time, making sure $T\\sharp\\mu$ \"lands\" on $\\nu$ (the constraint).\n", + "We show how to use the {func}`~ott.neural.methods.monge_gap.monge_gap`, a regularizer proposed by {cite}`uscidda:23` to do so. 
Computing an OT map can be split into two goals: move mass efficiently from $\\mu$ to $T\\sharp\\mu$ (this is the objective), while, at the same time, making sure $T\\sharp\\mu$ \"lands\" on $\\nu$ (the constraint).\n", "\n", "The first requirement (efficiency) can be quantified with the **Monge gap** $\\mathcal{M}_\\mu^c$, a non-negative regularizer defined through $\\mu$ and $c$, and which takes as an argument any map $T : \\mathbb{R}^d \\rightarrow \\mathbb{R}^d$. The value $\\mathcal{M}_\\mu^c(T)$ quantifies how $T$ moves mass efficiently between $\\mu$ and $T \\sharp \\mu$, and only cancels $\\mathcal{M}_\\mu^c(T) = 0$ i.f.f. $T$ is optimal between $\\mu$ and $T \\sharp \\mu$ for the cost $c$.\n", "\n", @@ -68,7 +68,7 @@ "\\min_{T:\\mathbb{R}^d \\rightarrow \\mathbb{R}^d} \\Delta(T\\sharp \\mu, \\nu) + \\lambda_\\mathrm{MG} \\mathcal{M}_\\mu^c(T)\n", "$$\n", "\n", - "We parameterize maps $T$ as neural networks $\\{T_\\theta\\}_{\\theta \\in \\mathbb{R}^d}$, using the {class}`~ott.neural.solvers.map_estimator.MapEstimator` solver. For the squared-Euclidean cost, this method provides a simple alternative to the {class}`~ott.neural.solvers.neuraldual.W2NeuralDual` solver, but one that does not require parameterizing networks as gradients of convex functions." + "We parameterize maps $T$ as neural networks $\\{T_\\theta\\}_{\\theta \\in \\mathbb{R}^d}$, using the {class}`~ott.neural.methods.monge_gap.MongeGapEstimator` solver. For the squared Euclidean cost, this method provides a simple alternative to the {class}`~ott.neural.methods.neuraldual.W2NeuralDual` solver, but one that does not require parameterizing networks as gradients of convex functions." 
] }, { @@ -293,7 +293,7 @@ "outputs": [ { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAxYAAAJOCAYAAAAqFJGJAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOydd5gURf7GP7OZXXbJOUeVYATMCfFERcB8Zgx3J4oBwx2emM74M2AC8ymenqcYQDFgQlFAFASVoJIl57CwLBv798dbNd0zOzO7C0uu93nmmd3p6urqmp6q7/uNIc/zPBwcHBwcHBwcHBwcHLYDSbt6AA4ODg4ODg4ODg4Oez4csXBwcHBwcHBwcHBw2G44YuHg4ODg4ODg4ODgsN1wxMLBwcHBwcHBwcHBYbvhiIWDg4ODg4ODg4ODw3bDEQsHBwcHBwcHBwcHh+2GIxYODg4ODg4ODg4ODtsNRywcHBwcHBwcHBwcHLYbjlg4ODg4ODg4ODg4OGw3HLFwcHBw2MfQr18/QqEQLVu23NVDcXBwcHDYi+CIhYPDHoK8vDyee+45TjvtNJo0aUJGRgbp6enUq1ePrl27csUVV/Diiy+yePHiXT1Uh70ULVu2JBQKbdfr66+/3tW3sUdj+PDhEfPZpk2bCp23ePFikpOTI85duHDhjh2sg4PDPoeUXT0ABweH8vHdd9/x5z//mUWLFpU5tmbNGtasWcOUKVN45ZVXaNCgAStWrNgFo3RwcIjG119/zYknngjAV199xQknnFCl/c+fP5+JEydy1FFHJWz33//+l9LS0iq9toODg0M0HLFwcNjNMXv2bE455RQ2bdoEQO/evTnnnHNo3749aWlprFmzhp9//pnPP/+cr776aheP1mFvxmeffUZhYWHMY5dffjlTpkwBYPr06XH7aNWq1Q4Z276IjIwMtm7dymuvvVYusXjttdciznFwcHDYEXDEwsFhN8ftt98eJhWvvPIK/fr1K9Pm5JNP5pZbbmH16tWMGDFiJ4/QYV9B+/bt4x7LysoK/92pU6edMZx9Hr1792bEiBGMGDGCJ598krS0tJjtpk6dyqxZswDo06cPb7311s4cpoODwz4EF2Ph4LAbo6SkhI8++giALl26xCQVQdSrV49rr712J4zMwcFhV+P8888nLS2NdevWhdeJWLDWiq5du7L//vvvrOE5ODjsg3DEwsFhN8bq1avJz88HoG3btlXS58KFCxk4cCAdO3YkOzubzMxM2rVrx9/+9reELiwLFy4MB30OHz484TVskG8sIhQMPl24cCEFBQU88cQTHHHEEdStW5dQKMTdd99d5rwJEyZw1VVXsd9++5GTk0NaWhpNmzalV69eDBs2jA0bNsQdz9y5cxk4cCCdO3emRo0aVKtWjdatW9OvX7+w+872YP78+Tz22GOcccYZtGzZkmrVqlGtWjVatGjB+eefz5gxYxKeHz0npaWlvPDCCxx11FHUqlWLrKwsDjzwQO6//362bNlS7nh+/fVX+vXrR7NmzcjIyKBZs2ZceOGFTJ48ebvvdVuRl5fHW2+9xVVXXcXBBx9MjRo1SE1NpV69ehx//PE8+uijbN68OWEfdo7s8zF27FjOPfdcmjVrRmpqaswsV6NHj6Znz57Uq1ePzMxM2rdvz6233hqOQ0r0rAYxdepUrr76avbbbz+qV69OVlYW++23H/3792f27Nll2tvfi42vADjxxBPLBLOX91tKhNq1a3P66acDPnmIRnFxMf/73/8AuOSSSyrV/6hRozj33HNp3rw5GRkZ1KxZky5dunDPPfewfv36hOdOmjSJwYMHc8IJJ9CwYUPS0tLIycmhQ4cO9O/fP2xBSYTZs2dz3XXX0alTJ7Kzs0lLS6Nx48YcfPDBXHHFFbz11lsUFBREnPP1119XOFFA9PMUxN133x0+
DrBx40buvfdeDjnkEGrWrBn3u9ueOduW+3Vw2O3gOTg47LZYu3atB3iAd9BBB213f6+++qqXnp4e7jP6lZyc7D3wwAMxz12wYEG43SuvvJLwOi1atPAA77LLLitz7JVXXgn3M3nyZO/ggw8uM4677ror3H7Lli3eBRdcEHfMsc4J4pFHHvFSU1PjnhcKhbw77rijgjNYFvPnzy93bIB38cUXe0VFRTH7CM7JzJkzvZNOOiluP926dfM2b94cdzxvvfVW3O84JSXFe+mll7zLLrvMA7wWLVps831H4/jjjw9fp7zj8V6tWrXyfv3117jXCH7X//znP8ucH30/11xzTdxrNWzY0Js6dWrCZ9XzPK+kpMQbOHCgFwqF4vaVkpLiPf/88xHnBX8viV7l/ZaiEXxWvvrqK++9997zAC8tLc1bu3ZtmfYfffRReIyrVq3y7rrrrvD5CxYsiHmNdevWed27d0847vr163vfffdduWNMtNYMGzYs7n2OGDHCS0tLK7ef6dOnR5z31VdfRcxPIiRaO4LzNHv2bK9ly5YJv7vtnbNtvV8Hh90NLsbCwWE3Ru3atWnRogV//PEHP//8M//3f//HrbfeSlJS5Y2NH330Ef369cPzPKpXr87NN99Mjx49SElJYeLEiTz44IOsWbOGf/7zn9SsWZP+/fvvgDuKxJVXXsn06dO59NJLOf/882nYsCGLFi0iPT0dgNLSUvr06cPnn38OQLt27bjmmmvo0qULmZmZLF++nIkTJ8aNK3nkkUf4+9//DsCBBx5I//79adeuHTVr1uT3339n6NChfPfdd9x7773UrVuX66+/vtL3UFJSQlpaGqeccgonn3wyHTp0oHbt2qxbt47Zs2czbNgwZs6cyeuvv07r1q255557Evb3l7/8hUmTJnHZZZdx3nnnhefk4Ycf5rvvvuOHH37gvvvu48EHHyxz7uTJk7nooosoLi4mPT2dgQMHctppp5Gens7333/PAw88QP/+/enQoUOl73N7UVxcTOfOnenduzddunShcePGeJ7HH3/8wciRIxkxYgQLFiygb9++/PTTT2RkZMTt67333mP69Ol07tyZgQMH0qlTJ/Lz8/npp5/CbR5++GGeeeYZAJo1a8agQYPo0qULBQUFfPrppwwZMoRzzjmnXAvQddddF+7nuOOOo1+/frRu3ZrMzEx+/vlnnnjiCWbOnMnf/vY3GjZsSO/evQFo0qQJ06dPZ/LkyVxxxRUAvPzyy3Tt2jWi/6ZNm1Z6LoM4/fTTw8/biBEjuPrqqyOOW0uGtdqUh4KCAnr06MHUqVNJTk7mwgsv5LTTTqNVq1YUFRXxzTffMGTIEFatWsVpp53GtGnTaNGiRUQfxcXF1KpViz59+nDcccfRrl07srKyWLZsGVOnTuWpp55izZo1DBgwgP3335/u3btHnL9y5Uouv/xyCgsLqV+/PgMGDAhbNPPz85k7dy7jxo1j1KhR2zV3FcU555zD0qVLue666+jduze1atVizpw54fve3jnb3e7XwWG7sKuZjYODQ2I8+uijERqrli1betdff7335ptvevPnz69QH4WFhV7jxo09wKtevbo3bdq0Mm0WLlzoNWrUyAO8zMxMb/Xq1RHHd4TFAvBeeumluP08+eST4XZnnnmmt3Xr1pjtSkpKvCVLlkR8NnPmzLCl4q677vJKS0tjnnfxxReH52XdunUJ7ysWNm/e7C1btizu8dLSUq9fv34e4GVlZXkbNmwo0yZ6Tl577bUybbZu3ep16tTJA7w6derEtH506dLFA7zU1FRv3LhxZY4vWbLEa9q0aVwN//agPIvF7NmzE57/+eefe0lJSQmfieAcnXTSSXGfh+XLl3sZGRke4LVt27bMs+x5njdhwoQIDXGsZ/Wzzz4r9znNz88Pa6pbtGhR5nupjAa9Ioi2WHie5/Xv398DvKOOOiqibW5urletWjUP8EaMGOF5nleuxcJagmrWrOlNmTIl5hiC
a8WFF15Y5viSJUu8vLy8uPewYcMG78ADD/QA75hjjilz/N///neFNPRbtmzxtmzZEvHZjrBYJCUleZ9++mncfrZ3zrbnfh0cdjc4YuHgsJujpKTEu+KKK+Kaxhs0aOCdf/753gcffBBTePY8ucfY9g899FDca73++uvhdg8//HDEsR1BLLp3757wvq0Q3LRpU2/Tpk0JrxkNO2ddunSJOy+e53nr168Puw698MILlbpGRbF27VovOTnZA7x33nmnzPHgnJx11llx+3nuuefC7X7++eeIYz/88EP42IABA+L2EXwWdiaxqAj69u3rAV6vXr1iHg8KevHceDzP8x566KFw2w8//DBuu4EDByYkFpYwnH322QnHPWvWrHA/n332WcSxnUEsJk6cGP5s3rx54bYvv/yyB3g1atTw8vPzPc9LTCw2bdrk1ahRwwO8p59+OuE4nnnmmTCJTeSaFw+jRo0Kj2PNmjURx+6//34P8GrVqlXpfncEsbjiiivi9lEVc7Y99+vgsLvBBW87OOzmSEpK4t///jefffYZPXv2JCUl0oNx5cqVvPXWW/Tu3Ztu3boxb968Mn188cUXgIIVrVtGLJx77rnUqFEj4pwdiYsuuijusZ9++oklS5YAcg+qXr16pfoePXo0AGeffXY4ADMWatasSefOnQEVItxeFBUVsWTJEn799VdmzJjBjBkzWLZsGXXq1AHg559/Tnh+ojk57LDDwn/Pnz8/4ljw+7r88svj9nHmmWdSs2bNhGPYGVi9ejVz5swJz9GMGTPCrjrlzdHRRx8dM1Dbws5F3bp1OfXUU+O2u/TSS+Mey83NDQf/nnPOOQnHc8ABB1C3bl2gap6hyuLII48MJ3d4/fXXw59bN6hzzz03oWuZxbhx49i4cSNQ/j0fd9xxgJ73H3/8MWHbvLw8Fi5cyMyZM8PfdWpqavh49PfdqFEjANavX8/7779f7rh3NBL9Jqtizna3+3Vw2B64GAsHhz0EJ598MieffDK5ublMmDCByZMnM2XKFL755pvwxjZlyhSOPfZYfvzxx/BmBTBjxgxAxckS+VmnpaVxyCGH8PXXX4fP2ZE48MAD4x6bNm1a+O9jjz22Uv3+8ccfrF69GoDbbruN2267rULnbWvF8qKiIl544QVee+01pk2bFreIHKhSeiIkSgdau3bt8N+2tomFzeiVlpbGQQcdFLeP1NRUDjnkkF1STHHChAk89dRTfPHFF6xbty5uu/LmKNFzA/7zfvDBByeMR+rcuTNpaWkxv69p06aFK1VfcMEFXHDBBQmvabGrqt5fcskl3HXXXbz++uvceeedLF68OEyMEhGoIIIZ0oLrR3mIdc9r1qxhyJAhvPvuu8yZMwfP8+KeH/199+7dm5o1a7JhwwbOPPNMTjjhBM444wyOO+44Dj74YJKTkys8tqpAouetKuZsd7tfB4ftgbNYODjsYcjJyeHUU0/lzjvv5IMPPmDlypW8/PLL1KpVC4Dly5dzxx13RJxjhbj69euX23/Dhg0jztmRsGOOhaCwUZkNG2DVqlXbNJ6KpHKNxrp16zjyyCMZMGAA33//fUJSAYTTB8dDZmZm3GNBIbmkpKTMOEDkozxBpEGDBgmP7wjcfffdHHPMMYwYMaLcZ6u8OUr03ADhtJ7lBSsnJydHkLUgduYzVBW4+OKLAZgzZw6TJk3i9ddfx/M8WrZsyTHHHFOhPqrqnn/88Uf2339/HnzwQWbPnp2QVEDZ77tOnTp88MEHNGnSBM/z+Oqrr7jpppvo0qULtWvX5qyzzuLDDz/cprFuCxI9b1UxZ7vb/To4bA+cxcLBYQ9Heno6l19+OY0bN6Znz56Asua88MILZbS1iVyCdgV2lCYuKHTfeeednHvuuRU6L1g9uqK44YYbwm4Nffv25YorruDAAw+kfv36ZGRkhOe8efPmLF68uFwha3uxu33HAF9++WU4G1br1q255ZZbOOaYY2jevDlZWVlh974777yT
e++9t9z+doYGN/gMPf/88xx11FEVOq880rOj0Lp1a44++mgmTJjAa6+9FrZIXXzxxRV+JoL3PHXq1Ah3pUQIZrYqLCzkvPPOY+3ataSmpnLdddfRp08f2rdvT61atcIZ3+bPn0+bNm0AYv4mjj32WObOncu7777Lxx9/zDfffMOSJUvIzc1l5MiRjBw5klNOOYX33nsvIRmvCiR63qpizmD3ul8Hh+2BIxYODnsJTjnlFJo1a8bixYtZv349a9euDWtsrVZ25cqV5fZjTfTRmtwgSbEuIvGQl5dXqbHHgvVZB1lhKlMx2MYzgFx/OnXqtN3jiYXc3FzeeustQH7YQf/2aJRXHGt7YQXatWvXUlJSklAYqshzUJV48cUXAY1x0qRJcS0JVWUlq1WrFitWrAi7w8VDSUlJ3O8l+AxlZmbusGeoKnHppZcyYcIEXn75ZbZu3QpUrihe8J7r1au3Talwx44dG47/eeaZZ7jqqqtitqvId52RkcFFF10UjnFYsGABH330EU8//TSzZ8/m008/5fbbb+fxxx8Pn1PRdaoq1iiomjmz2Jb7dXDY3eBcoRwc9iI0btw4/HdQS2mFogULFiQUtoqKisKxDdGCVHZ2dvjvRELyunXrWLt2beUGHgOHHnpo+O9vvvmmUue2bt06HIQ+YcKE7R5LPMyZM4eioiIAzj///Ljtfvvtt3KrSm8vbAB6YWFhwuDn4uLiiHoPOwMzZ84EVHk6kXtSVVRBB+jYsSOgBACJhMvp06fHrWR88MEHh39D2/MM7UwL0nnnnUd6enqYVBx++OG0b9++wucfcsgh4b+39Z7tdw2JfxPb8l23atWKAQMGMHny5LAAH13DpqLrVKxq6duCqpizeKjI/To47G5wxMLBYS/Bli1bmDVrFqA4jKAmrUePHoBcDl555ZW4fbzzzjvhQHB7jkWtWrXC2YQSCQVvvvlmlbj7HHTQQTRr1gyAl156qVKCeXJyMqeddhoAn332Gb/++ut2jycWiouLw38n0oA+99xzO+T6QQS/r1dffTVuu5EjR+5w60k07DwlmqNp06bx/fffV8n1TjrpJEBxOp988kncdv/5z3/iHqtXrx5HHHEEAG+88Ua51o94CGZjikdiqgo1a9akb9++pKenk56ezmWXXVap83v06BF2s3nqqae26Xdckd9EaWlp2Iq1LcjJyQkXGowO/A5mC0u0Tv3vf//b5usHURVzVh4S3a+Dw+4GRywcHHZjbN68mcMPP5wPP/wwoea1tLSU6667LpwpqHfv3hGa0r59+4atGffff384g1AQixcv5pZbbgHk+hErZalNl/j+++/HTGv7+++/lwkc31YkJSVx6623ArBkyRIuvfTSuIHRpaWlLFu2LOKz2267jeTkZEpLSznnnHPCqWtjoaSkhP/+978J28RC27Ztw/P86quvxhQqRo8ezdChQyvV77agW7duYSvPs88+y/jx48u0Wb58efg73plo164dAOPHj2fu3Llljq9evbpSLjvl4bLLLgv78t94440xhbHvvvuOYcOGJexn8ODBgFzezjnnHDZs2BC3bUFBAcOGDQtbCyyCiQdi/WaqGm+++SZbt25l69at9O/fv1Ln1qxZkwEDBgAwceJEBg4cmHDdWblyJS+99FLEZ/a7Bhg+fHjM82677TamTp0at99PP/2U5cuXxz2+ceNGfvjhB0Ba/SBq1aoVzuL0yiuvxHS5Gj9+PE8++WTc/iuDqpiz7blfB4fdDrumfIaDg0NFsGnTpnCRpiZNmnjXXnut9/rrr3vffvut99NPP3lff/219/jjj3udO3cOt6tRo0bM4mEffvihFwqFPMDLzs72/vWvf3kTJkzwJk2a5A0ZMsSrX79+uI9nnnkm5njGjBkTbtO0aVPvpZde8n788Udv3Lhx3p133unVqFHDa9u2rVevXr0KFchLVOTM81Qk7+STTw63b9++vffEE09448eP96ZOnep9/PHH3p133um1
a9cuZpGrxx9/PGJebr31Vu+TTz7xpk6d6k2cONF74403vOuuuy5cETdR1dt4OP3008PX6NGjh/fuu+96U6ZM8T7++GPvyiuv9JKTk7127dpVyZyUV6Rw0qRJXkpKigd4GRkZ3m233eZ9++233g8//OA9/fTTXqNGjbzU1FTvoIMO2qkF8t5+++3wscaNG3tPPfWUN2HCBG/ChAneI4884jVq1MgLhULekUcembDInj0W67uOxgMPPBBu37x5c++ZZ57xfvjhB+/bb7/1Bg8e7FWrVs1r2bJl+Hvp169fzH5uuOGGcD8NGzb07r77bu+LL77wpk2b5o0fP94bPny4d+WVV3q1atXygJiFHG2hx1atWnnvv/++99tvv3lz5szx5syZ4+Xm5pZ7L0HEKpBXGZRXeXvr1q3e4YcfHm5z0EEHeUOHDvXGjx/vTZs2zRs7dqz39NNPe3369PHS0tK8ww47LOL8zZs3h9eS5ORk729/+5s3ZswYb8qUKd6bb77pnXTSSR7gHX300XGf5csuu8xLTU31TjvtNO+JJ57wvvjiC2/q1KneuHHjvGHDhnkHHHBA+NzHH3+8zD08//zz4eMdOnTw/ve//3lTp071vvjiC2/gwIFeenq6d9RRR1W4QF552N452977dXDYneCIhYPDboz8/HyvYcOG4U2lvFe7du28KVOmxO1v+PDh4SrTsV7JycneAw88kHBM119/fdzzmzdv7s2aNavClbfLIxae53l5eXneOeecU+69xxM2X3jhBS8zM7Pc89PS0rw5c+aUO55oLFq0yGvevHnCOZk5c2aVzElFqp+/8cYbXlpaWsyxpKSkeC+88IJ32WWX7VRi4Xmed/nllyd87p544olyhbnKEIvS0lLvb3/7W9xr1q1b15s8ebLXrFkzD/CuvvrquP3cc889YcKW6JWVleVt2bKlTB+24nKsV3lV7KOxo4mF53lebm6ud9ZZZ1VozTnxxBPLnD9mzBgvIyMj7jknnHCCN2PGjITEoiLXvvrqq72SkpIy1y8pKQlXcY/16ty5s7d8+fKEz1NliMX2ztn23q+Dw+4ERywcHHZzlJSUeBMmTPDuuece79RTT/Vat27tZWVlecnJyV5OTo63//77e+eff773xhtveAUFBeX2t2DBAu+GG27wDjjgAC8rK8urVq2a16ZNG+8vf/mL98svv1RoTG+88YZ33HHHeTk5OV61atW8/fbbzxs0aJC3Zs0az/O8KiUWFmPHjvUuueQSr1WrVl61atW8tLQ0r1mzZt4ZZ5zhPf/88wk1vytWrPDuuece7+ijj/bq1q3rpaSkeFlZWV779u29s88+23vuuee81atXV3gs0VizZo136623eu3bt/fS09O9GjVqeAcddJB31113eevWrfM8r2rmpCLEwvM8b+bMmd4ll1ziNW7c2EtLS/OaNGninXfeed6kSZM8z/N2CbHwPM977bXXvGOPPdbLzs720tPTvRYtWniXXHKJ9/3333ueV74wVxliYfH+++97f/rTn7zatWt7GRkZXtu2bb3rr7/eW7x4sed5nlejRg0P8P7xj38k7Gf+/Pne3//+d69Lly5e7dq1veTkZC87O9vr0KGDd9FFF3mvvvpqwmfw3Xff9f70pz959evXjyApuyOxsPj222+9q666yttvv/287OxsLyUlxatdu7bXtWtX79prr/U+/vhjr7i4OOa5M2bM8C6++GKvcePGXmpqqlevXj3v+OOP91544QWvpKQk4bO8bt067/XXX/euuOIKr0uXLl6TJk28tLQ0r1q1al779u29yy67zPv2228Tjr24uNgbNmyY17VrVy8rK8vLysryDjzwQO/+++8Pk7+qJBbbM2dVcb8ODrsLQp63g5OqOzg4ODg47IZYsmRJRIKAK6+8chePyMHBwWHPhgvednBwcHDYJxHMDGQzQDk4ODg4bDucxcLBwcHBYa9DXl4eubm5EVmZgpg2bRrHH388mzZt4rDD
DquyGhoODg4O+zJc5W0HBwcHh70Oq1ev5oADDqBv37707NmT/fbbj/T0dJYtW8aYMWP497//TX5+PqFQiCFDhuzq4To4ODjsFXAWCwcHBweHvQ4LFy4sN+d/WloaL774IpdeeulOGpWDg4PD3g1HLBwcHBwc9joUFRUxcuRIxowZw+TJk1m9ejXr1q0jMzOTli1b0qNHD6677jpatGixq4fq4ODgsNfAEQsHBwcHBwcHBwcHh+2Gywrl4ODg4ODg4ODg4LDdcMTCwcHBwcHBwcHBwWG74YiFg4ODg4ODg4ODg8N2wxELBwcHBwcHBwcHB4fthiMWDg4ODg4ODg4ODg7bDVcgrwIoLS1l2bJlZGdnEwqFdvVwHBwcHBwcHBwcHHYKPM9j06ZNNG7cmKSkxDYJRywqgGXLltGsWbNdPQwHBwcHBwcHBweHXYLFixfTtGnThG0csagAsrOzAU1oTk7OLh6Ng4ODg4ODg4ODw85Bbm4uzZo1C8vDieCIRQVg3Z9ycnIcsXBwcHBwcHBwcNjnUJFwABe87eDg4ODg4ODg4OCw3XDEwsHBwcHBwcHBwcFhu+GIhYODg4ODg4ODg4PDdsMRCwcHBwcHBwcHBweH7YYjFg4ODg4ODg4ODg4O2w1HLBwcHBwcHBwcHBwcthuOWDg4ODg4ODg4ODg4bDdcHQsHBwcHhypDt27dmDx5cvj/rl278sMPP+zCETk47NkoLi6muLh4Vw/DYS9AUlISqampFapHsa1wxMLBwcHBYbvgb1JZQCpQHxnES5k8eQ6hUHUgDwDP8yrU53777cfs2bPD/7dv357ff/+9Ckft4LB7Y8uWLaxZs4a8vLxdPRSHvQipqalkZ2dTt25dkpOTq7x/RywcHBwcHLYJSUlJeF4KUAdoAfwJ6Ai0BzKBLcBsYCbwGfAHoVAaoVAxpaWlZfpLRFBmz14VQVCys7PZtGlT+Nzs7Gxyc3N3xG06OOx0FBYWsnjxYlJTU2nUqBHp6ek7VMvssPfD8zxKSkrYvHkzGzZsID8/n2bNmlU5uXDEwsHBwWEno2fPnnz66afh/0855RTGjBmzC0dUeUjIyQE6ATcCvYH0GC27mfe7gQ+Ax/G8mYRCobD1Qn2lUj5BmQ58Cixm06Z80z4VKGXTpsII4hFEy5YtWbBgwXbfs4PDzsKqVatITk6mRYsWO0Sr7LDvonr16tSoUYNFixaxZs0aGjRoUKX9h7yK2qX3YeTm5lKjRg02btxITk7Orh6Og4NDAnTs2JFZs2aF/+/QoQMzZ87chSMSymrj07DaeCgEiqisu9Cugu6lPnApcCeQXYmzNwH/Av4DrDKflUdQCoBliGB4wE/Ay8Aa4ALgJOA9YASwFs3rnju/Dvs2PM9jzpw51KpVi3r16u3q4TjspVi+fDl5eXm0adOmXGtYZeRgZ7FwcHDY45HIhWbWrGXb5ONfVcjIyKCgoJTKuQulACXhPnanAGifVAwCBkYd3QT8gQT5NHS/0aQjG3gEaIisGBlAP+ITlLXASrRdNcEnIWcCjyKC8jQiEE2BPwMdgNZANURK5hLtjgVFjmA47JYoKiqipKSEatWq7eqhOOzFyM7OZsOGDRQVFZGWllZl/Tpi4eDgsMei4i40u0aorJy70Gwz5k+AeUgzL8K0rQHQQbRp04b58+fHPHbEEUfw3XffldtHUlISup9L8UnFb8C7wBRgUYyzmgNdgLOB/QOfN0UE5WLg74gURmMlskrUAhoAQZeQLOBKRMD+DVyGLCF2fj1kpSgBDkVE526sOxZEumM5OOwusPFHzgXKYUfCPl+x4t22B45YODg47JHYHh//nSFURroL3QysR0J4GhK2rSC9GHgQ+B6oDZyBBPBsIB9ZXtZT0QDosmPAXCvJvKeYv1MQcSlh0qRZEcSlU6dOzJgxI9xPp06dmD59ugnU7oSsC38ADwE/mHF3R5aH1sgKsRWYD8wCxiJXpW7I0hECnkJk4DzTV4eo0a9F
pKI+EMsdZAGQC/zF3Mvnpn0Te/dorouAYvOeDpwL9MS6Yzly4bC7YmcHa2/ZsoUlS5ZQWFhIWloaTZs2JTMzc6eOwWHnYUc9Xy7GogJwMRYODrsXqtLHf0csgRpfHeAIZFFZjDTo4RZAMyTo/4608deiWIGgSXolsML01RS59fjkCHJjjt+35OQAjZBLUK75+0REXBqbMa1DQvpc4BtgKSI0mdjAaLk2FZh+nkaWg6eQJSHWuKNRCHwJDAU2oO8rBXjLHFuECIn9HguQ1aamGWc0liDi0RDN3WbkAtXKjC+IoOUijUirx2PAw+yo58DBYVuwdetWFixYQKtWrcjIyNih15o/fz7vvvsuX3/9PXPn/kFBgYfnQSgE6ekh2rZtwQknHM7ZZ59N69atd+hYHHYuKvOcuRgLBweHvRaJffwrgqCP/8NlNNaHHHIIP/30U9yzDz30UH788ceYxw477DCmTp2KBPJkRGJ6EluT/x9gHIoVuMYcjxbO6yOh2LoDZRFL4167dm3WrVsXOC8LOAg4CvjOnHsPkQQgH5GKusiCcgKyIEwBXgUWAgeaz4qQu1EectkaBZyFLDGxXJiikQacChyHBPkXkaUiC5GVVESgLLFYhranWNlK8hCpqBM4Xh0RnDvQ3AYFIEuySs19BInFQHPdF5zlwmGfwrJly7jvvof49NOJ5OXVJiWlOxkZl5KV1ZpQKAPP20pBwXx+/nkWP/74Oc899xannHIUgwcPonHjWGTfwUFwFosKwFksHBx2D/juTxcjl6EtSLN+GLHdZRKhFPgH8ALS5oPvKpSCCEAychtKxcY7SJteTNm0pvbcdHNOyLTLQm4+tyBrAcBI4H7gb0jAXoU06g0QCQiiBJhj3jsGPn8NCfZboq4btDDkAJcDg/EJQAEiDXmmvX0FzeIFiPi8bT4/HlkJmiB3rr8iwpFKYv1UrO1lJSIp7yEi0Nf0vR7ojAjPfHOtmjHOn4m+l3ZEkoQC9Ez0AG41nwXvybpDRVstchFRm4LnFSa4FweHnYMdbbEYNWoU99zzGMuX1yQ7+1pyck4iFEqN297zisjN/ZJNm4bSqNFG7rrrZvr27Vvl43LYuXAWCwcHhz0K3bp1Y/LkyWU+39YMRyIV9vU20pqH8F1dYgnwFnlIeLXZiuog4nAy8F+0FDZDBGUuEvTrm37aInecIiTEr0CC+fvmvToKHD/ZXL8dfnzEHGAG8v+/ALkiPQgMAfqg+ABMHyuB5cBGJOwWILLQChGORebz8YgslCJLQzBgvZq57hvAaGQpOcOMoyVyGVqFBOtMRDyqUZaU5CMrSm1kXfjJ3Pda4HxEiIrMy0MEw8IjNqGw2BwY/2NAVzOGNaa/dej7iLV55SKC0NjcQ/A6aej7Cj5bwePJ5txiIolFDrJc9HdWC4e9Hi+//DL/+tczFBX1pXHjm0hKKj+GIhRKpUaNnmRnH8fKlUO48cb7WLduHVdcccVOGLHDngZHLBwcHCqEI488kkmTJoX/j5VJyA8Gs+4t9ZDA6vu5T548nVAoEwmv5Wc4isz81Bw4BWm22+FnfoolwN+N4he+R0K5FXhL8IXhVabPnubzUebcR5Hm2wrMlsDkmnMeQRr2LsB1ZkxFiBxYwbWDOe4BtwMfAU8CFyKB/abAXS4A3kGkwWZWCmrbG6O4iMlIY98BP2A9LXBvIUROfkSk4nYU07ASxSwkmTlriMiPJWpBVANqIDJzPRL0XzPz08h8Br4Vp9i8Rwv60fcAPnGpg4T571EA+EOm7RZEAnPMWKOx0ly3RoxrYeblPXxLVjQseYrGGYjwrY1xzMFh78CoUaP417+eAfrTqNGVlT4/KSmTRo0Gs2ZNQ+699xnq1KlDnz59qn6gDns0HLFwcHCIi0T1ISZN+i2q0nE6EpibIteZ9kgYtprnP1CGoC9QgPAWoDBhhqPIzE/XIT/9HMoKrF3N+13IkjAECdaNgEtQkHdzcw8FKJB4NCID85BWHnzN
+UuoyvPZQBt8ob0mEug/Q65AFwEfIw18olSrfYFeKI7hYSSot0MEZgIwCT+z0uX49R+CmZWeNGO4EgWsBzX6VsgOISG9JiIuSabfXHyBviG+0B49j/Z/e781UCzHWERY+iMiU4KIRAo+aQxalGL1jWmXj+YqCVltnkXPBvjuSqWIFFoLU1P0DOab+4k37tZmPEuA/aLmBnPNEtN/kLhkIHI4O8aYHRz2fCxbtox77nmMoqK+20QqgqhT50pWrFjB3Xc/SteuXV3MhUMEYqmEHBwc9nEkJyebeg91UA2A61C2ndHIKjDa/H8dEuZqoEDfe4DXkbXgGhQs3MW8zkI+9TY70GFIE98Kz8suk/rOD9L+KzAGaZWrEVtgtdhixvECEoJTkOb6VERyMpCA/joiQtcBTyDh9kUUtHwXSov6Jco0dB0iQh6KyfgMuQLlAVcgobububfhwJvm/Q7z+deI2NyKiMKDqG5DCSIJHwADkEXjNjNPnZDr0kEouHu1uZdBwH2IIBWbe7YkIIRI0ndm7m1MxWrkXtXAzHcswT/achH8OxuRxCbAMWjbsC5Q1poUtFzEsoLMQ4TqAjNnVyAC9RwiLBeg7+UuRIh6I9J2uXk/wczLK8hqEW/cNt1wYZw29j2axG5Ez3AoHAzv4LA34b77HmL58po0aHBT+Y3LQSgUon79gSxfXoP77nuoCka3bZgzZw5XXHEFrVq1Ij09nerVq9OiRQtOP/10XnnllTLtP/30U3r16kX9+vVJS0ujcePGnH/++UyZMiVm/y1btiQUCrFw4cKYx/v160coFGL48OFxP58xYwbnn38+jRo1Ijk5mbvvvjvcrri4mJdffpkePXpQt25d0tPTadq0KT169ODpp6Oz2wlffvklZ511Fo0aNSItLY369etz5plnVqgO0c7CbmGxePvttxk2bBg///wzhYWFtG3blosuuoiBAweSmho/oCgaw4cP5/LLL0/Y5pNPPqFnz57bO2QHh70W5deHWIlcWFKQcJ2N/O7PQYJbOpEByEGhzkOa7p7IqjEEua4cAfwU9nEvm/mpFAnl1q1qC5ExE03NZ6uRtr4B8ADS/D9jPgshMlQHBU6fYu6hwBxbh9yG2pt7vhmfBP0ZWR1eNWP9zNzjfYg8xVqnOiGNfLCfC5DgfKOZx5fN2J8xY7nEjCUFX3M/GcVM9DPnQWR8Q3AZf8+MqzsiLkVIcK9N7AxLFtGpcKNRhL4vzJhC+IHQBMbrF/UTliILyndmDCciVzBbwHArsmD9F5HHhsgFrRuyOASzaM1EpPYblO1qEH7NCosC8x4r9W00CfkFkcCpaI5snE4m69cX7NJq7Q4OVYn58+fz6acTyc6+r0IxFRVBcnIW2dkD+PTTweEA4J2JGTNmcPTRR5Obm8t+++1Hr169SE5OZsmSJXzzzTcsXbo0Qh684447uO+++wiFQhx11FE0b96cX3/9lREjRvDuu+/ywgsvVHnMyMSJE7n66qtp1KgRxx13HPn5+WRnK/Pdxo0b6dWrF+PHjyc1NZWjjjqKxo0bs2LFCn755Re+/PJLrrvuuoj+brnlFh577DGSkpLo0qULxx57LIsWLeL9999n9OjRvPjii+XKwDsDu5xY3HjjjTz55JOkpKTQvXt3qlevztixY/nHP/7B6NGj+eyzzypd1r5NmzYcc8wxMY81aRK9ETk4OFjErw/xGdI4z0buKKlI4EsFrkYuQ+loSdmEMgfZKsuxBFUPWR8GI4H3eWRV+DhAbGx1Z5uFaSHwIRK0/yBSGC414z7cnGeDc/+Cgo6tlv9c4AYkfIfwSUU6cptKxbcO1MJPkToE1Y4oNdc+CxGEimzSqVH9PIB8+W9HblCbTX+PIjesG4kMNL4CCeJ3RPUZtBIUo5iPr8x1bCzDUtM2SCoswYsmARbRQvQWlP71ZPxMU5hr2D7seEvwDeEjzf3WBO7FJ2D2uUkxbd5GsTj3IEKx3vQdzJDVCTgdpZSdgIjY
n83/h+ATzF/NeJrGuC97b4uQ1WgqSrV7EorNaIq++2z0He+aau0ODlWNd999l7y82jRpclKV9pud3Z1ly2rzzjvvcOutt5Z/QhViyJAh5Obmct9993H77bdHHMvPz49IHDJmzBjuu+8+MjIy+OCDDzj55JPDx/79739z1VVXcfXVV3P44YfTsWNHqgovvvgigwYN4v777ycpKdJB6IorrmD8+PEccsghvPfee7Rs2TJ8rLi4mI8++qhMX4899hht27bl3Xff5cADDwwf++abb+jVqxdXX301xxxzDO3atauye9gW7FJiMWrUKJ588kmqV6/OuHHjOPTQQwFYs2YN3bt3Z/z48dxxxx08+uijler3mGOOKWOacnBwSIzY9SG+R4LtWiR4XYyEvJrIbeVUFMybj/z4tyLBbityf2kbfRXzskIpwFVIez8G392lI3ILykPE4FEiKzxfglyw0vGDt39HdSE+Ao4099HYjH8MIjKD8IXyUvMKWhvq4gdh27iSTERGpgPfIvecSE1SxZBJJJGqi8jJtcjVpyESmOuieU5CGvq1SOiuHtVfCn5xua1m3CvQ95OGn92pIWUtKpZcVARLTNu2iDQ2xHfDKg70beMXUpAl5llksbmJyFoXdu7/bdr0NnNQAz+OZTWyIhQTmUY4FcWmHAmMQM9ebfy5saRkGH58TBD/RVaruohcnBwYfz4Kom+FXL4gVrV2i9NOO63M5u/gsDvi66+/JyWle8KUstuCpKQ0UlK6M27cD+xkXsHKlXKJPO2008ocq1atGscdd1z4fytDXnPNNRGkAuDKK69k1KhRfPjhhzz55JO88MILVTbG9u3bc99995UhFT///DPvvfceGRkZjB49uozCOyUlJSIovrS0NOxC9eabb0aQCoDjjjuOO+64g7///e88//zzlZaZqxq7NMbigQceAGDQoEFhUgFQt25dnnnmGQCGDh3Kxo0bd8n4HBz2FSQnJ+NbCW5EguSNKK6hFnLjmYBciC4APkEC35lI8NyMApWb4btNbUSuRbFgCYbVeA9EAuViM47rkBvMh2ZMi1EQ8UcoPuEUJDQ2Rdr805HL0bvI0jIX1Yd4B5GTAWY8f+BbM0rwte1BNDCfrQh89hMSOi9B6V2L4txXPASF+KuQW9VjwMHmHh5G1pWzUEXrhWh5HmKOnxGjzw1oXmz61XQz7rbm3I3ET9saHFcoxisIG6/QAN13HpEE0cK6qY1ChOFqZGWxpMLDD5oOthmMSFfIHEtG5KUuysBlC/+tQKmEL0AE8s9oDv8PEbWXzfELkXvV+fjxMaBYmsdQEP07wGlEZv2yblRBa1EqcvEbg2J9bPKC+nz88URCoeqEQqEygoODw+6CLVu2MHfuH2RkdNgh/WdkdGDu3IVs2bJlh/QfD926dQOgf//+fPrpp2zdujVmu+LiYiZMmAAo9iEWrrxSwexfffVVlY6xb9++Zm+NxJgxYwA4/fTTK+RFM23aNJYtW0abNm047LDDYrY54YQTALlf7WrsstVw6dKlYVPVhRdeWOb4McccQ7NmzSgoKODjjz/e2cNzcNgnEAopWLW0NBlpuq27zYVII9wPkYizkRb8N6QlfgsJ2lchi4ANsL0S+fnnIQFtXrwrE0kuspDWehIiKKeiAm0Po3iMtxCZKEECoI0tSDOvdPOeadr/F/nq32vGcxbSbI/EF5yt2060IC3BUWSp0Bx/DhGpOyhLOiqLIJEagojKr+a6N6MYkHtMuznmeFrU+WuQRj8HEawcfLKTYdpsRpr8shtbYkRbMuy1LYlYFWgXHZuxDL9Gx1VRfdq2K1Ggdh/0vAQzWtn3ECIWtdBcj0CEYhEimB8ii9bF+HUw6iE3sMHm+L3oGT0HEeBnzPX+SVkXtlJ8Fz8bm1KELEH2cxt0X9e8nkTE5VA8rzahUFql3XZBe539HYZCobhuvA4O24IlS5ZQUOCRnt66/MbbgPT01mzd6rFkyZId0n883HrrrfTo
0YPvv/+enj17kpOTQ9euXbn55psj3KDWrl0bJh3x4kDatJFlc+nSpTGPbyuC7k1B/PHHHwDsv//+MY9HY/78+QDMmzcvYq0IvizRWr169fYPfDuxy1yhpk2bBkDt2rXjftldunRh8eLFTJs2jQsuuKDCfc+dO5fBgwezatUqqlevTqdOnejduzd169atkrE7OOzpSElJoaQkCQmx6UhYvxH5lw9EGuG/IxcgkKb/HlQfIQtp7o9FQm0wwHYWypL0LtLGn28+i9aWBQVN+/eJSIA7EfgUZWm6GmnyC/GFfFs5OagxD2raPSSQ3oHvdtTC9Dsl6rrxdCvZaHlcj4T46SgIuToiHUvx/fq3BVnIijIYzeVm5P5TDwmrt6GA53xE+IJYH2hr17Rgkbqt+AQsVmpei6BLWnDuotHUHJ+Pvsdi5J5VO3COJWiP4ae6tQhaNUKILNbCd7eLJhbBtg1Qhq0RiNwOxCcFReY+ayDrzAr0nS1Az0czNL+voTiOTigAfaYZbzX0XVY3Y9iEvvd0IuNQSs11CtHzuBw9m0XIOjYYpRx+nK1bZ1aoyJ7vUmUtZjXR76iUCRNmusBxhypDYWEhngehUNVX8AYIhdLxPF1nZyIzM5PPP/+cyZMnM2bMGCZOnMjEiROZMmUKQ4YM4ZprrmHYsGE7dAyxUqQHsS2KhkTXadiwIaecckrCtruDnLvLiMWCBQsAaN68edw2zZo1i2hbUUyYMCFs+rLIyMjg7rvv5h//+EclR+rgsGehTZs2YQ0HQOvWrZk3z7cclM36dDVKbdobxTFEZyB6HWlnrV96ayQINTTHreDTEbns3ISyIA1Dwv155nhtc160MG5JQTF+LYyn8TXaW5Fwl0Kk60p0H9ECWBJyX1mN3GAuRhrvLUiojHeeHU82Eu7+ix/kC5GkI16mpVhZq6K15N3Nvc4z9zURBbw3Mm0fx48psLDxHzURsQiOPUgA7DlpJE7PC7HJRJBoZCJiNgN9xw2QEB9CBMGePw/F5Nxnzon1fdh0uPcG2tgsU7HIzQfIbelviGCG8LNlWdepFPQsLkVZoqyloQQRuL8hV7F3kcWtB35M0EJzfl3Tvg6RNT7s85FijhejtMETEOm+xLTtgyxl/wL+E5NcRNaEqYm+m2C1801m3MehJAR+4HhycinFxcU4OFQWaWlphELgebFdhbYXnldAKKTr7Ap07dqVrl1Vx6i4uJhRo0Zx6aWX8swzz3DOOedw7LHHkp6eTkFBAfPnzy8TnwC+RSDaLcne06ZNm2Je21oeKgsr9/72228Vam9l4Tp16uwR8cO7zBXKflFZWVlx21SvroC83NzcCvXZsGFDbr/9dr7//ntWr15Nbm4ukydP5tJLL6WgoIBBgwaF4zoSoaCggNzc3IiXg8POQOfOnSPMm507d67Qef451Zk/fx3SxDYE6jN//rqwL3jZ2hDnIkHsT0ioj85A9CSqMm390nsiQSuD2K4w1qJwKtIy90IuTW8hF5p5KC7AFlQLuiItNP1ORILXQHwhMpWyAchBTXc8ra51L6qH/O7z8IOR7bkRMxn4uxrS+k9D1g67cSbhk44g5qO5Ohe5hUXXYTjXHLekL830O8Pc462m7V+RReBbJAC/GThnBdJwW0IT1HxbAjCLSEtOIo13LGE+FhHphubPFtyrjQjb6sC575nPg5ln7DEbf/EufgC+fX5ssb1oWLeqM9E8FiMB3Fpn0s27/T5SkeUnG79SeSPT/i8ormUoEt6bmOPNzbWXmWva79gz19tqXjYzGab/G9Czejd+JrQ/EHm9HKgbJhL63VW0JsxBKA3uEPRbexboQklJVpk6Lxb169cv4xZhBREHh6ZNm5KeHqKgYH75jbcBBQXzycgI0bRpvExsOw8pKSmcc845Ya3+Tz/9REpKSti9MJ5Q/vLLLwNw4oknRnxuicavv/5a5pwVK1YwderUbRqnLXnw8ccfs2zZsnJaizzVrVuXWbNmMXPmzG26
5s7EXhVx1rNnT+677z66detG3bp1yc7OpkuXLrz66qvhKPl//etf4WwC8fDggw9So0aN8Mst0g47EkFSMGPGEoKkYMaMJVGkINa5FRVaDjbtWqGq0dlICE1GmujPkEB7ozn2OgqIvRa5fGTi+/FbASw64Df4fxZyE7kGxTZ8Y9rkIXeVDVF3Yy0T81Fxu2r4WYaijasVEZbt31nm/hcgK8NiYgvU0UhDgv0y4ICotpZ0lJrj1yO3ry9Q4b/BKLvV/8z7YPO5DSq+HhGpdogspCPXKHvOcHPOpSgt6vnoe5iPng8riHtRr24o5axNQxtN/iqD4HlnmPHajdRmcFpnxrQBWSu64xPAWMTve3yXN0s2gq8goiuIl6L7tkHqwWtYcrEVCfwp5rNi9F3VQM9htunXjqsmCna32cAWIvJSgB+Dk0pkHE8aShbQHBHFDsjF6nwUBzLaXLN6wDrYFZGEiSjd8MXou+pk3i9G1sCJpt1hiAQNQG6BChyPJCtaM1avLiS4ZkBNlixZH0E0HPZdZGZm0rZtC7ZunbVD+t+6dRZt27YkM7Nq6mNUFM888wy///57mc9XrFgRLnjXokULAG6++WYAnn32Wb788suI9sOHD+eDDz4gNTWVG264IeJYjx49APi///s/NmzYEP589erVXHrppWzevHmbxn7wwQfTp08f8vPz6dOnD4sWLYo4XlxczAcffBD+PzU1lbvuugvP8zjzzDMZP358mT5LSkoYO3YskyZN2qYxVSV2mSuULRKSlxet9fNhv7ScnERZTSqGG264gQcffJA1a9bw2Wefcckll8Rte9ttt3HTTb6PcG5uriMXDlUObfipSNhvgawGHfGLh21BdSNi59Mvv5CdRTckLF2Ishv9F7k6XYaIRJq55m34GYgWImvFWUjba2GLoEUHPScSXv6ChO/XUGEzK/jZdKLWJzQDuafsh7Te1qph08OWJyAnGkN3ZLVYgoR3q1WP7jMYb5CE7/LTMqqtJVbvIuJWE79WQ1qMfjtRtujexUQWFTwVzX8G0qjXRG4xDZHV5GkU9zIIafFjuXCdjSxF3yICaS0+iQK4gwQknq5poenndeT2Y+NdMtH3MxdZoy7Cz7Zl59Km9d2MtPq2CCDo+7dpaq27WzIiK0GXKWstiCYtBP6vhgiOvV8Pv4BjGiIXf0OxQgvM/Nh7bmrarDD/NyH2nNlxV0cWhWXo9zsX38XJM38Xm7Gfg0hNW2L/PoNIR5Yt361KVpfN6Dm426QMjbVmVENEOHLNgNwwuXDxGvsmTjjhcH788XM87+YqTTlbWlpIcfFYjj/+5PIbVzFeeOEFrr32Wlq1akWnTp3Iyclh9erVfPvtt+Tn59O9e3d69+4NwKmnnsrgwYO57777OPnkkzn66KNp3rw5v/32G1OnTiU5OZnnnnuuTA2La6+9lhdffJGpU6ey3377ceSRR5KXl8fkyZNp3rw5ffv2ZdSoUds0/ldeeYXTTjuNSZMm0a5du4gCedOnT2f16tURv9cBAwawaNEiHnnkEY499lg6duxI27ZtqVatGitWrOCnn35iw4YNPPvssxxxxBHbPK9VgV1msbDR8osXL47bxh6LF1lfGSQnJ4eLhpSXvSA9PZ2cnJyIl4NDVUIbfXUkbPRDLhU3Ub4mswuQbc6vhQTJd5AwsgH5pL9p3oOWuRVIYL0QCcNnoADUd8zxVUggsRmI7kEC/y2BPqzwVxEhP+JuEfGphWoX1EOCaRZypdlg2rU01zgOXzCs6CZYnlY2zfSbiqw08ymbLtXC3pv1fQcbWOsjCbn+/B+aszeRMFien3EKym71lmk/HJG7uvjWmoX4grSNPTkVCfWnI433vwPHg6/WqMbDs/gBx/ECDBN9h8FjeShepidy+RmCLAMeIratzBg9pMW3Gn8bcG/JjTX529oSlqRaa4CNnyhA5KgWImpBImuDzYMB+JbApEf1W2r6qmaOp5j+aqL5j77/+khg34BfODHa
IhdEJzTH9yPCUgNZLV5HbldZyDLV37Sbi75rTP+r0W90NX6qW4tsZA35u+mnIQpAD5nr3gq8gDJc2TWjM2XXjGeQ9UOKPGe92Ddx9tlnk5W1jtzcL8tvXAls2jSWrKx1nHPOOVXab0Vw//33079/f2rWrMmkSZN4++23mTVrFocffjivvvoqY8aMISXF153fe++9fPLJJ5x66qnhitvLli3j3HPPZeLEiTGrbtesWZMJEyZw6aWXAvDJJ58wb948/vrXvzJx4kRq1KixzeOvVasW48aN49lnn+Xwww/np59+4p133mH27NkcfPDBMQPPH374YSZMmMBFF13E5s2bGTNmDB999BHLli3jhBNO4KWXXuL888/f5jFVFXaZxeKQQw4BlAosXjl4a84K1rjYHqxdq0XdWkscHHY2kpOTTYaH6kig2oCE0v8hgSgTuVbcgrTsFkuQlrU+8lGvhn6+3+JnXLLClXUxKTLt2iFh4yxzLBNpzkFWiXR8X/JOKMD1RyScWPO27c8Kb4X4AdAVQTYSsP6FrBI1zXs2fjG6JPNqRewaExUhNImOt0JzXgsJrjcTX+iGyKxP1jXGEp33kaA3gMi0quWNwcK6idnUslnoe8pFJM/Wp0gzn5Wi+b4dxQ88h8hInxh9D0IB86+jmJlaxI9jsIjnNuWhIPKNKLvWDyg4O4Rf46QECcxJ+NmNCvADq4NB0Jjjtkq3dYkCPVul5tgP6PmPHrMlEcE+7d+2H/udBt327LWzkCuWTe4RLWhb68AflM1kFo12Zvzz0HfZEs1RA2A8cmMbhMjYamRN+AMlEIBIvZ5VdqWaMdQx/w9ERPh1ZMGsiebtDfMCPQ9dkNLgAPQ7Xo8IYSdkHXsFPa+5LuPUPojWrVtzyilH8d57w8jOPo6kpO13WyopyWPTpqGcddZRcTN77kicfvrpnH766ZU6p2fPnuH4hoqiSZMmvPrqqzGPDR8+PGbcRrzPo5GWlsbVV1/N1VdfXeHxHHXUURx11FEVbr8rsMuIRdOmTenatSuTJ0/mjTfeKFOSffz48SxevJj09PSYlRUri6lTpzJ79mzAL6zi4LCzkJmZSX5+MRLEmiOtdWcknFi3pzkokPczJCQ0RML9x8gtpDYSivohwe11pAXtiFK/NkNCRg0khM4P9HcbyoxkLQbrkfC5Gblb/Iifgeh/SGgNmretW1K6eeWb6yRCdEDwCcid522kcc3Dd1tZYfpMx8/QE8taEYtcVEQ48ky/mcAhwGS0/FnCEEuAzUffFfiCZgl+HYa+lCUV5Y3B3oPFecht5VskcLZAZGs5SnObhATFDfia9yvRfD2GfPcbR12nEdKUP4AIbH/8wnPRSKTB9tDz8j4SnJsgF6yv0fe4FpEMSyqS8SuvY/4PCs+WpG1G5CFW7EwIEbol6DmPJiW2vyAJstcrjfN/tGG+E7LUbUC/xyCSEDFYhJ9+1l4vGtVM+0IUa2K/l7vRb/smM5bq6LlejX531cx1M/FjR2y9jFxEKpeg+XsUpUjuhH4/xyCrlHV7simev0Tf04FmHE3M2OuieX8UBas/Zd6XEu1a6bB3Y/DgQXz33fmsXDmERo0Gb1dfnuexatXjNGq0kcGDB1XRCB32FuzS4O1//vOfADz00EMR0fVr167lmmuuAeRXFjQ3jRw5kv3335+TTjopoq8tW7YwbNiwmGnBvvnmG84++2xAxYgcsXDYmQiFQuTnpyKt4lAUxPwACvTsggTWLub/+5E2dRgSHi407e9FRb/+gQSeR5Fw9giqG/EgSht7IvIXr4ncZ+5H2tOhSMA5Dd/NqT1wF9Ig/4rvMz8ZuYwE/dltxeQCJKhsREJ2hWfBnNcdaaNtEbo8JGRtRkKX1UDnUTXLkxWYNiFBLQuRhUVIMLMBvrHO24RIWGNkxbGuOg+ZMd8Q57xE44gW5PORZrsx+l7An5v6+C5Hq4hMyzoQEbuHKAsPxXNchzTV96H5LYpqkwh56Nl5HhGT
vubzzcDPSJj+ClktRiNNeRISdGPdq3WbKkJkOFaWLwubtSuoBQ32Gc9FybqtBRMLQFmrVFvzXjbTi1DDjG0Fsa049rr55lhtpBhIQvPkoargmfjF9kLoO26Mnrc8RCDmmvvNQL9bm6nqG0SsVqDv4SNEGE5F5KGdeT8TWbE+QWvKGuQ+9T0imDXNODJQnMdFyCXt3/iulTnORWofQOPGjbnrrptJTR3FmjX/Lv+EOPA8j7Vr/01a2ijuvvsWGjeOVmw47OvYZRYLULnz66+/nqeeeoojjjiCk046iaysLL788ks2bNjA0Ucfzb333htxzsaNG/n999/LlG8vLCxkwIAB3HzzzRxyyCE0b96c4uJiZs+ezYwZMwCl8hwxYsROuz8HBz+966Vow7fay0Sw8QAtkZvUpyj+oSfSlL+C3JpuwXdHsgJ5jrnGSqT1LkJuFWehWID7kKXDM/1UR3n9vzafrUWCRwfT31bzWR6+AO6Zv38z17dBq4kKMFnhrAOKTdiCXw+iyLxPMO0KzTWtNjla6AlqsCviHlWKBHOrIW9tzlmMyFWBGUNQ0N1k7rEWsgp8iVyn5iHf9bvwtdkVRfR92NoFjfCL4v2OgtdL8eMu1qD5Wos00SH8SuV34AciR1/ncvP37ShofyCyQmVEtYse01hEbDciS0Vfc6wUCbmzzLVXIOvTQhSzUISyRvVAc2djVLbiz2dTJEzH2nqsEB+Ma7HjtKQqun0o0MZavGx8jiUY0W571l0wXhpx+ztaH+eYxRx0z93wLV+foBiSg/CrzyejZ2s+im/6Bj17KUSOv4XpqwRZVHqjec4yY96InuM6+NbDZHzSfyKqf/EUUjisI5L82tTL35njL1Ne7Q2HvQt9+/Zl3bp13HvvMyxfvpwGDW6qlFtUSUkeq1Y9TlraKO644xr69Inliumwr2OXEguAJ598kqOPPpphw4YxceJEioqKaNOmDYMGDWLgwIEVLrqSmZnJHXfcwZQpU/jtt9+YOXMm+fn51KpVix49enDuuefSr1+/XVbExWHfg08qBiEBoRg/wxHEFuzAr6zcHFUpbouCMOcgl5lriczUFO0CkoSE1VTTTwoS/LOQhrsREixuQ5aOW5BGtBiwqeqaI/cfKxzlIOHMFvWyAmMJEtDWIZLSkNhZkez/QaF+fyScbzHXeAe/pkAbJETF0obFEn6CvvZB0lGKSJYdZwjfvSkfP+uU1Sqn4BMRSwIvAEYhcvELIhvHkzhmobzxFpvxlCCrSA/k2vQGSu3r4Rd+24TmaJ0ZT30z1u4o/uEd5FpmYZ8DD8VYgAjCLchdrjtyrWmDX3V9HtLgf2WucyR6bm3BqGLkSvcqSmv8GZqnJsi6lYOE5tGIiATjK9LNcTtvXyABN7gWBy0DlnhbtyprMUvkylVq5ikn8HkSfsrgoNueDcwOWuKCfYGe9TX4LlvBcdp2M9Az0hiRuwJkjeuJSNF69P2tQr+7oDtjO/RbsClyrUvTh0iJ0Am52WXjV1HPROTSWuWDgevB8f/ZjPkRpDC4Gykp7L1eg8jmDOQW+IgZ58OOXOwDuOKKK6hduzb33PMYy5ZNIjt7ADk5JyXMFlVaWsimTWPZtGkojRpt5O6773CkwiEudjmxADjvvPM477zzKtS2X79+9OvXr8znaWlp/Otf/6rikTk4bBuU0zsHWSoGIEHc+qEncjsopGxl5b8gYeM15O70l6hz4vVXFwmv35u+bAG1G5A14xWUCvMwJGTORBrzrYiQ5CDBsTpl3ZLSkEBui7VtQgLUfPN/zajxWY2z1UIX4KcIXYdS4M4z581FMSMbzHXqEiksR8MKiUFyYduuRQJeY1RLo2XgXpLxLRXWCuMhgbIEv7L4Afh1BdKQcJyCTxQriyLzsmTMzslJyGpjU7CGzBjrIZ940ByXoDlOQ0Lq5Kj+7RxYYfwKNN//h77bccjSEB0D0Rz58Z+NnoeQudZy5Hr3prnf2WhOf0G1Up5EZPcj
lJ3sD0SG6yEyYbeZEH463C+RWw+UjZdoat7no1iFEnM8SCyin4OghSk4D9XRb6AB/nNhM4I1QL+3WJa2DNNmCz5ZCV67AJEr6wIGIuF/oHS6toDihyiLVk38dMSp+C5Saea+Opv56IXm9X8ow9R1yHKRjH6/uWZ+1pqxpeATYFsw0ENrTh6KnzofEYh+5p4bmf7ORUkA/oSsWSuAFxy52AfQt29funXrxn33PcSnnw5m6dIhpKR0JyOjA+nprQmF0vG8AgoK5rN16yyKi5X96ayzjmLw4EHO/ckhIXYLYuHgsLdBgdqHoHSQtlhZeaQCylZWBj/moD3SRsZCUHNpXS6+R4KO1cbb6zdDxfKaIK3oNPP+VxQobuMhWuNnmIqGLRxm+452wQrWp4iO0QBfK51qxjoKLUfLzLgvQULianONBsSPuQgSCXsdG2S9AWmJU5E2vge+YNmSyPoJqUhwX4PcTVIC934XcidbhzJsWWJhg8wrYr0owScv683/wQ26AxK6N5sx23vLwrcUFZl72owsFwfgu5YFXRpsULB1bfoIkbhNSHs/AH9OayJhtXZg7taZtl8gl5nZ6Hk4Gwm9bRBJOQo9my+g+T4c+e8/iF9gzt4H5rwjkQXuODPm6CB/GwfzK8p6ZYmWfRbts27Pi7YwBVEzcC+WIMxCBLsasZ9tO3+27+DYbPuPUGzEY4E2y/BjQzLQvI0y93CTuS97fjL+82OvZVNC/9mc8ziyJmxGbm222v1S9Mw1QwTGEkD7bKWheb0TKQtS0DMzGD27PfFTHV+KnoOXkFvdBBQsXhbNmjWLSNXetGnThOniHXZvNG7cmGeeeYr58+fz7rvvMm7cD8yd+y55eR6eB6EQZGSEOPjglhx//Mmcc845uyT7k8OeB0csHByqGMnJyUgovhEJA1aIKC8YeSsSImxxLiuEzEeb/SDzma0uDJFC9TLKulxcioQo6+KzAAlWY5FAuATVszgCCSvFSPC0wc6JgpHtcSvQ2FSZ0S5YVuOcgi/UNzPXfxQJpaciUtMNCfZTUGBwIySw5iFB2gpS0Qi6tuSa65eY87ORILgaWWjeQoJrNn560wKkBd5o7msD+i6ykbBay5yzFGni0/AFd1sszloAgi5ZpYEX+L79G/Hdxixam/cl+OlG7fznmPHYwO5NZix2rMNRxqAM9HzMQ64u3yAScwQKxA6hZ+R50+cRiFSswK+svtic/y36Lg5C1rJ2iASuQm5V6eYerjN9DMHPfvQaCja2VrfgdzYICc9DkEsOgTmz6IZIzfX4Lk3xYF3dGsY4lo4IhyUexYhgHm+Ox/tNRmeUCo7N1vJohMjRhqhzM5AlJ5iOONYza8kf6PnbjIhmCH0Xg809PYueh5PMfVpLYnDsNpbDWsM8RGQGIMLwKiKgw9Fvqanp5y5E8nqhtWIg0D9stfCDurPQ77p+eNxLlmx2qWv3ArRu3Zpbb72VW29VEpwlS5ZQWFhIWloaTZs23ekVtR32fDhi4eBQxVCdimZIOzsLaUbrU37Q9gb0k8wmUqB/FxGFM1Cg7Dp8AcTifaThrIE0lcfju0ZYS0EyyiTTB/m4f46CuW/Brxh9AdJOz0IuGOXB+tHbImgpyFJRhF+fwloRkky/zZDAOwwJtGnIgvI1EnL+DwWYd0NWnyxkBVkamJ9gvIcV8G26zmIkmDVFQl4eEqQ7mvZf4hdeA2n7bZEya6WwBG6rOf66GXMKvlDfCL9wXhGxs0tZWNJhU47WxbdKWAQLvFnYMdqq0lno+2+N705VgAJ2X4jqryGKh/gLfiYkUKrYeei5+g65xNk4CEsCWiELzTmBczeiZ6wOIgbB5/lM9H09hJ7R19F3MACRy1L8ea2HyMj/mTHGErytZeQLVBQwmgBY4rbGzIsltLGE24aI0K5EGa3WIUEaIl3ZggTIZuOKFqo89JuZi35zIdM2Df/ZWYS+j774MS7R4wbfOoS5h2T8hAB2HDaF7aPIYpmML9zH
go3hslZSG4fzESIYDdBvoQ8iMrOAMchN63WknGgOrDWpaOtQtsK3TY89m2CFb5e6ds9HZmYm7du339XDcNjD4YiFg0MAHTt2ZNasWeH/O3TowMyZMyt0bqR2bz0S6IIWhZbIVcT6sEcjj7Ia+RByDToRCS9Wcx3E/yH3k17I1aEavk8+SADMMH1bQSoNCWztUVDu20jYvQO5sdgA24rEEFih2WpLi5HQvBkJWfXw3YxGm/fBZi7OQGSipjmnCSJGvZAryX3mWHMkQK8387Q+xjjs/NQksj7AY0hwewBl2JqFBKvF5nixmbPmRFoitiLy9RQSog5GmbDSkHVjMxLUauAL2dZKYREy47bkpRgJutGkAsq6idn+gp9VN/3YwPaaSNB8AX0PBaZtazPOeeh5jEYbRCjvR+5y95l+Ms08VCdSSPfQ/M1BVd1jkeQm+KTlFhSTshZ918EMWiFERHKRj/8KfFchkLCdiQTZ55D7WpDwgeZ4Bfpe6xMZ0xONVPQ9zUMC+pHmHJsiNhoeIqnJRG6RpSie5HXkmnQQflKA6mYMIfR7qoWsLdGEKdrty/ZrrWM26DwYczIQJVV43PydGtVHdJ92rmxBy2AczlWIYL2Pnvl3kAvew/hJHVqg7+IgZHXtTezv26Ztvxs9E48DM12MhoPDPg5HLBz2eSQy98+ataxcc396ejqFhR7S7jVHLindkHWgGr5ryq9IYB9B2aw7NvNLnUDPNrZiEXJTABGEdfiC1V9RjMS1SDuaSJO/AQkvQRec2kjYqIPIxbeoaNsXSBNpK5tGu7NEw8ZleOZeSpCgtQxfSPsWCbHnIdLVGrmUtEfuP1agyTJz83ck7Nxm7ifd/G+vYV0+7LVtnENQ+HwRuYD8BQn1LyNScjciYyfhFyrcaNoECxUuQULpx4gYdkTC8jH4VZRT8bNmZVC26NlG8391c4/x3Hrmm/cW+OQkKGBi+m+AvnubwjSEUtTGIhDBtsEg+FJz/6OQkHwcvjtNrDoQVqC+ELlPJUIrZOUYg1xwfkLf5cmmb9uvfe6GIPJ8Dfrd2PiTAUg4H4LIiSVteYhYFSHSau8rkTBbA1lopiMrzir8OKbguZa85BIZ1J2Ln6r5FES+C9BvyhLpDPP+G5qvTCJ/M9HjC85vgTk31m8sE/2+b0ek4IA4fYYC76n4ljSb4jkPPSMDUW2bxei3+ARwNH5Sh5fRc/AavnteIqSjQPD4qWuPP/54vvnmm/D/xx13HOPGjatA3w4ODnsaHLFw2GchQpHK9pj71UcOSg95IwpmXYM0wrayroc2d+uC9CVyA/oz0tT2xXd/iRY6bbEwu8FbzeE7SBO/FBGK2yhr6QAJuzWQEGVjD2zmplr4AaHno+XgMRSDcBDSOB9D2aw4iRDUmNpg2iwzFyORcPmYaVeI3J9ORH7vJ+EL5Rcht5WnkGB+N5GBvknErsdgj+chDeooJKBejsjKPGQh+QNp11/EJy1We11ixrw/0uR2D/SfiUjHheg7zUXCniV70WOx339t5JqVCLMQ0QzOd5Ak2fuubca4GgnkzYhNKux1i/EF8QaI7DyOtNb9zP3Z56CYyJoeQYH6OCSEJkKp6fs/+K5kU5HQ2tpcq4P5Ox0RkCtQ8PBfEOk9GdUOaYmE1RHoGT4HuaHZ76cpPskM1oSIngMPkajPzbU+QYL1bab/6N/cRjMHzZDA/xEiN/PQc/RA4Fqr8QvQ2SxcNZErYiJSEfwsWLcjHnnvjn6vY1HmrvJgf4dFiOh5aC3ZD/0ur0K/iz8jIn0PIhSDUQD3NESQKkIsLLLR7+J7YGNChc033/zi4jMcHPZSOGLhsE+iLCGovLlfqI+sCXeijXWF+dwG8RJ4twHOPZGQNgS5oqxDsQ32vCCii4UlIe3jKDPeQyhLKuy1Iu7Y3G82kZmbqptjNRDJ+R5pY+9Fgtzjpn+rzU5ktYCy2WlqIjePV5Cm/9lAH2PNvTc07+cG7tFDQv3vSOj8EdVq
OImyy1bwXm0WpKFIm3w7Im5PopS2f0bCIeg7BQmu1hc+GZGa/yN2AbwDEMm8Cz8uIQdfk24Jok0PmoLmO5fYNQeCwuWXiMhFE0Trx4+5prWOlSDXri1IWLZxMtHffX0zjsXI8vIWIhfWHW0TkXNabK73Kfr+5yCB8YkY8xFELnpu/gOsiiLfG8w9/oCe32hNewukNQ+h+IUvTZt1yILxICLE15m2NnZoK5rzWM+mh08w30cVxM9DbkFrkTD9NJFkJ8Xc70IzT9Zq1QgRjEOJjO/YasYTvOaxaH5rljNfdg5snEXwt2+vYdukIAI+rYJ9gp80wn63hYFj3ZB19VMiizM2Qb/7GaimSldkFUqE29Hvy5KZNPy6IR56/jqafg8gnsImJcWjqKgIh50Dz/NYt24dderUKb+xwdq1a6ldu7ar1O6QEI5YOOxziKyGbQlBeYg297+KNshByLXAwrrjlBKJ6IU4OutLDtp8g+dZ4Rx8wfJ9ZK04Hwlo11I2ZWciJBGZuckGHGfgZ5EZjISdG5HPdSM0V8XEznxkXVRsgK7N4mOtCi+aMf8daaFBAt8wJLi8i1zDgtpRO1/vI2HwM+Qq0wZfEGyDX+BtPn62q3XIXec2NK+DkFvH0YhwRKMDEgIPQ4L3f/ErVUcTzlsQ8fgQxcoE3bKSEFGL1oDXxE95WiPqmJ3DL5CgeiFlv8skJKimBtqDhMwSM+7BiKhGWwSCc/M5Etq3ItevGchVrTW+dWE2vhvYYvRsvYuE5XjYimJnRLohN0ID7WcX+gXFqdyGrIKlZq5iWVzWo2fmJWRB6II06b+hZ/Rkc39p+JXTg/NeQNkK4r0Rkd+KnqtCRConm3v0TNtcM7YcJAg/ip9FKkgqNhKZZniL+Xx/814rxlwFv7+ghStE2TUjiGL0vX5A2dTCFrHWAPvsgD8/1vWqB8q+dgua/974KXiro2egoznWwbQ7MdD3UET2s5Al5E+IZNVFa6qNB/oVfRcPIULzD2IpbIqLXXzGzoLnebz//vt88sYb9LrkEs4444xyzxk9ejQfvvYap154IX369HHkwiEuHLFw2KegxbAeEpqvInEay1jIRsJJdZS6MTq4OQdpOLfiC0tB/+1oQaIfsh48bvptjDZ3i6b4xcJqI81gX/TTrY0EyVh+1kF4Ue8h/MxNa8z/1iLS3fT7NhLIrZVhBRLosvA1rEGE8K0aVvO62dzXi6guxZ8D9/04EswOQNrsQXH6DCHN8c1ovucgQendGG2bm/GfhzSvH+Jr3M8jNqnA9FeC3LKKkWA5BWm4H0Q+9dZFrh4iB48jIcsKYnasJUR+z6C5DaY8ja55kYc054chAS0aSUT6/NvYm2FI4H/K3OM7SEh+J0YfLZAg+QL63t9AZGA2kTE5BWYO8vAtRzdEzYFNaWu1zp+iWJNckpJKKCkpKxj65GIK+i7bIAtLB/wg41gEsRuyMD2P4n3modowQwLnt0C/hxIzjlmUrSDeCLlDvY+ITX30PdyCH+j8AHpWT0LPSl3K/raC9VFqmXfbZomZw8amPxtgH09QttnS7PNQSGzCYC2A7fBdmiqauScJPw7Huo5tMn0egqwwc5AL55fIEtca/XZtgoIZiJRegObxJfP3SmTBuRGRkmXoOa1j2tl1IOgCOhQR8+uRcqT8+AyHqkWYVDzzDJ3y8vhw2DCAhORi9OjRfDh0KJ02b+aTZ54BcOTCIS4csXDY6+EvftWQ4H8ucBraCJeYY7YGQ0XMwivQxuohYfcE5FIFvnbbWhiiMwQV4xdls+gPTERC3z+Q8N4AkRibpWcW0mrXQoLeX5Bga7XYiRDLRSRkrrHBjMcKN2lIOP/B/H8DmpMnUWaa/khbnEGkQB2E1RYPRcJHOkpdWmzu+z9IwDscCYC34wexx8IW/KDoIqR1b4KsHa2Qn35zM445SIC0Gvc6KLbj6Dh9e0gLrgD3oECj52YtEqBT8OMwCtAz8ACKP7CB4zYrlvX5DyKY
8jRYFK8UxZysRRmQol2kQMJaEZHabkvMLCFrh+8SZ7NxFZqx2SxPQdyKBD0bsyA0b96cP/74owJzEE1CyveTj6yLUIq+13gk6ARkEWqJCFAu+n38GQn/H6A4iRG2d/OejATjHoHz8xFpt+5QZ+FbmUKm70eQpeoMFGsQngF8i9xm/MrnjcxcbAhcvxDNp11LVuMHzRNoB5HVxEvQ9xTLfcquF6n4xLKQiiOEnu9maC0pNfeQhebJ3t+xqCL7n0y7DWh93B9Z0e7Cj+s6Aq1DfzGfZ6N1NBc95zYoPvg8pKFaNdYF9GFE/Pqb49noO2gIPOzIxQ5CkFScm5pKj06d+GTRIkYlIBeWVPRNSuLUTp34YskS3nbkwiEBHLFw2GtRNjg7GRGLu5GgFczck4sEUSuMWi1uDmVdLDYjwfYWJGzfj3LuW6SY/hoRKSgW4gvjKfguRRnIz/kOJGyloUJ2qWbc3ZBwlY82clDw8SWUTyoiZiQwFjuOmvjCkkUHZBHIQ0LGRcj3/x4zxieQ4NaR8t2RbjTzVM3c/xAk6DfHJypnxhlvASJTTyP/7yz0/XyEhJBPKOsiVGSu1RqRvlPj9B10YZoJFJcRZIL/H3TQQfzyyy9RfbyGngPrCpeKTy6iff7TkMC13LSrR2TWqqCbGIFzrYbcCvJLkYXgfeTeE4uQVUffYTyUoliImUBJQgEueOzII49k0qRJ4f+POOIIvvvuuwTXid1famoqxcXL8Qu1dUP33gY970mIyLxK0BqieX0VWT0GIjfGAkQ0C9HvMgsJvTZ17hgUrJ+L5quPGYklgu8j0jwTuVsNQc9UkDzlExk0bl0JN5i+7Pow2txTNXwysZrYleOLo/7PMtew7eyzbGMkbCrh4PUqgiL8xAigZ8mmPN6MX3G+jbnmYmQ1swqSWea+JqPvYSWyaF6DYiZWIre1deaeLamA2Jm6soisp1EHWRMtBiLS/gKpqaku5qIKUYZUNFUyiVObN4c45CKCVDRvDqDzdiNy0bJlS/744w8WLFhAy5Ytd9k4djT2pPt0xMJhr0TZ4OxOSBC/mchgROvaUwdt7GuRexBog4+2aGw1n9tUkgOQpvgHfL/hxmiD3ogE92L8glXBtKjBxbgHEnDGmzHabENzkLb9aSQsH49IhUfsWhgVgd3wS5CmcBUSnqzrVmtzfBHSWIIEvpeR9vM/qFjcCCIF6Gh3pDaIRHhI+Pun6bMEBYk3NPc4ktgxAV8iwawjEtY2muOn46fBXY6sPVtMm8OQ0GJds+y8R8eDWI1xIbJu5CWcsZ9//rnsLIZCyG98GX6sTprpM9rnHyTwFpt72ogyLY1EgeMXxbiqZ/reiuZ1MQoq/xwRkb4JxxwbwaxNuZXSCleWRMSDFRZ9a8hcKmoN8d2pYrmp1UfC/memz/Hmf2vNaW7OnUNkOuHaKDX0bPQ9XkakO2Iq+r3XRL8R+7zbwnhb0ZyONMctUa+HH7yfh185PpjcwKImEtA3mT5sG/DXi6BLUyzEskx+idazs/CLCdbFz2IFkRnRLHlZjuZ3FlobuyMr79OInN2KH5ezAd9NzFrJohHtHmjraTyJTywx47gDmEBx8ZQ49+lQWcQjFRaxyEUsUmGxu5ELh90Ljlg47HWIHZz9f0iAOCnQ0m6EQe19TST4bTB/ZxBp0SjFj08AafLrIjeKJuZVB2WVWYXvg281g0EtXjDmIQ25Nv0QGEsO2uCtVaMrvutEMC5iW2DdspLM2LYGjlltZSyXiwOQC9AWZFVZiOakFnJLqh24x/XmflYhQbgICWgHI23nROR+8SZlU3M2Mu0uQgLV5+Y6H+Bnj8K0OztqjNYSUUx82ADzjxDZqTx8154XUGDxQORrnoRPaKJjcLKRFv5pNHeX4buDRPSOBK+1pp+vkPD6C3oGfkdCaEUSD1jEztq0q7AtdQ4iCUYiF60ifBe69fjFKq0loDoizTYwO4SIwSOIeFyNfo9Z+L+5oAUBc/1fULzK
RrTWvIa+G5vooKbpYwWyNiXhp1+2606B6T8J/d6b4WdXKsBfJ2Yhgh8rDiMW8pC15kC0lqzGX+NS8IlKM/waKmko49yj5px7kdIjDRHgJsj9rpqZ2xIUc1Rg5mM+firrREjCL/z3fyiOyyLHHOvvXKKqAOWRCosguZg9ezazP/ssJqmwcOTCIR6ic1s6OOz2OPLIIwmFQuHXkUceGT7mk4pBSEiwgtcP+NWrQUJz0LUgAwmBqcjiUB8JCyHz/35oA05CQkyyea+ONt7f0cb9I7Io2Gwsq0yfaZRNNWphN84O5twt+NmVmph7KDDXX4cvWGxl22GJkc3cFEyHauclOv2uFcys0NwG3XtXJODnIrIx37z/gdyV1qI5PAulnX0TEYshSGs8DrlFPGnex5nPhyBt8+NIiM5Dblibyrm3ZHNPtr5AWuAV/NymIq2c5j4InRcM9j4SuYy9g565aciN5A1ksTkGuYZtRs/jWFRl/EHkljMdZdH5Es3Ty0ig62+ukYcfb3AKCrK331c8bDXteprzdj2piMa4cePwPC/8Kq94mt92Mwcf3Ar9zlYAqzj44FZ4Xp65x63oWe2Hgtz/jdye5qPg/mPxLVl90Ty1Q7Ez56I15AP0nSxARGIk+o4vRUJxK/Rd9UVxQ+PwrVbgrynV8V0pVyJiuQAR22X4FraN+OtFCL/Q5Vik3a/Id2ctU2vNvW9CFpTa+AqOWciKk4lPLN5HrqI9EEnqbvr6Ba1tA/CttUHlRC1kdbTKkNUVGGN1098PyBIaxBlmbA7bi3Xr1vHJG2/QKS8vLqmwOLV5c/qGQsx+++2EpMKiR9OmdMrL45M33mDdunVVOexyMXz4cEKhUDgmrFWrVhFywddffw3Ae++9x1VXXUWnTp2oVasWGRkZtGrViiuuuILff/89Zt/9+vUjFAoxfPhwZsyYwfnnn0+jRo1ITk7m7rvvDrdbu3Yt119/Pc2bNyc9PZ0WLVpw4403smHDhog+YuHLL7/krLPOolGjRqSlpVG/fn3OPPPMMpbhit7n7gRnsXDYI5Co2NKkSb8Fii3loA1/YODsPCKrV1sNobUigDbILfiZWmriZ3bJQhu9fa+GH5OQhtysRiHLhXWnslrOXLQRR+eCj+V/bF2QbNYXe/wNM5bGpr9s/ExRnU2bygiKwYBQ0JwtQ1aa2kRqMqPHG7QEWHeRRubdWoCsi8WH5l6eQGlx46E2Esij4ccCZGWlkZ+fT2npDJQ95pFy7tEiidj6E7/vpKRYWa4qjvK16JaMZSPy+Bh++tJ5KJ7FpjzNQ4JnARKKS4kVHJ3YJSh+1qZgcce9BdOmTYt7TFalNJTxaxCRFh773Vj3uBB6lp9CvwH7vbxHZIrYIvQ9V0OZuQ5Cc74FkcS3ELk4yZxnFQylSFiuhW8BKTV9bUFEfSN+HE5dfCvMV8jyYq1zwe8wFPWZhzI3jUIZtZog8lOKv+XbBAt2jNPN8f+h+InL8clDCBHluihxQ/S1bJ/JaI2yqaxTiAxGj37uQvhZ6N5F7lWY8zLQMz0bh+1DnTp16HXJJXw4dCifLFpULlk4tXlzjmnYkOy08mN5Plm0iBnZ2fS65JJK1cOoCrRt25bLLruMd955h7y8PM4++2yqV/eTVDRs2BCA8847j/T0dDp06ED37t0pLi5mxowZvPLKK4wYMYLPPvuMo446KuY1Jk6cyNVXX02jRo047rjjyM/PJztba8jy5cs59thjmTdvHrVr16ZXr16Ulpbyn//8hzFjxnDAAQfEHfstt9zCY489RlJSEl26dOHYY49l0aJFvP/++4wePZoXX3yRyy+/vFL3uTvBEQuH3RrJycmUliZTserYQ82xQfgEAeTC5CHBvRBfqM5HWvV84ldNLjXnt8UXmKOzAFlCsNmMaynyZ85EQkQwgDOYajRaILAWAqvptBr6N82xbLQJbzB9zcIPRo0eezwEsxbZsaTjZ4upQaQm06LYnGutGkEfdItkfNev
jfhFsxKRiniIjAXYvDkovP8HCTAD455dmb5jpUfdFgQF9s6dOzNjxgzzXxKa11rILWUx0tJaAtAYPV9zkNZdgcLt27ePq1GruEtQxbM27a1ITYWioniENDr+xr5aolgn0BqzBD9+4FkURJ6OtPvHmPZNzfFG6HfbHp/E56Dv2QprNjUz+ClhN+HHZNk1oy5aV4Yi8t2K2ETCwq4Zo5Cb3YnmHEui7DW/RNbPs8x1P0HE8xKkgIkm25MRCQm69tnrJkX9b11FV+IrgmLBxpl0N/1bhYWNLzkASOH4448v14LlkBg2IHvU0KFQAXJRUVIxyvPode21FaqDUdU45phjOOaYY/j666/Jy8vj0UcfjRnU/N///pdevXqRlZUV/szzPJ599lmuvfZa/vrXvzJ9+vSYblwvvvgigwYN4v777ycpKVJBde211zJv3jxOOOEE3n//fXJycgDYsGEDZ5xxBqNGjYo57hdffJHHHnuMtm3b8u6773LggQeGj33zzTf06tWLq6++mmOOOYZ27dpV+D53Jzhi4bDbonLVsT9DbjYD0WZWgG+RsLECafgC8kLznoqEdeszbIWyYLYom8vdZuApxa+AXIy/cRbgB3YnIatKTXN8FZEBnMFFygo2BYH/P8WvHv1X5BaUZvpLR5vu52ZeMohtAQmiBH/DTiHypx/Cr2L8B9JkdsdPi2sDnT1z7fKWDQ9lypqJhJGqiwXw4xqig6a3v++qxPTp0yP+15jXU9UEINi2TZs2zJ8/P/x/69atmTdv3rbdwF6EwsLCChBSSzCg7O+oOvq9WUL6pvn8ZPS7noSsc6X4a8dmRKz/aj7LN5/VpKwFLbrOSS0zhjXotzPcHHsuarwRd0nZgoCd8N3/bNIJ8GugHI6skp8hUnsIWk+sq5RdUzYhK0p0pjGraInlV9/AXGcFkZbPWOiALCKF6LdsLUhtgXS++Wa8i7WoAlSWXCTCriYVlcH5559f5rNQKMQ111zD66+/znfffcevv/5Khw5lM+m1b9+e++67rwyp+OOPPxg1ahRJSUk8++yzYVIBULNmTZ599lkOPPDAMs9saWlp2JXqzTffjCAVoNiyO+64g7///e88//zzPProo9t627sUjlg47JaofHXsh5HGsDfaTK3LTjD7yia0QW9Egl1zpEmOtTFa7UYpEu5XIaE7hF/IKhUJ3ZtNW6v5X2OuWcN8XgsJDMuRNSPF3E81IouTTUfa56uQAHwIEmSykKC+0lw/E/ndf46EglPwXX6iNbBW2LEWmFiaKGuJyEHpORcgS0gwsN3Gn5SHSGuANKOnELuKdTQSV3C2iB80vf197ygkSl174IEHxsw6VVk4EhEflSOksdaDICG18QNv4JNE0O/IWkS3ImG5Haq9sQy5o61Av7NoRUYGWpeW4leuzkZuWa+a9s8iJUpr0z6YQe0rIgsChvBTXtu1wFotHkdr2lNI+H/M9H8jkZXh7Tqy2PzfOnDMEmKrtAlmlgJfsbLUtLGKmFiw/S5EAfU2KUUOmt+2wApHLqoAVUEu9iRSYTF37lzGjBnD3Llz2bRpEyUlssitXLkSgN9//z0msejbty/JycllPv/222/xPI/DDjuM/fffv8zxTp06xVzXp02bxrJly2jTpg2HHXZYzLGecMIJgNyw9lQ4YuGw2yEyALui7i6zgYvxN/kgubCb64+otkJ0ZdhYsBtYCGkZs5FgvwY/u4oVuBeY9rVREPevKKaiBb5rUCoiMgVIc51n3kEb9+fAx0gYSEFk4neUFakO0nj+gIKei5HrxVFIuD4eP0NLNKw1wtbNsEJGEMEg9uHmPl9GaR9rUHGUtQZUr16dvLzKxwJkZaWF3Z9iwfM84yZX+b7jVYfeWagKEuFQeewIQnrYYYcxderU8P+HHnooP/74IxCdjvh8tAYkIwIQy/UStCako9//4yiY+nrgJxR4/jYSuMFXIrREwdbnILcnu0bZehvBZ93GXvwDpXt+FP0u2hKZ
MS84JpskIhk/dW5BoI29DzvumviumynmnoL1LaJh599aSPPRuplvPn8IZZBzVbmrAttDLvY0UlFSUsKAAQN4/vnnEz43ubm5MT+P53K0ZMmShMftsei13lqU582bV24GrdWrK5IAYfeEIxYOuxWkHYgVgJ0IK9Em1Cnq8xR8i8VmpNnri0hLZRBCm2oT874Kbd51kBXjJbSRn44f3GljGFogLePZ+LUabLDVEiQQf4+sGqeZezgUbaxbkUbyZ6TpHIG0fzcjYeI2VBX3aeT6AJEVvWO5W0XDujp4yN3qdzRXL6LNfPuEr82bZc1JSUmhpKT8WICUlJQKF8WyWicXZ+BQUXieR1paGkVFlSekqalQWBj5/FgSEe9aPpEZjxQfp6EMc0XombSxYLY+xmIk+P8P/daDRf2eQmvNUYio1ELrgI2FKkVW0Q1IsWFdqjz81LPvo9S7p6M6KqORpbcH8S2SuciaMg+tpdn4RUQtCvFdR9chC21D0zZxjRifpNjMfEXoO5hj/j4GFdF0VbmrCmeccQazZ89m1NtvVzhQe1NhIaPWr6f9uefuEaQC4Mknn+S5556jYcOGDBkyhKOOOooGDRqQkSGr2IUXXsj//ve/uM9TtWqxYgl9JCIHsY6Vlmp/btiwIaecckrCvuvWrZvw+O4MRywcdisoULsTclWoKL5HG1K7GMdSkaViKdL419rOETZAm90vKJPJJCQQXIjcEFogQlGKTPuzUPXoEfhuCo2R8DAEaffuQylbVyHyEUxN2wkJPr3Nfb6DSNdNiKzchNykGgFXEt8KE28j3mTuZxSqxZEbEIiqLutQcbFfU+Lss8/mvffeC/9/1lln8e6778YZX/mo6urQDns3CgvlvrMzCKn/W/oRkfYXkVBvXZqCRSFnoLViGfot7YcvaKciy0UdFLsxB/02W+IrB2xhvkb461whsoYONX3ehiycjyJScQWKTSvrBiKMQm6mBWYu2hIZrG0tL9WQdbNBYCzz8VPsBpNpRMPGBbXAVwYVI9fQ6mi9+QWlBp4JvOPIxXZi9OjRqlNRq1aFSAUooLtvrVqM+vxzRrdvv0eQixEjRgDw/PPP07t37zLH58yZs039NmmieMuFCxfGbRPrWLNmijeqU6dO3DS0ewMcsXDYbaANuA7y9a1MQO4W/NiDWJvN1ch3twD5/PcsbyTmPVZfIeA7tDHXQ7UJ2qJNMQc/53wGygDUG1kYvkQBk39G6SknIU3kTaat3YRj+SLbOhjdETkZiYT9DYhMrEP+18tNfxUtoGXJzyvIX3x1eLPekVmHtodElAdHIhwqiuDz2r17d7766qvw/yeeeCJjx46t0uv4AfwLkOBfi0hhuzkqCvkYsmDcg4jAY8hNaX9EGq5DLlG3oPXySERS2iOr6iKUXtdWr1+D0lL/H/q9X4KskrejoPSXiYyfsHgZrSt9UNamPyhLDqK1sjbhRnVkSV6Pn1o3PUZ78Av/2bi2VLSmf2rGeUygrYd1+7QaYUcwKodEFbXLQ6wK3bsaaYYYBZVXFra2RosWLcocmzlzJj/99NM2XfPYY48lFArx448/Mnv2bNq3bx9xfNasWTFdXrt27UrdunWZNWsWM2fOpGPHjhW+ZqL73N2QyMncwWEXoAUSxisDG8ewJcaxr9EG9w/gaOQOEKtdLAQzxVi8jFJW9kLxCJ2R1SGNSDek4GaXisjMW8BxKC9+JxTDkGnGV4LvIhV9fhLalG2w91XA39CmPwppHW9HgdznowDsWO5EQXeoQpTdpj9yj9gYN1DaFiI79NA2BAuRHXpoGzxvc7iNg8OeirFjx0YU6KsqUhGE/zux7kIAF6A1ZRxSGFyOXDWbIzeo11Bc2AeovsSFyOo5FRH+6cgKcidaB/oA5yGFx+dI6fE3ZCXtj9ac5mgt6o0ffG0zy1mMQuvL1eaco1DWKZvQIRiDFuu3n4RIUD389NOYv21w+xb8BA9dAn0VojVsEVo370ZxW2+b+Xgc
rYGHAHUIhdLCri0OibE9pMLCFtH7cNgwRo8eXcUjrDyamqJ/M2fOLHPM1pIYNmxY2A0JVIPi0ksv3WYhvWXLlpxxxhmUlpbSv39/Nm3yi7Zu3LiR/v37x9wTU1NTueuuu/A8jzPPPJPx48eXaVNSUsLYsWMjLO/l3efuBmexcNiNkIXqVCTy5w/CFmQ7CAnSc9AGFcSjyIe4F9oc/4xckAYTH8GAxODiMAp/s70CxUjko43aLlrROd2DG28q0hbWQkL9KER2NqBNOI34VpIstAnXRUSkDxLwH0NuVH2Re8VDSBB4HOWw70Cky8U8FFz+ETAXzd0mfAEjPhL5kzs4OFQMvnvUTPR7fQPf1TAHuW02QnEZq5FLYgmqRn0iUjBkAofhF96cjNLR/opISzoS5r83r2L0O38OpclNxk8fDRL07dqzDK2RfZBF1EOB4W8T2+Jr17hY1og6pu81Zgz5RK413yIXMJvRro+5p6fN+9/NvAT77ozW+esRwXqcgoKZzj2qHFQFqbDYnSwXZ599Nl999RUXX3wxf/rTn6hVS26At956K//85z8ZM2YML774Il999RWHHnooubm5jBs3jtatW3PmmWcycuTIbbrus88+yy+//MLYsWNp1aoVxx9/PJ7nMW7cOOrUqUPv3r354IMPwpYGiwEDBrBo0SIeeeQRjj32WDp27Ejbtm2pVq0aK1as4KeffmLDhg08++yzHHHEERW6z/32228bZ2/HwBELh90CRx55JBK8K2oatNWzQ4g4VEMbFEQK87OQVjANae2iYxISBTUH/47ebNegjbsEaQ5t9iS7AZbib94WK8xnN6IN9iHz6kBkhdpYY6qJBIYQIiZrzH1NMn08jdwgniaymvN7UX3Z9LNrUOG6/ZEm08HBYWchvquhTT9dC60vDdDv/BwUTxHLigoStF8yf0cX9GuMCEcfcy2r/LBrZwi5YtrkFw+h9eamQP9tkOVkGLK6Bt0tQ8SvaWGz5eWZa9dDypE0tAaONGM/GllE3jF9/IGsFDYVdrDvJETANgHnIqLzL1zWqPioLKnYVFhYbuzF7kIurMXg9ddf5+OPP2brVmUxu/jiiznhhBOYMmUKgwcPZvLkyXzwwQc0a9aM6667jsGDB3Pddddt83UbN27MDz/8wD333MP777/Phx9+SIMGDbjgggv417/+xbnnngvEDsJ++OGH6du3L8888wzjx49nzJgxpKWl0ahRI0444QR69erFWWedVeH73N2IRchzv8JykZubS40aNdi4cWNEIZR9CTu6+JafYnY00rwnwla0kdkCeCAt3AaURtIuiGsQUXkCuQjZDTXoO1yRmAQPuAH5J7+BNrQN5rwN+BmgGqENz7oLBM3zNkCzCbI+bESpZFuYsQQFhmhXA4tFSGhoYfpbae73MbRBx1pc8lCmmS1IczkSbeg3oGDIM4EpeF75FgsHB4cdB79aezLSyp+KlAMfEelqGU0uglt4LNJRgoKfT0YWyvfx17wQEs4PQy5W89Faea+5frDvpeZYT8pafO2aFT02m/K7AMWAWeuphxQ8nyLrbWPT9j20Xq9E7p1dUByIJVwW65GyZz98/ehjKNB8xxW+3FnYunUrCxYsoFWrVtvt5rV27Vr+2b8/nZYu5bpO0ZkTy+KTRYsYtX49fWvVqhAJeXrGDGY0acIDzz5LnTp1tmusews2bNhA69at2bhxIytXrtxtMzxV5jmrjBzsYiwc4iIUCplXdebPX4cE/4ZAfebPX0coVD3cpmpgq1Yngs0wYgOarXb/VqSlC/p8TiEyW5TdbCoak2AxH6Ve7YesDrbwVD1z/XREdpYh96J1po3N956PNlVboK4IBTdej7LFLKCscBDLFSvTXGc90mi2RhleqiE3iQeR0DAdaUCnIz/rkWZ+njNz8SZKe/kIcseoWHpXBweHHYfp06fjeR6pqUlozVmF1pIvTYtYa0J5pMJDa88itGasRpbXYPtuqMBeEbJ01sav
aRHsswlSxIzCt44Er23XYvsqRaQmBVl0bU2LEvx6GjbT1VZEJjqgZBKXo0xWo5AiJ/pegy5cFgNRxrwcF3MRQO3atTn1wguZkZXFF6b+QjzYOhXtzz2XUaWlfLJoUcL2XyxZwoysLE698EJq165dlcPeI/DDDz+U+Wz16tVcdtllrF+/nl69eu22pGJHwrlCOZSBiEIqWvBboLgHm2o0E2m/barRz4A/CIUkMG+fpqiUxIHVJfgblbVU2OudbMb7hBlvdSKzRUVvun2peEzC84gUtMWvQ2FN+aDNLRPN1QZEKDbhF9ADbdq18a0bIbR5P4E281so66ccvVGvNeduNNevZ8bUF5GHqZR1fQJpAw9BFpJDzFiewFbH3tO1ew4OexMKCwvNGjwarbnW/chmTbJCe3l6QQ+Rk7XIeloNWWlHIsXIVabd2Sgd9pcoJfeJxK9pcTYiO88hJUvQ4htNfOx7ijmWjazILyEFyN9Q7NsmM8aCwL3djNbZYcgd7Goi18dYJCsJreUTKCiYEn9a9jGEQiH69OkDwNvPPANLltDDBAIHEV38bnT79gmL6H2xZAlvFxVx6jXX0KdPnypUMO45OPzww2natCkHHHAAderUYenSpUybNo3NmzfTvHlzhg4duquHuEvgiIVDBLQ45CB/2xuJXxzNuivdjTKWPA5sewDdEUccwaRJvyHCEu0KtRFZAjahDXJ//EfXbkTFaLM7G9WFeAhtxDZblPUDDi5+jVHRqfnEjkmwG9lalI1lf3w3K3tdzLVboHmyFWY3I1JSDRGARYjsBH9y6WgTnxx1v9HzZ2M8tiKLwxakeSwy1zsQfQfDTXvrX22JTTGyNtVDlhT5IwfTyzo4OOw+8AO8i5G2/zGU+SkYfxVcz6KFuhJkqViL1nMbHH2Y+T9IDNqglLVPI6vBJXYUUX3aa19m+ngKxXgNQEqS1Ki2RfjkpxAFo1tryU1o7bNrlYfW1hy0ds5FLlfLkevqyWac4RmKc985yHLRn6SkpIhMQPsyyiMXsSpqJ6rQ7UiFMHjwYL788kt+/vln1q9fT1paGm3atKFXr17cdNNN+6xrmCMWDmH4cQ6Xok2sIrUk0qmKALrvvvuOUKg6soKAqk3/F/gJbYDRaIhyvl+ABP5klJ2pF6oo2xhp6IPZouJlL2mNXIXAD3zcYvqsj9yMulG2xoSN9Uil7FxloyDFtfgbbjRB85CF5D1zvVhuYCF8rWMyEjTqoJ/uSmQdqW/GsQT5HLdDBGIF0gI2NX2/iwSUyOrYDg4Oux98cpGPrKY1kRCfRKQlM7iulSJFzEr026+Dfv8bkWJmIVrPWiFlkCUGN6MA8Vxi17QArT3F5lpno4DrB1G8xRBUZ+cAc36yGfcSFDg+FpumWpn6DkZrVzZy0Uo1Y96A1s1CtOadbcZ4D1KG2PsMVuuOhtxCPW9tnPvYNxGPXMQiFRaxyIUjFT7uvfde7r333l09jN0Ojlg4AEFSMQhpfCqLbOSz3xB4OC656NatG5Mn+xr6rl27BvwU84APUUaQGWiDOQnfPaka2qzm4xd/GoM09jegzekeRCYeRlr+TCKzRcXLXmKRiTZd63I1H21msTbbfHOsZZy+GiHT/2rTVyzXhdamjyX4sSDBzFJW65iJNHpLTV+WuOQiv+VVyL3pcPM5aPPdjGIqPkNWk03AVkcqHBz2AHieR1paGkVFK5Aldi4qkFcbv/ZEKVJybEXrQSFaI5qhNWceqta9FiVyKAF6oCxPlhjURuvVcqTkiI6VKDV/2zixJERYhpr+RyDL6zuB9snm1RJZHA5D6WMPQAqQJLQ+fo2vWGlpPmuE1vHpyA1sOHLTOtzMzFa03scSYTJQ+t7Z5U/wPoZocvHrjBnMyM6OSSosguRi7owZiqlwpMIhARyxcAi4P13KtpGKIAYizdQLYXLhLz5ZaDOoj63ePHnyHGOpyDNtFpvjD6LNKFZth4NQXMGtKDj5aeSD2w/57b4A3IU0
XAVIqL6LyGBviO0+YDVyqWhTDBaQCqIUbeIp+L7PQdigSRvcXYA2QxtjYWGtGNZtwG7kGxFZ2Io0eo3wY0w2ofmyYysw549AwY4p5v+QaZ9p+i7EkQoHhz0LhYX6nWsdfQVp8C9DQnZ0rJldu5KQImGSOedXc/wb4K9obWmDYhjmIkLwJVovlqD1pg6+C5UlCfGUIzfiK0jmIbfPBvjWUpCCJ2TGcidSDm0x47XrsnWLSkbrXgZ+7N35SMnUBWW0OyDBrHUEUujZsydjxoxJ0G7fQ5BcfPLGG/S65JJyU8Xa4x++9hqnXnihIxUOCeGIhQNa2DuhxX574QfQwXcmqLsiQeD/RUL0ucBfULrF6DoQULbg3GlIo/Uo8sVNQwTpAVSB9jxk/h+FUqsGC9jZvmJp5OyiaWMqgtlHPCT4FxLfbcC6L9VBm/SviADYvu0mba0eJSgoMp9IrWNrZJ2wPsugDddmpCo081cfBUT+GwkJ3ZF72mrzmXN/cnDYk+EraX5CltRmyPJwgHllofXjFyS0f47IRQHwDBLyLzDnd0auR41RAohByBJyLFJabEZrXn38GI14KMCvU1GELCG1YrSbhEjMLWbsF6L9oB1ay1eY4wuA75DlohWqsxPCz7j3JVrPD0Uxfq1iXKs9kM6nn36aYNz7Liy5OPbYYyscB3DGGWdw1FFHUbt2bUcqHBLCEYt9HFog6iCNU0ViKiqCHCTMVzQI/DXkP3wlyp2+xZwfS2iPJRinA7ehdIjPo02tL9p4Lgf+DwUanog0adGwgr7NXhJEdAEpSwJWEhlbESQpwaBJGyBXzRyz6WaLzeezzf/5SAiw7gep5r5K8EmQtaIU4Vskkk0fOWgDXo3mMANtuosQUdnejF0ODg67GpGF9dYh60ASWitkBfZTcmcgN6cbAz00Qhbe15ELZSp+9e4spABaCvwZCfrLkJIkG61h1g2qFCk1tph+rHKmMVp/g1mbPLTGP4ji4W5BcRA2scQGRCJsEHk2Wt++QK5WtyNl0zGIQN2CSNNQpDi6Hj/g3CIDl00/MUKhUKWDi/fVYGSHysERCwe0mfSuwv5sZqS/Un4Q+AIk9J+N4iSC2UwWEj9+IQibevEqJPA/ifx5xyIt/j0o5uJRlC3Kxll4+K5PJeYza/q3SEfatVkoMBz8oOh2gXbxgiYtspCA3wDfrcoDfkNxKU3MOdatqhBt6ssQWYi1seeba74L/I5IRhpyfSjGupc5QuHgsDeiHkpJm43WgI0og9TZxHcTegk4HVkwBiLSYLPLJSNFz1gUzN0MWS024BOIaFgrqiUk1u3JrjklaH0fjsjE9cjCUITWyQ2m35pmDJYMpCNr9PEo4cRQ0/9laK08HSmKHjWvNUS68drEGns23NrtsCOxo54vRyz2eWQhF6VY1oRtQXvzPohITVk83IOCjW8x/ycjgdxmPVqANq1Y2qdgnYck8xqIzOgXIeH8WmQJWYW0Zo0RgbGwWjP7igVLUm5Cm+A6tBFCpPtSkemvLWXjLmqatpuQdQHT/iugK8poFUQq2miLEEHII/bGPgERML/+x4EHHsjPP/8c514cHBz2dOy///789tsKZK28mLKVsOPhMBSL9h/8zHmrketTfZQZysZb9MSv22PjtQrxY702m3Pr4buMBtfpYuTi+ipS+vRAFpMC058lK/XMK5bba3UUH9cQkaEcRChsLaOr8NPnpqH1HjMvBZxyyikVnJfdC8nJisMrKiqiWrVqu3g0DnsrCgqUXS0lpWqpgCMW+zDatGmDFueOVdTjUEQG/ooE/Oh6ENGYhYq6PUhZQbwB2pjWIN/aBmgDCfYTrCVhs6OUII3d7fgB3SBLxQr8bFF34LtF2cDE6D5tv2eizXYE8k0uQdrBjfiBhhlIw1c9zr1mmGOrzHsSMvWvQYJBNKwLVBY+EbEbu53Prcg6sdlpthwc9iH8+uuvUem5K4OH0Dpt18K/I8WIzTh3EFrLj0EWiGB2qJB5
5ZpXsmkTtAKD1qoFKDj8TOSeaYO07Vq7Gilc6hLbxTWIv5ox/w+5pLZGa+FqFGM3F635dyDCVBso2GMDt1NTU0lPT2fjxo1kZ2e7mAaHKkdJSQnr1q0jKyuryolFyHMSSbnIzc2lRo0abNy4kZycnPJP2EPgp5gdTdmidNuCaiiuYQwyzwcfrVgL42BgommfFnXMbmTz0AaShJ9mNegSVIK0+cVIg1Zsjl2FcqX/HWnJLAYhDVobZIHohZ95ygrswbHmo/m5GW1YjyP3LJtpKZNIfp5oAyhEsRo55nUO2lRfj3HvNlg8nraqFPgHyoDlgrIdHPY1aP0+FK2h22JxHoRclOxaeBJaQ+eb/09Hawz469oW5KZaimIp1iBraxKynDZG7lgFwDVImTMUrX1NkYIlBWX/K0AEoaKxEJtRco9aZuztzGcrkSX4RuRamoUfaxLbJbRjx47MmjUr/H+HDh2YOXNbSNqOQ25uLkuXLqV69erUqFGD1NRURzActgue51FSUkJ+fj4bN26ktLSUZs2aVcgqVhk52Fks9nlY4Xh7MRgt6DdS8SDwKWgzi0Uq7NjqI01aEyTkR7sEechdKBMJ67XMeccBHyFrzLEob3oIBXKfizJH9UfZo05GaQzboo1vKyI0s1Cq2j/MdVNQNpIr8c3/FRXoPXN+fRRD8iTalIfGaFdk3uMJC6WI4PwHRyocHPZl/AF8gNa0yuIhc95VyIWoKXKL7YAULiOQ4uM8fAG9GK3zts7EarQepSOLxFa0Fs8GfkRJNQpMP+loDbT1dZpQuQDr6ihz1W0oKYV1f01D5GYgqvUxBO0Dv+Gv4YsIhZLNWMumPZ81a1lE2vPdYU21wtuaNWtYunTpLh6Nw96E5ORkMjMzqV+/Pmlp0fLX9sMRi30QnTt3ZsYMWzTOaqg6bWevTyDhvaJB4JuQgN2hnHY5SCO1GWnDQJtDEX4mkkVo09mEXI1KUSD0AhQ8Xow2u44oS9Q35v9maCN5DWnUrKneZjyx7yXIYtEKEYKaKAtJsxjjjeX2FSw2VQP5Aw9HsRUNotrZWI/oehcWucC9iFSs2i02QAcHh50PpZ9NQ2tvT7Ytq99hwDQU6/Uo8AZy8UxDipz7kIB+KVrvauMrPLaitdG6N1VD61MN008mWt9ro3Xc1tzYgG99rix6oGDur1Agd1NzvSS097yI0treidbXIuRqeiNyG2uAFEkHETvtuRRJmtddn0kvJyeHnJwcioqKKCkpKf8EB4dykJSUtMOtX45Y7CNIVKROC7R1ixpMbJ//aCwAPkXCvN0gKhIEbl2cxiPLwxx8V6wGMdrb4n2bA5/Z9IrgF4pbhTbD2miDOxJtOnea4zOQy1V/tJn8C+VRt5vdGqRhsxVgD0ZzNRJ41tzr1WhuXkak6G5EYOw4gzElwc/saxNwPyIFKchi0xPfJSuESIXN7hTEVjNPj+NqUjg4OAhFaG37F/DIdvRzIsrA9A+k+NhgPt+EFC+TkbXgfPx1zVqOS9CanIMsGKPNOechMlKNSFEjD782T2WRhqzc4/EDuK3VIzNwrMi0HYGUQXWR2+iBaC+pS+R+Y12B70YWIK2ztsjrrkZqaiqpqanlN3Rw2A3giMVeDhGKVCpWpO4zZE6+GWnUT43q7VWkwVqL76pjheFUlOKwkMhgaPCDqt9AgdqrzflpaNG37j/VzLj+jrRKFhkoo5JNK2ux1IwlA5Gimvib1VrT/37IGrMGbYrTUUzDMDPGC805tZE2LFgsz0NVXl8zr6dMHwPQRj7FzJWt0REkFwT62Ircsh5HpvnuwHtos74S+SLfj++GsD++S5b9bj7F1aRwcHAIwi+aZ7M8DSznjHgIuldGJoNITk6mtHSq6ftZFHvRHq2v6WhtzkWF+d7DT8fdBRGIEvz9wMY+1KHsellRdECEoRCRm+qBfjqZYwuQIull4CyUddC6/K5BiqgUfMWQRTpyD+uJ1vj/7DbkwsFhT4EjFnsxtOGU
V6SuFG0AISK1NRfgC8AfIhcikC/rOUSSk8+Qtqw9PomwwdbFaIH/qznWHC30HZFrUQ4SoOcgzdtnyOxeB20K3QJjLsAPZl6INjNb2To66LrAvKch7dtqtPEeiLKU2Pznq5EVIzxrgbGvNeNvaeamEUp5eDnQBxGl/shXOUjY4pECD8V0dEHkZqa5Tj6qhjvb/B9d7MrVpHBwcIgNn1w8hLI8lVc7KBqJ3SutC46u8TMS5regdb4aWusaIOXOMnxr6374e4GFrXtRXnxa0N01hJ+sA/zCqbnIqhzspzUiMf9B+9a1aO8Jtqlr+l6JSEksK3s22tMaAg87cuHgUAnsFsTi7bffZtiwYfz8888UFhbStm1bLrroIgYOHLhN5r8ff/yRhx56iG+++YaNGzfSqFEjevXqxR133EH9+vV3wB3sfvAzPl1K4o3GxhSEiKWtkSa/FAnNNxKbnMxAi342frXoYiQUX4BiGjpEnW81+anoMexi+roL3+XnDGRReMgcs/ELS9Cm0tDcoz0WjG+Yb/5ugDRoNdGGAiJDd+LnRq+Pb7mwKEKarSz8gnY25eEbiFikIYH/J1SgLh4p2GL6yEYbeHzC0KZNG+bPnx8eRevWrZk3bx4ODg4O8eCTixdQbZuBxF6rg6ice6V/jWVoDT0LOAUFU9+OhPpL8TMDWvIQJBY2LW2soG1bMC8PXzEURBp+qm7wLd02Jg10v6UoycZ5aM2GstaRBsglahlScMXDQLR/vODIhYNDBbHLa97feOONnHfeeUyYMIFu3brRs2dPFi1axD/+8Q+6d+9Ofn5+pfp75513OOKII3jnnXdo0aIFffr0ISkpiaFDh3LggQcyd+7cHXQnuw98UjEIaV0Saa+spt8K5qDF+2FUSC4VZVgagywVwdSstr3Ndb4FX+OfilyLJqGsI2MQabEbnb1uadT/aagOxRhUg2IEigGxbfKQJaEOPqmwxwiMaRZy/bK51mPFb/wFbY5PIotCECvQphWsnp2EXJ/qIDemdPwKr3ko6HGVOXcVBx/cCs/bjOeV0rZtk4hjbdvWM8e8iM1q3rx54c88z3OkwsHBoULQOpKLXDT7oziz25B19AfkqvSD+f824CjTbgoVjdnyr7EFpXtti6zKLyHLa8hcFyS4RyezsCJHsFZQEUo/O9/0nYmsyy0RWWlp/s8yx+ejtdbKBoXmPYRPTuqgtToektH+sSXQTywkodoYHfHj8RwcHBLC24UYOXKkB3jVq1f3fvzxx/Dnq1ev9jp37uwB3s0331zh/pYuXeplZmZ6gPf888+HPy8uLvYuvvhiD/C6du3qlZaWVmqcGzdu9ABv48aNlTpvVwDwIMeDWzzwKvgq9aDYgxLzd6kHazyY7sHfPejiwfzAsejXUg/qevBf00eJB309qO/BkEC76OsWeJAfdSy67yEeNPTgSjPGmR78av6ONZYS0+fJHtxv2m9IMPZNHpziQb/AZ8s9+MmDlYE+g2P80IMDzD3v0p+Qg4ODQxkQ1vxkeVDDrMUNzXsN8znbtH7pvPoefOLBjx6M8eAwD6714BAPPvegngeveZAXtVbbNXy9+X+dWc9nm3U6uAfFW9//a+7lcA+eNGu4XZtHmHX5nRjrdnB/8czx3zxYEmNvin6N8KCOW+8d9llURg7epRaLBx54AIBBgwZx6KGHhj+vW7cuzzzzDABDhw5l48aNFerviSeeYMuWLfTo0YO//vWv4c+Tk5N59tlnqVGjBpMnT+azzz6rwrvY3ZCKYirurMQ50VaLQuTyUxtpa+oA9wTahZCWZymKjViPXH1mmPPHAF8js/iNget4UddNMZ+VxDkOsppcgmI/vkHarQYkNraNRVaNk4if1jCYJes6lBHqV3NPK1EKw3hucz2QW1XlrGkODg4OOwNe2Oq5mf33b0TQWrr//o1iWksrhyRkzT0UuUNdgNb8TfhWh1lEWpI9fKt0PnI1XY7i7Fqb9/IyRYVQ7Y5uwGnAv1Fi
DYupyJJ8Rjn9ePiZrLaU0xbTX/MKtHNwcNhlxGLp0qVMnjwZgAsvvLDM8WOOOYZmzZpRUFDAxx9/XKE+R44cGbe/6tWr07u3aiy899572zrs3Rp+sPaNVC54zy74llwsQwJ5A/yiRFOR4L0VmaIX4KcNbIxiKD5Dgv/l5v/yyI0tcFRMWVIRCrwPRmb3axBxqhEYbzS2oIxPh6MiSfHSGgavZ4nCS2izq4HM78F2wT7SUWrGEEcffXSC+3NwcHDYtfj1118j3Ct//fXXKui1lEiB/GbkVroRuVYdgPaDQsq6Q2WhTFKrUaHRRlRcFClEiqMjgX8Cf0PkYhRyg/oCKdYq4rbkIYWYrVWUCBmIQGVVcJwODvsudhmxmDZtGgC1a9emVavYwVNdunSJaJsImzZtCsdP2PO2p789Fy2oeJE6iBSwk9DivAW/zoWHL3g/g7RDn6CiSbVQAHRNVIthCcqDXorITfUY17NZo2xK11T8ILx448pGQXTr8APFLUJR5zyOfGxvMn3aQPF4mjk7hu6m7yaUJRWxoOwjhYWFCdo4ODg47F047rjjkDA+O+rIv5Al4VOUlnsJsl4H4+hsko8ipMCpS+UwFu0D55p+bYa+x4C3UKxc3zjnxlrPgxkHAx5kMdERSKFjx46VHLODw76FXUYsFixYAEDz5vHNi82aNYtomwgLFy4M/x2vz8r0t2cii4oVqbMILqBWQF+PrBU55v9NaLHujLJE3YVqUVyPNEOtUXG3zchl6gVkMu6NbwGxGaK2mvfCwP+F6DG0maTioRcqtvRsgnt5CWmuBuK7MaVHtQm+gvUqOiAXqAzKItriYSt7p4Wtbg4ODg77AsaNG4eIwcwYR+9H67qtDfQUco2y8BAxyECKqUSIXnfzgKHAEWjfKUVB2APRfnWXaXdk1HmJyEIww1R557QH0pk1a1Y543Zw2Lexy4jFpk2bAMjKim9arF5dGu/c3Ny4baL7S9RnRfsrKCggNzc34rW7o3Pnzkjzvr3alDy0SOcj16cFaBM5CFkn3kIaqVHAE8i3dhXKF16AFvo/mbEUmJfNZZ6KX1QpDb9qql3Ei/Bzl1uh3yLd9DuPsot+HtrQnkfpBc9G5MhqyKIR9Pe1bdqYYwuj2kDZDc5mmtrlSdUcHBwcdgHykKtTdFrYg4CDgTcRCZiN0oVbV6MCpIRqiNbVaEt1PASt0beZ/jy0zmehdOH5yBpSjcTWZotYWQljXdciA7fmOziUD/criYEHH3yQGjVqhF/W0rE7Y8aMGUhYb1/BM2JZK0rQQr8JBWUnIStBe1RnojoyXzdCi3cHFKD9GdpEctBC3wHftJyKTySsMG6F+WR8smErs1rrRtCqUWDG1RFtHqtM2wLklnU+Mrnfhp+3PKiJCgadQyShsOOxloqtJCYVJeb621o11sHBwWFvwB8oqUY07kbxE98CV6D0tk+aYxvwLeKp+JbqeII9RFqjb0buqsX4a/catDe0QYQnXlKNUIy/LTGKToMefX3wU4s7ODgkwi4rkJedreDivLy8uG02b94MQE5OTtw20f3ZPmvUqLHN/d12223cdNNN4f9zc3P3CHKhhTZzO87fhBbPIvyK1lbgX41chW5GpuwgQoiANEVZlWz109TA8ViwVgpboM9aGaLjJqzbUkvT7/1mbNbf9nDgOSJrTthrF+JX645Fpiy2mvcMIklF9FhWok1tDVBA165d49ybg4ODw94Jz/MIhdKQ1bonkclC2qGg6qeBAcAxqC7SCqSIqotPCqyrrFVCRVwFEYXHEam4GjgT7RnWWrEckZUmwOmIwMxHrrtBRZLtLxpbiVRsWUSf4yHrSwEdOnSI0Y+Dg4PFLiMWLVu2BGDx4sVx29hjtm0itGjRIvz3okWLjGvQtvWXnp5OenpF4xR2J0Rn6qgI7AK6GpGCZCS02xiFZciUPQ5liToOOBCRhwy0MM9DblNPow2jOpGbRKwF3VpHQqZtcjntPWTyDgHDkVXk
DFS0L5aVJgk/rWENfIISz0hnq123CHwWTSrWoBiUhuh+i/nhhx/i9Ofg4OCw9yIlxaO4eAYK2n4k6uhfkNJnKHJNzQNeRCnDb0YB1qn4yiSrYEpBe4HN/jQUEYd/oqxTxea1FQWHl6D9qhayWiQBP6GAbkicvrYUubXGSjISCzOBYmbOjBVb4uDgYLHLiMUhhxwCwNq1a1mwYEHMzFBTpkwBiKhxEQ85OTm0bduWuXPnMmXKlJjEojL97Wno1KkTM2YsQVqVbuW0jhbclyOhGaRNqo8W5JHAEJT16U6kiepEWRLQCegKvIKsHuVlSrKbQ9CqEUTQzSiodbJE5AzgdzPWZuZ6yURuIqXIUpGLUhoG0+kGtVH2s1lI61WdspaKEmSpWG+umYXiTOJb2xwcHBz2ZhQVFZkU5/9BKccHRrX4Bwrgfh6tm92A71CQ9XOozlAHpKRKR0qxeSjj4NeImHRDAeANUW2ifLTulqK1uiFSIGH6yURr8+3EV27ZtT0X7UO1E9yl3R8KcGu+g0PFsMtiLJo2bRp2I3njjTfKHB8/fjyLFy8mPT2d0047rUJ9nnnmmXH727x5M6NHjwbgrLPO2tZh77aYPn068TN1JMJqtIDbnN6NzOcvIZejP6FAvKPRQh7PZPwQclUqRvEZ8WB9alNInGs8lvl6DrrHx5Cp/UXgQ9OfjcmwryJEAIpRsGDwUfei/i4EvgQOI5JUlCJt2Tzz3hBZbUajTFkODg4O+y5UYG8VWv9vRYqlIP4C/A+51i5ALlM3owDv8Sge41IUJ3c5cC8wwRx/CBVIzQfmAovRWl4TxVM0xycVHtqbMtB+9jmJXaBsrFwmvqtsPIRwa76DQ8WxS4O3//nPfwLw0EMPMXXq1PDna9eu5ZprrgFgwIABEfESI0eOZP/99+ekk04q09+NN95IZmYmX3zxBS+++GL485KSEq655ho2bNhA165d+dOf/rSjbmkXI16mjngoQMQiEy2+tqL1SKRRuhoVp6uGNox4GbzmI03UQNN2Rpx2NqYimUhjWbzAvejPZqCNIx24Epm7n0K+u9asbl8ZaAPKRBuIjeOIrgTroaJKa1Bu9HxkmViOiMxS03cbtDnmIp/f3O2oWuvg4OCwd8AnFy+gInJvE7kHtQP+C9yD1uX6wCUobfm/kVX8MbTnfGrOvw1ZM0rxA7VtcdSGlE0jXoQsHunIgj4U7YfRa7S1QNhYucYVuEN/zXdwcCgfu5RY9O3bl+uvv57NmzdzxBFHcOqpp3LOOefQtm1bpk+fztFHH829994bcc7GjRv5/fffmTdvXpn+GjduzPDhw0lOTuavf/0rRxxxBH/+859p3749r732Gg0aNOCNN94w5tu9FfEydcSCrbBdhL9oL0OLaB8kvINIRTHx846/i8zJJ6F4B1txNRrBmIpYSBTkXWD6bYk2BQ8RmZpoUypB2iv7so92YzP2lVHXsdfajGJDrIvXAjQHm5CFpjXQCm1YpUijNpNQKFHNDQcHB4d9B57nkZKyBVXd7o9qSdyGMkL9APyCr7haixQ+SWj/qWteVgk0H8VPrEd7RVNgP2RNX4+UYRBZI6kEWTRsco81wKNRo7SkwsbKNaD8mk/+mq8YDld528GhPOzydLNPPvkkb731FkceeSQTJ07k448/pmnTpjz00EOMHTuWatXKM1NG4txzz+X777/nrLPOYv78+YwcOZKSkhKuvfZafv75Z9q2bbuD7mTXQ5qjXJSpI9okHY1886qPAuFy0OPwEH4lbdDCugoJ2fEW4e+BE9Em8He0KYyOHh1+cN62ELsP0cbxd7SRLDdjutZcf36Mc2wMRR20kayJce3H0UZ3JyItrYD9EUFqgm8mLzVt/wPkUlrq0g46ODg4WBQVFeF5hWg9nYafFeoMZMm4ECl4fkFKrP2Qy9NBSGhviR/fB36NilKkALJpalegvSAfX1mVjmIzWqL1+3rgPeQui2lTgvaNVYjI1CnnjoJrfgly
4Urh9NNPr8y0ODjscwh5zp+jXOTm5lKjRg02btxYodS3uxKyxuSgeg7RmTqCWIxMx61Q4HIzFEdwPtLQHIcIwlIkyB+JCEee+bwQWQZqA6ehYLk/IWG+G7JufIKfccNm/UinbA2IeHnDLTaiVIIbkOZoCdq8Gpox9QJOQBqyYP0KGySeatqvMeNqYK75IvAMcAsyzcdDrpmT/wCrnAuUg4ODQwVw/PHH880330R9eigwkcTWgo2IQBTgu7Haddf+n4HW8ppof+kFdAduNG2fRWv2mWg/3Iz2gwaUTyqCa/460/8PiCS5PcBh30Nl5OBdlhXKYcdA+cXjZeoIVrPOx6+wbRfpF9CC/iLS4JfgB1vnosclCQXgWReq9cg6MtL00Qxl/RiAFub/C1zb5i4vD8FNxAMeQPEOo8xnTc21V5j3E5EJ3l7D+tyW4MdcNDDvK5HG6i3gIxQwGI9UbEWWl8cRoXFxFQ4ODg4Vxbhx4yL+195k3XXPTXBmDfMCreWb8dOpb8CPqViE9oEfzd9tkaKsCK35LYFhSMn1F6Q4C9bciEb0mm/TooOrvO3gUDE4YrEXwicXD6F4gTvwfUOtSbgILZTW1el5tNi3QKljL0I+r28APyPLxjGIODRC1ooC0/8c0+Z2VKzuZrRpvI7IzQ3EryERi2gEScWTwGumvyMDbVrhWy5qos1kPbJIBAsuBR/xLGSGfxylq61t7v115Ppk63LMRpvKp2izyiUUKqa01JEKBwcHh21F4sJ68ZCKH99XB+1BoP1nFdqDnkZW+AFonynFt3YUAlOBQchCfQpyvUq85sNVpr2Fq7zt4FAROGKxl8InFy+g9H0DkRk3HT+wOh0Rh+eQducOFLRdE3gfBcHVQlaHkxCZKEIao7WIoBxl/m6CzNvDgH4oRmMdqri6FMVG1CZ26r+IkZv3TcB9SOjvhqwWweOYa9ZC8RXrgeOB3mjT2A8Ro3xEfGag4O8/kFWmyLw/jX4G6URuSMXYnOXOSuHg4OBQVShC63GswnqVQTraA55CiqJ8tCedjBRc5wAHBNr3QdaI2cRf87eg4n3vxbieKm9XNP29g8O+Ckcs9nqE0CLeH1kw/oSE7hrIEvABIhyXI8KRAbyK3KH6IIKQafoqQotvDUQSViJSYS0EPVFsxhBESvojwjIckZubzLVs7vEgrPBegFyUhqDc5RcgopIZ4xzM503M38sQESlAWi7rUlVk2oWALaSlhSgo8MlCt27dmDx5cvj/rl27uoraDg4ODjsAid11K4PIhBrqtzrQGSnJovF+4O+Pzbkb0X42EMUKJoIqb3/00UfbOF4Hh30DLni7AtiTgrcttHDXR+bfgSiP+H3IdFyCXxDvWqTlt4XvZiEt0tXIFGwfD+s+ZWMWLNbgZ2gKVk9/CblXDUaE5Uok3DdD5KYTym9ejbJWhSUoMPsl877etI9X8OhLlHHkX2bcK1GmqC3mmt3M/T+MC7xzcHBw2PXw96hLUUxfRdyiLGIn1FCfFQkOryy2Iuv8NLd/OOyTcMHb+zj8zFCX4muDLjIvULXTx4GLkYvSIkQaViOzsq1hEU0qbN7xIOoiC8Fa/ExRmPNXoBoTr6AUgL3Rgv8/5GYUbVXIMu0eRW5NHkohmGH6tOMJxmWUmjYFyHcWZCXpbf52GZ0cHBwcdjfEdtftTWJCkDihRigUwvMqEhxeWajydlKSC952cCgPzmJRAexpFgsFx3UFxhCpBbJfdQ4yF3+IXJpyUZzCIyh+4i1EEGwWqWiBPoQsHMn4weBz8CtUW+QBf0aWjGVAF2RBAVklpuG7KXVBJCWI9Yj0NEdxH9GB3iHT5i7kvrUxcMxldHJwcHDY3SFykYr2peZUPLi6KOaaHn//21bkIjffKaZOh4PDvgdnsdiHoUW6DsrlnU0kMQBl40hHmZoyzbFs/MJF9yHCYFO3Wg1N0A2pFD8NbRLaFOqhIO2t+BaGLORqdQdKCTsWZYxKRZml6pixWDes
IEqQ21aqGaeNm0iOarMYGIeIxw/E2oCiYyocHBwcHHYPRLoxrSVxcHX5CTUyMpLZurUqgsMhWHk7IyPWPvX/7J13mFTV+cc/s7C7lN2liDTpsJSFZRW7YjRqRFGRYIzR2E1iTSwxJhqTaPwlptp7L9hFsWPXGGMvCyxlYQEBQem7LGXr/f3xPYd7Z3a2CbiF9/M888zOzLnn3pmduff9nrcZhpGI+fVaJf2RSznatwIkDv4WeT1AxjnA28hj4Dtop7t7Ly42o2Tq2cBCt12qm6MMrTalIkM/ysGE1aDWoHwICL0dFdQkQHkbZagOuRcfPnncsxx4F3k/1qHE8PNRpafPgdUEQTllZWXJPybDMAyjWRAEgbuVMnp0f7Sw9DWwgtGj+xMEpVvG1MWmTZuQl+FB5LH+tsQnh2tewzDqwzwWrY6OKDk61T2OJdxvdq+3J1wFWoIaDB3qtveN7IqAKSgRejE1RUp/lBg9wf2dRdjh2oc1pSGx8jFKfrsVVY7qgMRCVCjg9rHCzbMTYf8NX+rWj1+DBIVO+r6J36GHHsprr71W/8dkGIZhNEvy8/O3avuavZy2TXK4YRj1Y8KiFTFw4ED0Lx3pnokRn5cwBxnz/vVUJBi+QWLAV176CvWueB95G76PEsEHEca8LkAVpF4HngD2AX7htl9J2Pk0BdUSn4KqMp2OSsleQegw82FXVcgLsRqJlD4J77BtZMwaVOlpDlBpJ37DMAxjC9sjOdwwjPoxYdGKWLRoESrfN5TkUW5PoZX/oe7xJhRCVO7GD0K1vq9FOQtXo8Z4qdQkF1WP+jUKb7oFlXo9DjgerfhsQLkXvgxtFepl8ReUY3F65PliJHDKkKciUVRUuzlXuHmfRoLGTvyGYRhGTYIgoH379mze/Anqq3QNDU0Ob9euDZs22bXFMBqLCYtWQlpaGmFORG3N5NYnvP4lOrF6r8YLyLOQ2BgvGf6Em0p8Y7z7kFC5CLmgv0JxsiXopP19YBIqa7sAOBUJi3J3HD2Qt2IzYdLeZsJwJ1DVqnuBlSYqDMMwjFrxuREpKSkEQf3J4SkpKVRVVdUym2EY9WHJ262EigrQKn+AGsMlIxOdRDfiy/XJkG+HPBePIq/DFdQtKpLRAbjcbf8w8ih0Rw3xUpE4uBN10Z6CvA53ASejBOwO7rYaJYcXuftlQCnytMxHfTf+gxryGYZhGEb9VFdXb0kOHz9+f6LJ4ePH778lOdxEhWFsHeaxaAWEJWYvQklqhSipOpEfoYZ1hUAXZPB3QsZ8CQphOnMrjiRAwuEbFKO6Nwp5Wo1yNV5EXpMytFr0EupBcRsK0zoYNcjrg4REOUrQnoNK1a5BuRx3oNCrm7biWA3DMIwdkRdffLGpD8EwWi0mLFoN/VHy9B9RrCjIK7CUsCN2XxR2VACMRsZ+DLgBGAacsZXH4JOwL0SVpK5FIU/zgN5IVPRAogZUcWNnJCqeRJWjplCz+lQ/JDqOA4a450cCbRk7diz//e9/t/K4DcMwDMMwjK3FhEWrwJeYTXd/PwTMRWVkE430CpTrMAmVnF2Aqj9djERHQM0O1w2lGlWdykSJcle743gbJYFnoYpOlW7Mq6hi1GDgMjfHBpT7Ue7eTz/CkrNRhgLpvPfee9/yWA3DMAzDMIxtiQmLFs7BBx+MQpq6AychYzwTlY49lZolYp9BSdofI6P+KeS5OBoJkfUoPAriRUl9eFHSxt0fBNyMOp+uQd6G3u5YVyJxsxSFZkXp6I7LC5zaRE47LEXIMAzDMAyj+WDCooXz1ltvIU/AvSg34UYkMjKBbGToe3yJ2D7AA8jY/whVaspECdEr3H10uygxkguOSsIu3dUo9OoglLA9AZWcDVDjvGKUH7ETqiZVG3V5T3zVKMMwDMMwDKM5YEu+rYJ2wETkfTgShRZtQhWVkpGNQpSuRTW7c5AR3xN5Hr6pZ3+JnoQKt73vd+GFR28kAC6IjA2AycAi5E2poKZQaUgoViFQ
xv7779+AsYZhGIZhGMb2xoRFC0bVoHYGzkU5Cj4Xoav7ezUKN9qIDPwqVFHpU/f3Eyj0aZDbLhUlV69D4UrJCBLuK91cvoeGf22tO46d3Ov++btQONYpKJdiDqoSVUa8ByIW2SYZBUClJW4bhmEYhmE0E0xYtFAkKrKQgX4cSnYGJT8viYxchXpCvIBCoX6N8i+ykIhYj0KTfI5EJxSutBIlWicLN/LGfgUSFm0JcyuqUG3wVcgD0gGJmvXAn4FbUfWp/3P7eQqJkoCwSVF9bEY5GhsaMNYwDMMwDMP4LrAcixZLKhIIf0KhS+tRkvQm9G/NQt6MGHAV8AjKreiODPq5SFBkuu0WEuZm7OzmWIGMd/98CqF4qCQMf/KiYr3bZqPbv8/H+BD4LRIbpwE/cNsfArxHKEwqCEOj6vpqPo9CuAzDMAzDMIzmggmLFkjYEO9CZPAvQN2pOwC7IKPeO6NOBV5z9/NR3kU1cA1K9E5Bxnw68BX6SmSiUrS9kRhYigz/DJSUnUaYqL0JeRpKCEOi2qBQrHfcnDcCe6Kmdl2Q+JiPPBpfIUGS6eb1XpAYyR1qJaj5XglB0JiqVYZhGIZhGMb2xIRFi6U/qrY0Ha3wd0WGelpkzG+RqLgUGe6PAicjL0cH1L/iDeS9mIQEwjrkpVjr5vDGexkKt/IGfzTBOtXNl4HCoCpRKNVMJICeQg34/HyZblxnt59FKEwL9JUMkMBIS9hPNeqNUUCbNlYRyjAMwzAMozlhwqJF4hviFSFjuxcy0qsJS7R+gkrKnoJEyNXAz4Ffuu0DJDBeR/kXlyCvRQ+3j2pCMZHmXou55324Ugph0nZAmJMx0G3zGXAsoajAzdEGeVZ6ubn+DdwfeT3VzVPp9uuP5zrUrbuEykrzVhiGYRiGYTQnLHm7hTFs2DBkeA9Ghv9OSAz4BOgKN/JnwBB3fy1qgHcsMtS9+BgAXETYOC9KCipjm+5u3kuR4h63jzwPqkC1zh1LJ+BNFEZ1IjXLx/rHHZGn43kkhKK0RWKiGoU//Rb4B7DCQqAMwzAMwzCaISYsWhiFhYXIG5BFmAcBofegCngVhRpdhBrRdQbOj4wDiYvAjRkOXI8ERn0lXxOfq0aeihUof6ILyve4CdgdeSuSCYEYShjvhPpq/Czh9TZIOD0JHI4a7ZmoMAzDMAzDaK5YKFSLpR2q1pTYITtAHoo+qPHdNSgMyve4SJab8DAKrboBCZDuhFWdfPhTdD8xJGBKkaCoRJ4Kv49/Iw/G7fW8h9nIa3I88CvUPTwXmIfyM15BvS5KaNOm2sKfDMMwDMMwmjEmLFokPpeic+S5ze75GErGPgGFGHVFZV29MChD4UdRI7036itxlfv7aFTCNgt5QdoiIVONci42ofCkSjdXfzd/OXA38DRKGO8f2UdiOFQZCpc6BImaXVCjv64onKud2/8mcnKyKSgoaOiHYxiGYRiGYTQBJixaGEOHDqWw8BvkEfBhTZsIez8Uo6pOo5CB/30kDnD3m0kemjSR0MuwCVWPKkUCoAoJB7+/NFQBKtPdQPkV1wNTUQO8ZLkVUd5EFaF+jEK6DkOhWL8HdgX2Bz4CjmbWrFl1zGMYhmEYhmE0B0xYtDBSUlKQob/IPeOFgvcsfOr+7ovCiE6ObN0ReRp6kNzoPxMlg1/r5jkb2As10KtC4VUdkcCoQp6FclSy9kaUrH0p9YuKUtTTYl9gkJsjx20zBIkKkNfC0oAMwzAMwzBaAiYsWhhz5sxBxv27xHsq2iLDfKO7L3GvDSI08rsiz0IJSppORmfkkXgP+ACJiQPdPLsQVoIqQU3u3kGejhwkLrLdPN4rkigwfNnYYuB3hGVrB7u5ZwLj3Fgf3mUYhmEYhmE0d0xYtEjaI2P+FWSEp0Ze8/kT693jdgnbdUAJ15nEewM+Q5WZliNvxzkoiXsBytl4k7Ar9iYkYNoCPwIuQ8JjQJJjDRL+vgd4FriCsKJVzB1bLHLcAIVAGTk5
Ock/BsMwDMMwDKPZYMKiRZKCQoZuBw5G4sF7BnZHAuAr93hzwra9UWO9RUgklKPSsM+5OW8BjiK+gzdISCxCoqINMAu4FeVxVAI3R8bGqJnHsQF5Kp5FIVbHJIzfhLwTmZHnC4BKS9w2DMMwDMNoAZiwaGEMHz6cOXO+RgnOz6MSsVdERuyMQqW+Rgb7AlTCFSQopgD/RfkXbYDF7v5k5HnIomYJW5CnI4cwt2IEMB6Vs52MxMW9bqxvwAdh9adbUPjTFUhUREOkqt2xlaOKVCBB9AoSJIZhGIZhGEZzx4RFC2P27NnEYhkoUfos5LXoRXyDuRzgbRTSNAvYA/gb8D7Ks/g+cBryUnwN/Ab4JTLwq5BI8A33Uoj3PvjqUGUoJ+IqlAz+L+AIJAzaIHGzFHgLJX/vi3IqdknyrgJ3nABD3f3zSPQYhmEYhmEYLQETFi2SDcB/gMvRyv9tKDfiYmTQX4L6WIwAHgdeRB2xr0Z9I1JRTsU04BTgQjev91R4cVHmxkY9GMm6cJ/l9n8X8LHbpsy93gP4A6oUVRubULfwzu5xCQqbKrFO24ZhGIZhGC0Eq+XZYlmKqjadgXo/vIo6WL+MyrX2QuJjBXAQ8BhwOGGi989QTsUfkszdBnkj2hAmbAeRWyVhkz5QPsblqHdGd+QJuQXlUmQgwbMbKmGbSAC8hLwTv3PzXo3yKyoa/nEYhmEYhmEYTYoJixZLOXAHSqr+IRIOA5FQOAoJi02outOpxFeHeht5GC5Chn9tPSd8b4xK5MXwosKHSqW5eVORR+Ri1PRuE/KY/BXlc9yCcjSOROIhyjqUJ1IN/Bx5Kh7EvBWGYRiGYRgtCxMWLZY05I241j3eBVV3ehw1tZuOSsEejcKSvols+y+Uf3EUNSs4VROWk92EviJtkJioQAKjLRIUiUneR6K+F/9wjwPk+TiWMOzqQZTf4ff1F+SduBb4rdt2hYkKwzAMwzCMFoblWLRAunXrxqpVFcABqHyrT97ejHIs5gHDgF+758pQ34tUoBtKlD4BGf2BG7MG5W4kCz9KRX0mMt38tX1t0oHDkPfE4ytEZSLR0Av4J6pA1Q0JjT7AnUhgmKfCMAzDMAyjJWLCogWycuVKVxmqPcpjuBmVZl0GLERhUn8mvtlcBcrLWIoExCgkOJYRNrvLQp4I31272o3ZjBKqS1BIU29q9rnwjAJKUdWqbu65qFC4AIVh3eme3+D2Mw+oMFFhGIZhGIbRQjFh0WLZgBK2x6HO2EuB/sBwFLZ0IhII1UgYbEJ9JN5B//ZeqHdEWxRGlUXyXIv27r4HEhYr3HY9UOnaRLKRh+NTd2yJVKOqVe8Cn7hjXW2CwjAMwzAMo4XTpDkW69ev5/LLL2fYsGG0b9+ebt26ceSRR/Lmm29+q/kOOuggYrFYrbeePXtu43fQ1OSjxOgxqOTs20An1E/Cex86IAHQG4mODMI8is7AYLdNbQncnpgbN9ht9zUSGYm0d2M3II+E741RgQROOQqnutDNh4kKwzAMwzCMVkCTeSxWrFjBAQccQGFhIb169eLoo4/mm2++4eWXX+bll1/mhhtu4Je//OW3mnvcuHFJRUSnTp229rCbGTuhhOg/IsGwEXkuTnWvRw32AOnIF9zfHZHXorGkuO1SkbBoS7znYpObPxUJiSgBSvhOBSYAf0e5H4ZhGIZhGEZLp8mExS9+8QsKCws55JBDeO655+jQoQMAL730EhMmTODCCy/kwAMPZPTo0Y2e+3e/+x0HHXTQNj7i5kMsFkP9In6LSsZ6liLjfTA1O2aDwpPeRp6MNVt5FN2QF+IbJGp8zsU89/yukbEpyItRHRmXjkKlCrfyOAzDMAzDMIzmQJOEQs2aNYtnn32WNm3acM8992wRFQDjx4/ntNNOo7q6mmuuuaYpDq9ZI1GRBZxMvKgAhRlB2LMiRnyI08+Aoagj96xtcDQ9kDZdFnluJvKG7OKOo727T3PHUhk5ppFAW7Kzs7fB
sRiGYRiGYRhNSZMIi2eeeQaA/fffn/79+9d4/cQTTwTg+eefp6LCui/H0xYZ5FdQUzh4b0BiCBLAWygv4iJgb+ANtr6zdQrynGwkLGv7Ksrl8F4KCEvO+kpT/riHAunMnz9/K4/DMAzDMAzDaGqaJBTq888/B2CPPfZI+rp/fsOGDcybN4+cnJxGzf/MM88wdepUNm3aRI8ePdhvv/047LDDSElp2f0A5a3YCZVsjeaL+CZ3fdzfC1DZ1yj/cq8fBeQAU4HXgPFbeVRZKBxqDfABCsf6e5JxPsfDd/COIU9Gy/6fGIZhGIZhGKJJhMXChQsB6NevX9LXs7KyyMrKoqSkhIULFzZaWNx44401nhs6dCiTJ09mzz33rHf7srIyysrKtjwuKSlp1P63L/1Qh+vEKk4xVAGqPwpzmpDw+mxUgjYNeRR2R/0vDnLbfVt8aNY3qHt2D2A/JCASO3MnejA2Iw+GYRiGYRiG0dJpkuXi9evVuK1jx461jsnIyAAaZ9QfcMAB3HXXXcydO5cNGzawdOlSnnnmGUaOHElhYSGHHnoos2fPrneea665hk6dOm259e3bt8HHsH3piDpbt6tjzF4o7Cka5rQKhStFvRh/cs//axscVzpwN0rcvgcJigrC5PFEEeSfL0R5IXW9H8MwDMMwDKMl0GiPxaWXXspzzz3X6B3dfffdjB07ttHbNYarr7467nGHDh3YZZddOOKIIzjggAP4+OOPueyyy5g6dWqd81x22WVcfPHFWx6XlJQ0ubgYOHAg+nflEG+oJ1Z+OhZ4AuVQHO6e+wSVeI0mSQ9AIVX/ROVjf/4tjywAHgGeRN6P3d3z1UhcpNexbYHbPp28vDzy8/O/5TEYhmEYhmEYTU2jhcWyZcuYO3duo3dUWlq65e/MzExAORT1jc/Kymr0vhJJT0/n97//PRMnTmTatGlUVFSQmppa5/j09LoM4u+eRYsWoUTpwYQ5FckYBOwL3Ap8D4U5bSQMlfIEwE9RH4lbgOWoI7YfU9c+PBuAfyMhsxk4PvJaWyQsqgkTuaMejM0o0TsDqGD69On17MswDMMwDMNozjQ6FGry5MkEQdDo2+GHH75ljgEDBgCwePHipPsoKSnZEgLlx24tI0aMAJQ/sWrVqm0y53dPCjL8kxn80QpRvwXWoZyHWGSbjUm2+xXwG9Q471jgJeLDmJJR7sb9yG13AhIIUeHShrC8rMdXhAJ4HvgSOBtL4DYMwzAMw2j5NIlFN2bMGAA++eSTpK/75zt27MjQoUO3yT5Xrw47PHuPScujGomDqNGfWHI2AHoDFwPPotyHPZBYmFfLvD9FoUy9gMtQCNVfUeWo6cBcdz/VPX+EG9fLbTfIzT8KJW17fHlZf1zee1ECXOeO+zAsgdswDMMwDKPl0yRVoSZOnMgVV1zBe++9x+LFi2tUh3rkkUcAOProo+sMWWoMjz32GCDPhU8Mb0kMGDCARYvWAkXAntT/r/shKgF7Gwpz6oCa19VGfyRC5gKPAu+hEKdEdgHGogpTw91zdyKPRQ9Cb0db4svL+vs2wF9QfsWDKIG77Ft1WDcMwzAMwzCaD03isRg5ciTHHHMMVVVVnHnmmWzatGnLay+//DL3338/KSkpXHbZZTW2PeWUUxg+fDg333xz3PNvvfUWb7/9NkEQH8JTXl7O3/72N2666SYAfv3rX2+Hd7T9Wbp0KQormoVW+JOFKiVWYToT+D3KZdjo7suTbBfddjiqGPU0SgB/AAmO+1Dfi2eBqwhFhc+VGIESxNu646yKHEc1YcftG5CgCFAOSAFQaYnbhmEYhmEYLZwm8VgA3HnnncyaNYvXX3+dwYMHc8ABB7BixQreeecdgiDghhtuSLqKvXjxYubOnVsjTyI/P5+LLrqIHj16sOuuu7LTTjuxcuVKpk+fzjfffAPAJZdcwplnnvmdvL9tTWVlDNiEjPvL0L/O//uChFu06/UPUQnac4H/oXCmH0bG+KTq6oT7
FKArasjnKSf0SKS6bV9ETfH+7ca0da9XEHYCr0ThT/9EoqI3Eh4zgFdQErhhGIZhGIbRkmkyYdG9e3c++eQTrrnmGqZMmcKzzz5Lx44dGTduHJdccgmHHHJIo+Y78MADOeecc/j000+ZPn06a9asISUlhd69ezNu3DjOOuss9ttvv+30brYvYcftA4F3gGnAMUgE1Oa9iKGwozbIkH8GeRluBA6lZlM8Lyb8Nom9J0BCoYLQI7EZJYj3Qt4Hv99UQi/FZiQebkRN+g5GoVA/Bv4DJE/gNwzDMAzDMFoWsSAxdsioQUlJCZ06daK4uHiblL9tLBIWY5DHIQv1ipgKZCIRkOh98J6HqPehLfAx6sh9GvB3wqTpcrdtQ0vsVgNlwB9RmNQPgR+gPhntkWdlLsrpeBWJh7Yo32MwCn+6wu1vPkFQW3iWYRiGYRiG0ZQ0xg5uMo+F0Rh8x+0TkDdgFgor+hsSBD5VJiD0NLRx91XIc1COPBbHAJORl+EC4kObauuUnUgMuAN4GOiCvCgvEoZH+VCocmC92+cm4Eq3/WagFPXQSHPCiRr5MYZhGIZhGEbLwYRFM+fAAw9EBvsUoBg4ByViP4xCnC4g7A+RrKldG+RdWIQExkVunn8Cy1APi05unN82KjAS51wP/B8SJwe54wiAlajD9wbktRiD8i7uQT01DkFdwwcRiopiJJJeBb4kFlOolQkMwzAMwzCMloeFQjWApgyF0mp+GtAZ+B1wITL2T0CegpNQWFEGNT0NMZRYvRqFHXV382xEvSpmImP/IhQilRbZNtF7UYaa4V2H+mGcgDwmifvznpJ1wCPAzSgEqzNhiNZ85IXpE5n7OTd3AVBi4sIwDMMwDKMZYKFQLRwfGiTjuy3QDjgFiQqQgf4IEgT3o54TFwFHEYqDGLAQVWPaCYU+VaLE71vcHGeiHhTno5yLw1CTu2iuRCEy9l8FlgDdUNfsvZBIKEXekHJ3nH3cMawGhiFvyDfuPkAej0riq02lA8chsfNn4EFisZiJC8MwDMMwjBaECYtmhARFKjK6+yND/yZk7P8hyRbXIc/BGcB5yINwGDDSzVGOkr0XoZCjN1HTvL2RJ6EUeT0uQELlURTKFM2V8IJhMAp7OhRVd/oT8kQsS3JcPd0xfN89LiP0VqxAFanaJ9kuE4Vo9QT+YeLCMAzDMAyjBWHCopkgUZGFRMSFKDTpMWTUX4iM7miokze490IhTU8iEfIIEgwpbr5Obrt+KM/hOCRaKoAvkbHfGXksUpHBPxuFS3UA9kDCwIdTnQ58ijwX0bwJ7+Eoctu/AbwMfI1Cn3KRqKh0+6+Li9x2d5q4MAzDMAzDaCFYjkUD2N45FhIV3VG40x+RiACFJGUB/0VGfbJG6f7fl49CkIYDee7vm1HVpr4orMqPXQA8DnwIvI9Egu9r4UXI3sCxSDRUu7keAnZGoVM/oGZORvSrVIlK4v4beUyOB85C3ohoGFRtlKDQqE+sHK1hGIZhGEYTYTkWLYhQVPwOrdRHWQ38iNCAj5aT3TKDu69GIuQdVKHpFmCfyLgAhS39DYmJLqhZ3XhgKMqN2IxExyzgdeAJYF8kJp4CJgGXIJESnTd6748nFTgAGIEEyWSUa/F4rZ9FPFno8zjHvBaGYRiGYRgtgGRL4MZ3RBj+dAo1RcVCZKyPJHkZ2Sib3Jj2wL+Qh+Io4kXIVOAnyHtwNeo78VvUY2IXlEMxCoVg/Q5VgLoaiYybgLHIm+I9G6tQqNPTKCF8JTVDtdYj78RfkSB5183dUI5G3hPDMAzDaF4UFhbSvn3NfMGlS5fSs2fPpNu0b9+ewsLCGs/37NmTpUuXbvNjNIzvGhMWTUoqMub/mOS1aSj8aSjxDfCSCYy1hJ2zZxEfphQD7gX+ghK7H0MhRqnu9c4obGl9kmM7GPgHyqv4CAmRo1B41EjgZyjx+2coh2KQe/0tFMpUgbwbFcDZwInAXcj7EX0/0VuUdsA44j0khmEY
htG0FBYWMmzYMDpv3hyp5ChR0bdvX2LffBP3PGgxscvmzQwbNixOXMRiMVK++Ya+ffvWKy7qEi21YaLF+C4xYdFEhN6KCwlzKqKUIlHgPQQp1DS+V6PE7a/d2NWoqtMo93oAPAPchgz7KyLzedJRD4wVKJwqyjJ3bGcjI/9+t68TgOuRF+QVd3+9e34FEhD7I+9IJhI5acBlqATt6e72ZZL3nSgyRgJtGTFiRJKxhmEYhrH1FBUVkZGR0aCxXlSMQNmDOeia7kVFLroa+udx9yNR3EAubBEXic/XJS7qEi210RjRYhjbAhMWTUp/FHqUjAxkXG90j6NdsNehZO0l6F/YyY35Ankast3jZcC1KNzpzDqOoydQhXIgPJvdvt9Fp85MVN72AeBKdNrcA50693CP/4ISzW9GnpAzgcvdMbYBugK/RuKmAFWoeriO4wqQxyadOXPm1DHOMAzDML4dRUVFDBkyhMwNG+o12L2oGIyu3gcCv0TZhF5UXISuvL8iFBcj3bhr3eteXIxI8nwyEVCXaKmNxogWw9hWmLBoMjqi0KT0Wl4/HJV5jcZipqDci4Vuu36oClQ/ZIQXE+/l+Bsy8C+u51jSgB5IsKxyz61B+RM3ovCmp9DprB3yWtRGKkrynoZyRx5EfTY8R6IckK5u3n+6fcQityjtsK+pYRhG62LOnDmkp9d2/UtOevq2X2TyoiLRYE92bFFR8RO0ZFeFyq8cifzr30P+eFAdxAPcnBPdY9zrP0SxBWOBn0eeTyYuoqIimWhJRm1ipqHi4vPPP2+wVyS6z88//7xR2xitD7PYmoCRI0ciA3xkHaMGIiO7IPLcbBQi1Q15Jbq4MW0JvQLey7EAVX86l5rhTxAfUhW4uXZGSdjLkah4EjXe80nbKegUWooa59U1ZwbKz/gNCpe63D2fjgRVIWqydy7KAYl6LqICYzM1Q7QMwzCMlsqcOXMYMWIEXcvLG2S8+pChncrLGTFiRIPERW1J0lGiosIb7OejciZZCceWKCpORDUTR6GMwj2Ac9DV91G3zaPoSvkrYFfgNff8a8jIvxBlOD5KeJVLFBcffPBBnKiIipbaxEVUVJxVy7zvvfceXbt2Tfq5fP7554wZM4aeCXN37dqVL79MFsKscb2AMWPGmLjYwTFh0QTMmjULeQmG1jNyJ+BV5LmY7+57oRX/xH9dO2AASpSeB0xBXoFDImOSJUv7W7Ub3xOYi0KejiVcS/FkIiGzNuH5aLnZWOS5C4CTgPuAz9zzo5A4Wenmn4RyNBYlzBlDAqQM6NLo1RPDMAyjeeFFRX0hPfPmzaNDhw5bDPqeqP5hLtQqLjp06MC8efOIxWJ0TpIkHSVRVJyOMgQDwvqIQ9yxJRMVOW7scpQtOQL54A9HwuFqdz8JGfcjUUzBE+5+JAoWPh61k00mLkYB++67LyOJFxWeZOIimajw+HlHAmPHjiV97doan70XFaPce/Vzx2Ix2q1dy4ABA2qIC7/P490xm7jYsTFh0WSkkNyTEOUKYDHyHPjSrT3ca4lhQz2Rl6ADSuj+CPg+8owkJn3H3P5TiBcBPqH8XuS9+HWSY4ohcbGBmonWyQz/mHsfg1H1KJC3JRX4xG17kXtvVyXZvgCdbs8EskxcGIZhtFC8qBiOQoZqC+mZN28eQ4cOJXPTJoYNG8YoZLT2RsZ/MnERi8XI3LSJoUOHkoM6QI2EpOLCi4pBaOnrdOADFJT7jjumS90+h7g5uhJmE+a4efJR8HBfFJTcEbWWPRyVJpmExAZIeIxES4Aj3WNQ3cPRyK+/PHKMp6ISKP1QMHGiqFiGroyJ4qI2URGdd0/3GR5D/GcfFRXnoyDqc9yYDMJcjai48Ps8240/HxMXOzomLJqMasLE7No41d3f6O57JxnjBUYW8iT0R/kNiwjXVKLjamuw5+8L0OnyPGov89oOeRH8+kqyeaNkolPOcuA/qN9GDK3blCORcQ4SGv9F61KgMKhX3Ot/QKfj
VAzDMIyWRVRUlKJ6haOpaRh7UZGDDPZRkTG/QEtQieIiFovRBxm//dAy1EVINIwiXlxsSdRG/v2pKGj4JaAIGdv7oniCk5G4GISu1h+h1rPVKDB5jTvGUmToZxGKiz+gvIsoI1Ax+GiNwxeAD90+1rjnAlQC5W03NjFL8iEUZPwgobj4JRIidYmKanf861GswERC4RCLxeJExZHoSn0MEg0DUPyAD6caMGBAnKiY6MYfiYmLHR0TFk1ATk4OMqjrjv8UR7lxT6IciroYgE6Fi5FHYaB7vi7DPyo8Ktx+ugGH1rEfn9RWWce8iRyJunv/AwmHAFWzSkdhYT9A+RtTUX7IQvf3YtQ/w3fiNq+FYRhGSyJRVHRBBns1EhcnI89ANmwRFX3Q1eFC4Ai0pDSBUFwscNuMGDGCPshnfzDh6np75CP3IUVeXGRnZ5OGluCOQ7UVz3TznY7asm5ApUy8uOjtju0IJEJOR0JiELAPEhcL3XMd3a0NChhO7M4Ubaf3AsouHIEM+OXADCQqbkdByUuBOyLbPIRCrMYAbyJxsc59Ng+5zycZXlS8497HMBRYfQ7y7nR0n5MXFZ3Q/ykLiYaz3Wc1BXltMtxn7UVFFrIcOmHiYkenbVMfwI5IQUEBsVgG8YnZtfE8Oo2+gk5jP6tjbEfkhL0Cnb7TiA91SkYQua8GPkV5GXV5BrwejXos6uoMDmHS9mMoB6QC2D0yVwckZj5GqXOLUHnb9cgpDTrlX4P6dRiGYRjNnWSiwucL3IGy6zJQkO/XsEVUlCKj9YfoarEB+conoCWndSjzsDcSFUcBP0ZXl6no6vdDwhCi65C4AImE81BMwAgUE7AYXYH8FanY3T+Erm6/BMYDj6OOTm1Ru1eQwIEwE3IAuuKVuOd9mZUoUVFxittmARIKjxIa7gXATW6bDkhU/Mh9Di+hK2oZyojE7TMrYX+JoqIv+rxHAS8j62IwEhnjkTjwgdqZbq5j3OM7UGH7AUjEHOP2lxUZD6G35mYkLgAWLlzIgAEDMFo35rFoMjYQJmbXRTk6XZ6D1jCudtvWRgydksrRWkdtYxJPc1Vu3mWEEaS1iQUvKBr79fFJ2x+5Y+yWsI8cdHpviy4NK9Bpai/3unXiNgzDaEnk5eXRDS0HRUUFKHfA10f0jeZWES8q0ghXzqtQt6SNKN7fhxSNR1fJwcig/RHqwPQMMrRPR2FRO7l9/Aqtupe7/V6AlrC8FyUTGddXAm+hsKGfu2M4Hi3vfea281fDbm67hejKm4muWJuQMIqSTFQEbs53URDwroQhYgXA/7ltvKgAXQ2PB95DAsFnP5YQXlnrEhWvArNQB6vvu88ui1BU+GxMLxy8x2gA+t/58R2Jv5L7z2888iL1cJ/PwIEDWbRoEbURi8X44IMPan3daBmYsGhSvgSeq+P1WcihOhKdyi5HawvHo9NIspKvf0PrMe3Q6WhFwuu1JVhXIaMe5BmpK9zI7zfq1WhIeJJP2n6R+ChTzyB0evo3uiRchd7zl8hbAb4T98CBA5NsbxiGYTQnvvjiC1Yh4/JCQlFRgc7qbdzzV6HwmW7oCuNFRZobn4VExQyUQ3Auqjc4Ai1FDXRztUEG7bGE4mId8Dkyqs9CIVC+UHuArqjnI4+DFxd/R1l/P3P7CZDB3BMZ/D9HGYEXoNyIde4Yert5ZiGB0CHyHgJ3PMlExRTkqZiECrHPQEtw3yMUKbujECbQFbsaxSicALyOluMSxcUS99oQaoqKze5/kO7GvETocfCiwpPh/gfjkYfnJ+6z9sKqmprxD0+7/ZS7zzCX2sWFL1e77777mrho4VgoVBMRBAGxWBpyBB9O+HOO8iTxZWknobSwv6HUsOvQOkMOMspXoHWJP6LyrkuRc7kCRVOmkNwL4Ss7VbrHPrm6muSCYRM6FaUkvF5fSJSPLl0J3J3k9VR0OnwYrVed4o5hOXI6P4/vxF3XqodhGIbR9MyePZucnJwtPRtOJjSIj0Yr
5RegVfgUZMRXAbehq92rkbl+iVb0zwIOQle3k9FS00PAnUgceMa7+yeR0b4MrbKfgAyfNHQVK3O34934m9FVNg2tzv+U8EoYI8x0PBcZ5g+7bX6BsgS7oyW6QpQE3sWND9xzk9GVMCoq1qAWtLugEC3P7yLHei6wG0oaD5B3xnevOsKNf8zdT0QeGFAye2d09eyEAou9qMhBn7fPCbkHWQvnRo7ZWwK4+zZuziw3x2L3fGd3n4Ku4g+hkLE1KIPyKpRPcx0SF9GwKJ8EfhgSOPvuuy/vv/8+++yzD0bLwzwWTUoFKg3751peL6FmWdpdUMTl4ygi9FPkJD0dRYymIs/ACPdaJnJCzyN5KhmR53xS9ibi1x4Sx67n24UjbUKnsZ3QOoynGp1+5iIn+P5ofQP0/v+AToFnYJ24DcMwmj+FhYXk5OTQC12pzkBn7nLiRcVp6KpVja4O3huxDBmaIFHxkXvtBCQquqBcjAnISP8PMvCj7VQPReFCa5FwOAl5RDoSXu3S3a3ajRmPrnAnEnoqfHH2KItQAvVh6Errl+XeQKFaAwiLwwfu/aQhwVSKBI9f2c9CXpRlSCCBsg37uucHIA/GYPeaz1KMlnM5AshDomGDm3ODO84i5EGZjLwhXlQMQh6QI9xci5B18SfCZUL/eRa7+bq6WyqqZjUdeW7WuXFeVNzn5vatcV9w7/1C4j0XtXUIN89Fy8U8Fk2IvBYxlK7VG/2komRRe1nawYRJzRvQqfZ8ZLCPQnUuXkXrGwehcKIlyIORhdZM2vkjcXP4SMki9NNOIT5BG3TKrSR5Olp9zEWntPuQyFiHBIWPQF2E1o6moXCpDqg6+CXoszkHOZKtE7dhGEZzJdrUbjm6Et2BxMVE5Fv3ogJk2Feis38q4Yr5bWiJLAuJijPQFaofWilf7P72OQcPuvvz3XwVyNg+w82xAl25EkVCOhI1L6Mr6W/csfkV+MR6jIvcvuciL8TRSOS8gZbxhqIcCQhFRTkyyH/i3uMj7vUL0ZXUJ18/ijwPGwgTzB9Foukit/1wanbBeg4lVY9H4VoQZnCe5I5rlntP3VCyu39vV6LGfT1RONczbrs/u+P3oqITWqoMkGeh2I3/ComLPdy296HlzUuQeBno5i8hbNl7PRIXOdTsEA7ybOy7776ACt6omqbREjBh0cSE4uJv6PTzR8KwqOPQz6+QMIG5xgzoFLMLWpcZjU4VI1BE5s1IWOSgn/U3btwqataNaI9CpmajtQXvCI16L1agaMs06g57SqQSeWfaonWYOei0loaa8bVHp6m9UIL6PDf+VXRa7I5Oh7cBZVZZwjAMoxniRUUu8hi8QljZaB66mk1CBrO/wvisvTQkBjYRiotHUciTFxWgUrEBWi6LEYqLAmTcH4mMZG/8j3TbzXb3Pk/B45ezXkKG+Vi0hJZJ3aLiNWA/t++GiIpO7nkvIqLioso9vwCt+B/o3jPoCuiTsF9G+SdRnkOhVEcgjw7u+MvQcuHZ7tjuQCLqESS0zkBX5klu3PkoCf5SQnFxMbWLipFI5BQij8pN6IodFRVdUMB2BQrHSkHLn5+693QsyleJMsG9di36vEeOHGniogVhMSXNgCDwBvudyHH7JDol+AjIZGVpfV6EZ4l7PCjy3J+QgPiXe5yFUrhGoVNCf3Qqz0Snm54ouvQNwlO9jywNkCipIlwPaSgVyPn7GhI+lUgM9UOnJd/a6L+ohsSeyBH9V1Tv4hZ3jCvR6bGShQsXNvIYDMMwjIawYMECMjOT5f3VTmZmJtOmTdsiKi4ivrN2ATLeOyPD1GfZlaMrRCpaXvLeCy8uHkBCoYSQAIUH9SM09KcgY/VQtAS1EBn/vdBVbW/CZbO5kbnKkH/8RVQ6dSK64iWrvej3NRcZ0Iej2ID5yPc+mPpFBUisHIuucu+h5cNq1CMjGxn4m1Dysz/GSe75uchor3Kv1SYqfMCy3+/vkQjyn/XtKHNxErIeLnDvpS3qNjUU
/Z+modiGDGqKCl+CJRt5Jd5xn9HF6P/TBVkga9Hy5o/ce3zAzXGGm8c3BgRZAW+g5cdzkIXQDYmLWbNmYTR/TFg0EyQuSlAdinNQ2tdl6Kfsy9IG1BQUHi8E2kWeG4BOF08TRm56L0VbJDQ6o9N4GjqVnYBOBa9F5okhg34tOkU29GtThUKfKt17WEoY5ZpNGE4VoFY/a9CpJ0o6OgVPQ6ehrtRdbtcwDMP4tixYsIDBgweTUVra4GaksViMjNJSjjjiCEYSX1I22lm7ABm2KciXfiehoesrJ/mcBy8uBhGW9agi9HLECMXFg8iXfRDyhJSjgN4KJCg8ieIiUVSMc/vxeQ9Rb8Ui4kXFOJRsvgoFK3sD2GcyFpJcVPjwpERxcTVauhuBEsEnIg/Kk+59pCHPzbko/+ISVAWqIaLC8xgKcl6HlvouIxQVp7kx7ZFXpDNh/sv/3JzJRAXu/e6GrtC90VU+DS2X+spYG1H25LHufe6H/nc93ecGEhUr3fMb0HfmBygrMxsTFy0FExbNiCAICIJyZMR/jhyLFdRdltafYv1peXPC6yehn/stqC7DBuKb5nVGp/C26NQxjDCEaiNhVaZVKGQpE50uN7tj8zU+fJpXlXt+M6HYKUMF6rqiNZ5uhJcI3DHdgsTUAJILp0wULnYJsLN13zYMw9jGeFGRiwzVHEh6ri0qKiIjIwPc6znIMM9F/uZTE8afhYTGCEJxsR5dZSYTXr086e65SsIsvCzi6xD6q8SbyLhei0RDDF2pfEHy9xLm9uKiALiXUFQchUTFZsKwH88mJCq+IF5UbHTPd0dXzg5ujseQh+B94o37Re65Re6xFxf7oCt+T+QpCNzx+AZ2JZHP6EQUqjUN5TJERcUiahcVnsfc60tRAPWvkGDxPIXC1ya59zgSXcmfJz78CcL/gV8ePBOJlHSU2L7JHcdO7t6XZjnPvddKwpK1XlSMQN+hkeh/+yzKYbkMfb9MXDR/TFg0QyQwAoKglJycwejnfD36WUIoJqIn/H7u8YIkM16A0tFeQKexF5GxH0OngAz3uBKdlv6EfuJ/cfOVoFPxzsgj4vMrKtEppwydjsvc48rI3GmoW/Y8lI7VnTAp3Bf6u869x9/6T4D4qtj+uRharzkFyDJxYRiGsY2IiorEMKboubaoqIghQ4aQuWHDFlFxEjJEL0JXkFuIL7ExFRmSRyDDcS4SChnI03AX8VQhg7MSGe+ZbizEX/XuRXkD+yPj+iV0tSpGS1Qj0ZXsw4T5s9BS2aMoIHgCurqWu30lJkb7gOEn0JXoZ+759ihkZzUqi9IJCYcn3T6ecDeQ0b+QsIneIvf8myiU6sfIoPa8gioujSM++Ph+FHLUBnkJvKi4A/0fnqF2UQFwK1pG7ITCxnz+RhUSFT6H4wj0WQ9Hn2Mm+i74/JTo1TkaMH0mWspsg+o5eoGW5fZZ5N5/XyTyStH3w4uKbDfXHFReeDOyHk4mrBhl4qJ5Y8nbzZyCggLatGlDdbUvS/vPWkZ2RD/VWWj9JZGT0DrHVUj7d0P1GUagU+tKdFr9D1oj2oiqWndEosQ3w/NthUDrSG2JP9Unrildj1LRfoicmtEaG5Uo0vYp5InYJTJPspAvLy5+j5yzn9TyWRiGYRgNJVFURMOYQP7mWCzG/PnzGTJkiMqFIoP4SGTg7oxKr+5EWJ3pPORrr0D5Bz7EKcvt4ywkSG5x439O6APfgFa8o52gIbwKeFGxBzK+/bJZV3Qla4MM4TYo9OlDQkM23x3nySis6HHggMi+vChqQ7hcloPKqdyHcgiudccxxI0tQl263yaszfg2SpReizwRfd34+ci4/i8SCYeiK6S/Mr6CxMlhKJncByvfj8LHDkBX86eQONqI4hsKUJhTW8L/XZRbkZA5zX1+r7njOgMFTE8jXlT4K/lwJNTSifdSRPGP56Blzh+7z+MKVBAfJOZ8Z61qwjK6Ve7z8qLiBZQzcyr6DtyIgqSfcvNchyV0
N2dMWLQAqqqq6ilL69kLnTZ+TU3nMkhA/A2dSt9Gp7XH3WtlaK2nAiVTn4Yqh9+C1n/+QJi+BWHPUu/MbINOvwESD5tR8vVk5Oj9O6GDLIZO79chR+epyHG6Ank0/JiouIhWp+qELz8bi8VcfophGIbRWGoTFR5voN4AW0TFgegMPx5lyq1BZ/cFxJd+XYjO/qPQSneWe20sYWWkp5Hh+hAKedkJiYoN6IrTgbCPhL8irEZXlnaEomIiWroK3PO+fK1fYffiArRMNgwZ1T2QERtDxrCnGl0NKwgrV53t5r+fmuLiHbRMtj+6ohW7Y56DDONTUXUk3Pj3kEgYi4x531fDi4px7jOqcPue7OYfi5KrPbeifI7ehHksN7nXzkoY9yYSKwNQ8vYv0RX+C8Ju54miwl95o9mbJeh/mSgu5qKlzTS0VDjYHfMVyJNRRNjYbzb63APkERlKvKgYT9i0EOLFxQy0FJqXl0dFRQUNZcaMGYwePZpYLEZ1dcPK1qekpBAEAdOnTyc3N7fB+9qRsVCoFkJYOepvyIOwPsmoH6FT/BtJXqtC1aa/RqeffyGn55tIANyJTptZyOjvgn7Cp6BLxOHoEuDzJgI35xrk1J2DLiOF6LQ7zs3ZG61b+UtKudvvT1C06hXoFN0drWdE60Mknrb8iSAFXYL6aVQstuU2YsQIDMMwjPqpT1R4foZi/n18/WZkyJ+FRENbdPaeTyguJiBD8htkNG5GK/a5yLD/AJ3RJyGj9SRqFxV+Jd8bvF3d+GLUg2Eioahog4z0VOLFxQh3bEsIV8fLkbE/CSUm+0xGH/fvS6ZkuPdY7MaehK6cF7vxL6Olut4obOc/yHifjoqmz0JX0Tvc+NeQENnNfT6+VEqiqPAF2R9DIWOjiRcVw5DHpp37O5okf1Nkf4miorub6/fu/b2MBFYyURG9B4WbXeLuo8xx+81Cn2kpEipj3fzvus9xhDvWYeh/5BPBU6hdVJzm5sxHFsMrOM/HtxAVXYFOW8r8100sFqNLENAZGD16NDNmzGjw/nZkYoEt99ZLSUkJnTp1ori4mKysrPo32I7ox5AFW2pvTCBc6wAZ8V8iT0RHdHpcR1ifYSd0eo/yJTrFDkKn1kvd2FOB36H1gXPQT7kPOoX3R5Gf7dHpYRm6rLxF6HkYjKIkv0Kn5e8Tiod93dw+/ClASeLr3HZRj4tfpypHp7gl6PT7CEpBSyfM2/DFC1U5yr7ehmG0FAoLC8nLy2PTpk0N3qZ9+/bk5+czdOjQRu8vMzOTzNJSfoJW35NRhfzK1ejM/TEy5I+MjJmFDOhKFGTbBRn/TyFxcQUqE+IrPs1DRugNKEzoXCRqArQSnY780sma0+HGLUJejrdRKNElhL5zj78atEWiohBdcXLRFcZnCKagK9fzSEAdTni1qXLblyFPSbU7vieRF2GE22d/t+1/kGga7uYrAGbPns2IESMYiQKVl6OqSMehZcCXkNiZSbyogDDkaxBajstBomguWvX3iei3u/e1F1rdn4WshJNR4HAOugJ7UfEqCqHKRrkd2aifxukkFxQgw/8ZFAa3EAmHI6kpKlLQlfwpVFp2H/Q/XogsiMGEpV+8RVXs9j8QxUdE930X8qy0QZbJLBp3bfeiYjDytvlQtCV1zBOLxRjkxi9w4xfDDuu5aIwdbMKiATQnYQG4nIs26CfZD52KvDPRJ0Efjn7KJegU2RatVWQkmbEc/XSy0JrLV6jg3cfIaXsOWuv43M39JTq1diZe1PRGVbyPR+sR1YRF6m5Fp8E2KEr16CTHUY0uQ6nuWKNUop/1v5FDuysSJ11R5GwmOk0WolPcq+44Fd5lX3PDMJoz0W7VX1O34TRz5kzy8vKorq6mFzJU586dW6u4WLp0KX379mXGjBmMGjUq7rVYLMZI4rsfg87+uyLjuBoZrcMIw2AS8eKiFF1l1qK+Ej9AVyhfygNkNB6JloXORTH+MXR1WEt8QzY/3lONzuzeSP0M
5RnsDVye5Lg2o1CfVcgI7++Owx9Porh4Dl1ZfurGbERXsVJC78V6t+2TyD/fGV2Z9nRjTkGBxouRqBg+XHWUYrEYQ1A/itMIk6yfRiVVDideVKxC4VkVyMPxIVq28x3NfY8OkABJJi5GoKXE5Uhk/JpQVAxHwuZ6JAR6U7u48KLiB0gcvYwE0Z6EVZ8OJbzqP+6OaV8k+nzDwvnuuIcSbz3gPvtEj8W2FBUnov/rPDdnAcnFhRcVx6PPbDHyAM1gxxUXjbKDA6NeiouLAyAoLi5u6kOJgy1L+R0D6BRA9wB6ur93DuCPAcwJoDiAoJ7b6gAKAljhHlcF8JsAMgPoEcBuARwSQHYAvw/g4wCeCWBqAO8FsM5tUxVAdeTmH68P4LIAegeQFcBvE8b529oAZgawMeH5xwLYL4AjA3gpgLIASgP4wm2T+H42B/BEAPu6/dlX3TCM5sncuXMDIMiF4CIIcty5PRkzZswIgKAHBF3d+Fw3fu7cuTXGL1myJACCnm7MjBkzaowBgpEQ3O5OoKdBMAKC4yB4AIL/1X8BCQIICiCYCsE/IdgfgisgWAfBBgiqI7fjIMhz+yuDoAKCcgg2Q7AegpUQLIGgJLJNAEGVm+t1CD6BYCMElRA8CsEECP6acDzl7rX9Ifi7m3eduxW7/VW7+2K3v8sgGArBze75byCY525+3uUQFEKwEIILIBgFwRz32r3uswSC2bNnx33GORD8BYIVkbmq3Hv4xv0dPf5iCG6AYDcIznavvwfBUxB8kOTz/437v33iPlv/Perh7kdB8CcIprj/1ZMQ/My9hx9CkAHBrhA87/blP/vnITjDfZYb3edaDsF97v/4G/ceqtxneDsEe0Fwofs/rnPzVEEwH4JXIJiVcOx+X89CcAoEj0Fwp5t/DHX/Jmpj+vTpARAMhuAPkf9R4N7T4RD0TZgXCAa570H0GKdBcCQE/dz46dOnN+pYWjqNsYMtx6IFE0TK0u6zzwi0lvE18lqsROsXd1PToZmMrqiux0q0pvBnVCPi9yiisQ3S6z9FuRxtUb+L8cjRmUnNErhRMlAux5+Qs/xOtE6VSJabO5prcReKLD0crYMcgdaL/Nc3WRJWOnI0v4K8L92tPK1hGM0O76mor8wryFORm5vLSLSSPQCFk/gynMOGDaOwsHDLeO+pGInKko4CcnNzmTlzZty8QRBsics/EK3kHoU8A0+6+4YU98xBZ+O5KNzla1TVxxcmB60CFxKGP/lwp4Awp8FXZyp2+4awAHkFWh1fi/ITAjfnCSgE6Ro3vgKtsj+EfOUPuvfShjDMyld88h6Mx5CHJnDb3UPY8dtXfwKtvmeiFfY30Er/EOSLv44w/CnqqRiJ/q9nu/2ud/sJ0FXTF2L3lLjb8Sgo+UO3bX90xY02/gOFZb2DPAh5yPvkS9V+A/z3v/9lJip/+xX6f76KvkcXuM+ov5t3Ksr7CJAn5RlCT0UqukKnIGtgf+S5uMq9p0cIPRXnu/F+fTtA39khyFs1O/K8ZwLy2tyHqkltS0/FsMjrR6HA8ZHIo+PzNKOeimjG5jjk1ctFcSKWc1EH21nktAqaq8eiPgC3Wr+vW73fXM+C06YAHgxgr0Aej/PcNh8H8lr8JoDFzkuwPKjpnUh282P8PqoC+HMAQwN5WH4Xea00kIflzQBecY+fDGBMAHcmmbsuj0Xi7V9uf/aVNwyjeRD1VNybcNKKrjgHQeipGIlW09e6FeN9IPiV2z7qufCeipEQ3OHmvButWkPDPBcPuZXno9xqdUE9J9oX0Ar479z9C24VuhStvr8LwUAILib0VFSj1e4ytBpe7MaXufe5vzueSrTyvQR5HmZB8AbyGlS6ea6GYBwEb0HwtFthHgHBoWg1PifyWQTIO+I9F/ei1foxaCU7z43/dy3v9d7IZzmceM9Roqci+plWQ7Ca0CPjPRZRb0Wxe32Ne7wGguuR5+J0CJa6bf14/386xX2m/viix9OhQ4egu3tf
2RAcDcGr7vM7EILR7hhLkLfgTAiuI95T4f9f3ptRiTwX57r3eCqhp+JLd5yL3fsJEt5rkdv/LOK9UtUQzIbgcve59qF+TwUQ5Ofnb3lcl6ci8eY9Fz0gGEBNT0XibUf1XDTGDrYciwbQ3HIsGoNWvPyaQWI+Rju0FuDzEl5BEYTFaH2oi9tmOVp/egqtQXVDazR1eQCi5WITozVLkdejyM29rzvGlW58NVqHiyEvTC7K0RiUsI81KEJyJGGfjdqoRvkhdwIllnNhGEaTkuipSFaR6Q7CWHnQme4ctPra2T13vxu3F8qJuA75lv34XyGfreceFFM/E+JyLvyq+tkoKTcLrcpPRp6M7m6/o5FnIpEX0Up3N5QbMJEwwTtAMf3laIV8JYrlP5XwbO89F96L8DCqhFSEEnrPRavZ7ZGHoQ1aiV/kXv8ceSX6oyvb+24/fdFVbU4tn0kpytG4GV3lxqPV983ufSTLP7mP8HP+6quv2GWXXbbkxiTzVCRuHyCPy0bkPelIeKUsRZ6KjugKjPs81rnjfAD9n69G/6Nn0f9nOPrftk04Pn888+fPJzs7e0tn7++h//Ul7ljOR9W/yt3tJWQRHIYyItMJPUuJOS/lqELWS+7xFPdZdnb/g0qUV+FLBvs8nXz3+n6Exew3ogpWVe4934T+f6sg6XU7FottyTPKz89n9OjRpKSksHMQMA79XvatsVX4fwjccd+NvtsnEO+pSKQa/SYfRd+9lY0oXduSseTtbUxLFhae0KXeEZ16opWUfNft+EpK4TY7odOuLyw4hPhGeEn36O6rI49nI0fpJyh9aj1hhfJUFIo1Gl3WeqGUuzno9LOWsJJUbzfnV0hcjKJhlZNLUDjVJwRBeX2DDcMwtgt1iYpKlDjtjaFbUZWcnsi4jooKz7/Qss/e6Gx4LmH50V9Qk0RxkZubS44bfxZazqlw+2xD/eKiBCUFV6Az+UTiq0aBDLiZ7nYTCpG6AIWppBA21QqQULoJVWHqgs70fZGReKo7pvZu/CJ0VXkTlV6NIaOvFC1FlaKrSBAELFu2jF122SVOXNyDjPDFKDj3TUJhFgRBDXGQKCp699b1KD09nfz8/HpFRfTziIqLzUgwlSAjOyoq/Oe6AYknLy72RAbxKOoWFR4vLvqiq/omZAmcj5oTggz6MndfTBjk7Ev4Ji4TVrl5UpAQvN8d/73o+7KJUKSCkr6nIjGxi7t5Q76asITLLBSg/QpKbq9yz0dNVv8ZH4ZKxMwgFBd1hTX5z9/vL8V9tqmE36tkVCEBfRu1J363Vix5exvTUkOhamPQoEFeqAdAMGjQoFrHasyYAL4J4PNASd6JIU7JQqCCyLiiAE4NIDeA7wdwRQA3B/BsAB8G8FkAUwL4P/d6bgAnBvBGoKTwskAJ20cGMDaAp928BQHMDaDSPW5IfuETAexUr2vVMAxje9GuXbugFwqfiZ6gKlAS8gAIjnWPfwHBHhDcgsKfEk9q/4ZgPAQnujCULi4s5TYU4lPbyTAaFjUChdqsdvtIDLUJqD8s6l4UtvSHWvZXAMHjbr+PQvA9FJaTGAJ2BwrhyXY3UEhMP/e+biEMmwkguMcd/84Q/NR9dv7a1p2aYTRfffVVgJvroshnAGGCe2J4S3S8Dy/66quvav3/tm3bNun/N/Hmw6Kuh+AAd/8NCi8KUDiYDwvz26xBoW8DUKhPZ/eZ+P9BsnCsKPPmzQuAID3yPal02/uQtGgi/WZ3DAvd3z4Myh9/lXt9HUqsv90dwz4QfIzCoHyo04sQ/ByFc+2FQt02ER9W5Y9lNgR/dP/biyC4lfjQQP8/8SFm0ffuw6IgeSJ2dH8VCfut7VaJEsuPIEz43lHCoILAkreNeigqKiIIgi23oqKiOkZ3ROsBa9F6SucG7iVw9w+jNYNvUFrdZORs/KGbdzRKNZuIigVOc+NWo34aT6J1BJ+4fRjwF1QsrgJ5NnzYVUPckWFjPcMwjO+a+fPnk5KSwnKU
POubmFWipNXFqAnbfFS+8wOUUH06OhtHuRY1HtsPrXwPQGFBp7mbbxCXjDNRaNEQ1PDtp2iFupj4FWbPSYQ9AL5EPmffbWMRCn35GQoHejZh201u/GZ3XGXIO5CCelnc78bdiXzj7ZF/+0zkGZmDVu4L0Jn/Tjf+PuR5mY1Cal5zxxIE6pS8MharsaLcu3dvvvrqKwqQZ2Mm8NlnnxEEAd/EYklLiQZBsGV8oqciGfn5+SxHq+j31TpKV64n0XfgA/e+nnbvdTOhpyLa1WkqShj/Gq3gr0NerYup3VMRZcgQpaF3QQnZP3X7qyQMSWvr9unDzh5G//973DEFkZvvR4G7P8PNuwwlk69yr3lPRXfkRRqDPHMvou+D9xz4PiSLgI9QXMP7yFsVLWqQ6A06nbCIQV5eHtOnTycIAhYgy8En8XtLwe/PJ6L72I1keE/F7W6Obm4/o0eP5oEHHqhlqx2Y7ShwWg2tzWPRUHJycgLoHMBDAeQHsMSJ94Z4LKoDuDaA0QFcGSjRek0QlrStDqAiUFnZZGVq17vtcgP4W8Jrd7p5b0w4nsrIMdV1+10AHZv64zUMYwfDrxb3jNhmfhV+Akoevs+tqN6BEmq7uFXbf7gVY7+a/W8IJkLwhHs8FYJjkIfjSbfi61e8k3kunkfJtje6VeVilGi7grAMa3T8PchzMsHt03ssNkLwDgTT3WO/Kj01YfsCt92lEOyHStKWuTnzUEJ3LlrJjm53UeTzmjVr1pbPLOo9mDVrVvDZZ58FjTFpvOfis88+a/A2ffr0qdNTEaWgoCCA5In5/hZN0G/fvn2Ae3wbNT0ViavyQFBYWBgEQRD3narNUxHFFwIYBcFd7n8fTc6O3rwHqXfku+o9F75Mb7E71jI332gIDoFgBkrQvsd9J36PSg3f7raf4r6DTxEmnQcQvOyef9h9FqMg2Ne9drb7jC6v5TONfkaTJ08OYrFYAKHnooCaHopq5KFbhzwz0UT6qKdiAErcDtx77OX2c//99zf4O9RSaYwdbMKiAeyowkInr+4B/C9QGNSqyO/Xi4HahMWDzvi/wz0uC2B2AF9Ftql0wsKHMiWb53YnLu6OPLc5UI+OfYOaYqchIVEPBdApGD58eFN/xIZh7CB4UZHYqwII2jtDxYuKSne72Rlzg5xx5cXF/xEvKqrd80+hfgSXu79rExdeVNwTMQy/csZVGWEIjBcXtYkKv+8FSFwsoW5xETWkfRiLFxf9qCkqEitdeaKG9KxZs5rwv1o3dYmLxKpfHi8ubq/jsygoKKixr7S0tAaJCk+iuPD/+2SiYm9UgWssobjYQLyoCAjD63aG4Bn3vO9pcQrxosJ/b+6E4GRkvFcTLyq8gX+7m3cUqlL1bwgmu+9sbeLCf7ZRET8QVSxL/P766mBPojCtRwgrWCUTFYeh32tU3LZ2cdEiQqFeeuklrrzySo4++mh69+69pYbw0qVLt2re8vJy/v73v5OXl0fHjh3p0qULBx10EE899dQ2OvIdDZ+kHUO1NhIJqJnEvQjVTZiE0uNiyHHbBujhtiHJdsn4OQoQ8PVBvMP2QuTM9VXL/TH6c0hdqOfnnDlz6hlnGIax9fik2cReFb5P9iAU0nECYUBnCeob8EsUepGCEnePRz0Njke19QLCUJajUajRXNSRugBYgMJpfFjUCyjR+3tue1BoSqa7pblbhZv3XnT27Y3CYQYQn7gdc8/1dftaiiorTUThIz4s6kVUtuNAt73vm3Ef8B7wT/e+PPcRhvYkdhUPgoDVqanMmjWLESPqquHTtOTk5FBQUMAM9F7uc89Hq30FCaFaQRAwC302Pkwu+lkUFBSQk1OzLldZWVmt4U/JGDVqFDNmzGAmCkd7gjDMCdQ96mYUFvc/lMh/Fbrq+iRtCEO17kVhaRtRMYGn3bwfoI5X64FD0BU9BYV9nYTC5I5AlaSuQ71EDgN+QliSxffkKEbd1vuhoOyetby3k90xjXLz+E+rFIXL
TXf7DVDSdjUKW3sVBWa/hsKn1rn3MQdZLy+44/ga9cC4ljD86rTTTrOwKM/2Vjm10alTpy0qMnpbsmTJt55zw4YNwX777RcAQefOnYNJkyYFhx12WNC2bdsACH79619/q3nNY/FGoH4RGyOLAtFwqMTbqQEcHij8KQjUH6MgUCJ2dFxDPBY+LGpcAD8NYEMQ9uN4KVBi+Xz3uNK9XhqZN9mCRn6gDuXmsDMMY/sS9VREV63LIBjmVmHvQHX9F6Nk1jVo9d/X/7+HmonZvt+DT66Nhs08i5K/Ez0XU9zKsfdUVLhV31VJTpRlbjU56qm4nbA/ReL4ZJ6LG5Dn4hG33RRq91zUtjqfrJt4SyPquYh6q+rCfzbRVfFknoqtJeq5uAet1HtPxf6R/0kVWun3notc938rJfRUpES+5w+jZPp7CEOM/HfU///bEoZ+PYo8Fw8Rhvv522SUDH48KgDwAjW7lPtbBUqEHxX5Xt0MwRAIdnG/Cd87w3sqprjv6YsoJGwaoefieUKPRXfkqYj2QlmJPD6+CMCnn366zf9HzYEW4bGYNGkSf/3rX5k2bRorVqzYJnNefvnl/O9//yM3N5d58+YxZcoUXnnlFT744AMyMjL497//zQsvvLBN9rUjoFWRcmCheyaaHO29DYnegdlorex81DsVpPvbUjMdsKGei45o3S4frcWlu+cPRgXp7kPejLnIW/KlO+a5KNVwOUqF82xOeC+GYRjbnkRPhS8rWwVchs5E49Dqfsw9Xo5WVrMIOxaf4bYdiTwSQ9w474VoQ3yC7wS0SjwTnXlnoxXmF4CDkKciy22X4eYoTTj2R1Ap016oJOwa5HEYgpJwX0wY7z0XPZDn4hG3/51RsvERyIcNWoH2ibjec5FsdT7RU9FSiXouHqVhXaSDID5hvDZPxdaSm5vLMPQduBH4DaGn4r+RcSmoV4b3XHQiLIV8PeqGVYK8bze6+f6N4g02EJbP9d6aJei7NM/trxj4B3Acsgx8QvjD7vUh6PvTEX2XkhmvlcgqWIssEJ/YvYfbNht53oYT76mYhn6DI1BC+TDkEXwdeVp8+eZ05DHxJZyfRQXwO6LSybnA7rvvzmeffZbk6HYcmkxY3HvvvVx22WWMGzeOnXfeeavnW7t2LbfddhsAt912G926ddvy2u67785vf/tbAP7yl79s9b52FAoKCpBzdB76qW9OGJFMXDyCHPc/iDy3AYmKxK9bNWEIU30cii5Rj0W2BTn0P0Uipje6tA1y973RT349utR9iYRSIdA4t7FhGEZjqEtUXIvORnsAb6OwiwwkDiqRGIhWil+L+kI8hColpaHeDlWEDcqinXmmoLPiOBROVYbq8fUDTonMHUNn5gxkFHpxscrtqxIZZF8hUTEetRkdT3JxMdvdPkXhMlWoHuB4QlHh8eEtPQnFRbSyUWsRFR4vLla1bVuvqPAEQcCqtm23m6jw1ZUuIhShT6Hvw3+TjUdGeQoShh3Rd60MhRy96Lbvj8TDy+i71g59v+4mFBW7oF4a5ej7fRMy1H2r2yr0HfSi4mi+naj4EC03HosqS32BKnElExVLkABajITEj92YYlTxbBSyJC5zx/qCe+4NFJpl4kK0mnKzL730EuXl5fTr14/999+/xusnnngiAB988AHLli37rg+vBbMBRRz6NjhRooLAnyg/QZGU0dNDGcnbzlQT9vKsjzQ37xeE63QB+lmvQqevzm4/6e6+M1pvG4JOY+XotJAPVDJ79uwG7NcwDKPx5OXl0QstiZyKlje8qPgQxZf/AJ1hH0JGWWd0ptuEDKTA3ZeiFeJO6AxWicq7ZqAzaDVa9ilHht6jyMg6F5WjTXf7+wp4PuE4k4mLLMIY99tQ/Hw7VCQcd58oLmYh4/S/brz3VycTFSDPxOsoXn3+/Plxq/OtTVR4cnJyqKioaNQ2FRUV21VU+JKtaSiX4jzU7HBusm3c8yvR9/kclM/wZ1Ty2DezewSt7PtSwm2B
Z5BIWEgoKrx4+T83740otyMNie2bUe7O1oqKgchbcRwS2tNQ2eJEUTEICYMBxIuLN9Fv8Tz0u3sIeWkmuM/vx+69d3LH0ROJix2VtvUPaRl8/vnnAOyxxx5JXx80aBBdu3ZlzZo1fPHFF3XWoDYS+RIJhgMIqz97fMI06FSznPjUPn8SjTrqQZfYgLBqdX3E3LyPI6d8ZyRehrh5lqLTQDJS0E8+A50+XsN3GTcMw9gebNiwgVgsxivIOBqPjP7PkJH/DVrp/AdKcL3bbTcJGUhr0fJJBTp7VSFRUYyMm76ESzu+U/IUtBp7KKGoWIUMpx8ig26q22Zi5Fi9uMDttz3ybFQTJhmnIDHgPS9eZExFZ98uaNnnVSQwKlESbm2iwnsm5s+fz+DBgwmCgPbt2zM3P79ViormRG1dwdOAS5Dgm+meG4YEZxbyRhWgq+8EZBmciAzvtsBeyFivQiFMB6H/cz666s5GfSyiosJ/FyphS9J6IUro70X9ogIUmlWEQvxqExWd3fPj0ff6OeJFxQAU4wBhp6tF7v3nueP/K+qt8Q3qct8dWTCHufHXo+/019Qf6taaaTXCYuFC5QH061d787M+ffqwZs2aLWON+gmCgFgsDa07jEb5El39q+7eX94WuPtBkRl8yFIK8Z6JSsKKUwHxVZ1qw8+7CJ3CIMy3KK8xuiZt0GmxBOhKLEnzJMMwjG3FlClTOPbYY7kHeSE+APYhFBUnA0e52xVIXMSQ0b7WbbMTtYuKGOGSjfdUDEeekKio8EGfR7r7qe5+YuRY/Vm8ElX2ibnjqkBeC1/ZCOLFxWtIfIxBBuQM1IR18ODB3IeWc6LGazJR4dm0KdErbmxrYrEYvZAxfFaS11OQMPXi4h1kTO+Jvoud0PcrQEZ7FVqlf9Nt3wuJyxXIi+abF37tXk9H33uI/y74pn6xWIxitIz5oJunJ7WLihXAW6gh3xsoh+PHSPR0R2FZnd1Yb2lMAPZHS5SLkaCIoba8We4Y+7ptHkMN+k5HHp3l7u/ebtuhSFT3Qb+BQred/w3siLSaUKj169cD0LFjYm/SkIyMDABKSkrqnKusrIySkpK4245NBVpLmIx+xlVJxsQIjfto2JP/ikWTpX1Ru1TihURdRn5AKCKirmTfKzPRI5KMUrQesi9Kh8wiFmtIfodhGEbjePrppzn22GPpjXy5LyGj7DEkAryoAJ09/4JKsd6Fwkm6oJW/TdQuKjzPI09FNjAWGYIriBcVniMJS8FOjTxf4vbRCXkvNqOV4vMJE62TlU19B10dXicUFYMGDdqSfFxbYnaiqDC+G9599916u4J7cfEZEo2F7v5zwuzJGPp+xpBFcBD6vz6HBO1dSFQ8//zzfOMW8YIgYAk182miomIkMtwfdPP3pm5DtTv6LUULATyOficrkPFfTM3lyxIUGtgLWRabkYVQTGhVzESLARvdZ/ACEjyD3Jg+6PfiwwB3B36PwqmGDBlCUVFRHUfeemm0x+LSSy/lueeea/SO7r77bsaOHdvo7ZqCa665hquuuqqpD6PZIK9FDF26uqFK1H2SjPSGf3TVyedalCPB4ftQpEbGJDPuk3kv/M892k9jgRubeDyJc1ajGhWr0KmnB3K2fpJk34ZhGN8eLypykSH2HyQuZiLDZV9kfEB4pqpCserHoDCSo5DRMo+6RcUadGbeBa3Ufo58ysMI+2QkEvVcrEfhLCXorNwVnXkzCM/SfmX7RkJxMQPFlfsKR5mZmRTl5zNoUOix9teOm1AMvRcfJiqajrFjx/Luu+9ywAEH1PBARbkL5UXMRh6Dr9H3rh/h98GLi0okBJ5G3/GVyCu3aNEi+vfvT3V1uLDovxOr3ZyJoiIxPKshRL+fBSg343wUwuRN+2g41GYU/pSJrIkyZL10Rp5CkKB4AVVha4t+KwegEjLF7nPYC/W4mIl+Kz48cIb7DPLy8igtTay31vpptLBYtmwZc+cmS+upm+394WZmKkJ0w4ba
Y+f9MWRlZdU6BuCyyy7j4osv3vK4pKSEvn371rFF6ycUF7ci4/wSFJUYxV9QFqCfNGitIR2JjQ7o8tmWcA0iUQDU5UHwoVZDIs/NQs7OjtTu8ahGp8kpaO0t2z1/EXCOhUQZhrHNiIoKXw3KN5krR/7W/6EE0JPR2cubXX9EwaY/R2EnfhV1Z+JzKjxVyDj6EUqY/Ttaof0JYQnb2jgShXj8HzKmTkMG0xokLlITxp+FxMEaQsMpGkvuowYSSTQkTVQ0PfWJC18SdjYSB7vtthvBmjVbRCLEiwufzDwTeUQmTJjAos8/p3///kn3HwQB6enpzM7P32pR4UkmLs5Gv6NEceHLu3yJfo9dCZc8uyCBNA2J/Alu20EoCNuLit2RqChA1owXFXcgwf01EOyAogK+RSjU5MmTt7i0GnM7/PDDt8fxb2HAgAEALF68uNYxvqu3H1sb6enpZGVlxd0MfwFZhdYmfoJCo6IlaDORY3FWdCuU27AW/XRTqZmwHaPmOlwyZrn5O7nHZSi6cq9at9Bl+WpUA+JEVDTOczRhmpZhGMbWkUxUgAIvz0EBm22Qd2AKMsi+dGP+iMKKfo6qSIEMoH7ImP8qsh9f59+X0jgaha7MRQnUDwIPRMYm414UQlWFzuRPoTPrBre/RKJVnN5++23Wd+zYqLKp6zt2NFHRjPDior6u4P3792fNGn0jagtvu55QVIwdO5Y1a9bUKio8vlP4thAVnsT+KLcT/r4WIO/fOuStWImsg2Xod1mNRMN/UL5INlrCXIeKEywiFBV7I0EyH/2WfQf7ujqq70i0muTtMWPGAPDJJ8lDWxYsWLDlx7Hbbrt9Z8fV2gg9F58CF6JTypEobWkw0vWvostjFXK0V7i/ywnDmBqb21COUrPy0E89C/3816CfdfRHHEOi43V0ClyFTjdRUYE7lnGE6VaGYRjfjtmzZ3PssccyChlJieElZ6Cz5jtoWaYEGfMBMnI+JxQV0bIY3jzz/to+yAjyoqISGT2DgZ+6OVeiFVvQWc/Hl3vuRaVAO6E4+nMIDcUfIQMKwjId0dyIJUuW0KdPn0ZHIeyIISHNnUTPRWJ4WzJqC2/zoqIxNFZUrEA5FXWR6Ll4AVVeW4w8FxsIyyr3QhbLp8gj2BPliIxGfS9movyKEnfvRUUMeTyykaXxCLJ0TFSIViMsxo8fT1paGosXL+a9996r0cvikUceAWCfffaxUrNbif/RSGCsQ7o9HXksqtFP9mm0hpaBRMBKdFrwldfr3UvC49fdHEehtbuF6HTRE/2k56Kf+AL0037DjR+NxEU2yRkJtGX33Xfn008/bcBxGYZhxDN79mxycnLIRbHd49FZKTMy5glk1PwSZXi1RWexm5DB8ktU4jXxzJdMXPR0915ULEIhH+e51+5AK7HJxEVUVLyFzsZ3om7CycTFs9QUFUbrISouEsPbaiMxvO3bioq6qlMlcgdh+GB9489y43OQ2N4P/VaWEDaW7IXyn15GlsMqJFq+h7wWM1AeVD4SFQMJRYX/LeWgKlGvot/3IkxUQAusCnXIIYcwfPhwnnnmmbjnu3TpwjnnnAPAueeey+rVq7e89tlnn/H3v/8dgN///vff3cG2cvQDSkhArQAA2gdJREFUqkY/s25IUKxFl9QH0aXL1zbpjn7OK6gZ9lTfD9FXc8pDTfIGojWCOWgN4mSUtngy8Cd0WtgDOfgfpnZRAUpvTN+hu2QahrF1RJvh/QJlfJWgMyHI6HgJ9XQ4C1VrApW37IEERbJ+D9Eg0f5oiWYBMmYqkTBZhERFtnv9DLeP3sifezNhj4xkosJzJzrD3kEYFvU4KnlhoqJ148VFY/ovBEHwrUUFQH5+fr3VqTw+xOg94sOwauMQFONwDsqz2IB+L0OQODiAUFRsAHZD1dPuQknaWShMMB/FNBxBTVERQ80u/4B+Lw0peL+j0GQei6uvvpoXX3yxxvMTJkwgLU2lQ8eMGcOtt94a93pRURFf
fvklxcXFNbb961//ykcffcT7779PdnY2Bx98MBs2bOCNN96goqKCiy++mKOOOqrGdsa3J+xz0QW1pNmIErxvQB6FfxCKiJ7okpiK0hGjlZ+izv/EErTXovWEm5GQeQitEfwd/dxnI43ci/jaDw2hHS1QXxuG8R0xb9488vLy2LhxY61jysvLicVivIqMnoPQGa4EGSpvIOHgE0GHoZ4TC1A3YB8a1ZGaZS382fBBtEziKzp5H3BXQtHhtzkD+XBfRPHjN6NlmNdRSMhfSe43jnou5qIeFT7x1URF62bs2LGNXm3fmtX50aNHk5+fT15eXp3VqRLzFpIlkEc5BFWkOht58GKobEwf5K3IQlf9F1CG6Ei0vOgF9O4oF6MnYVf5ieh3mCgq3kYxEf9x82aCFYOhCYVFUVERH374YY3nfQdtgHbt2tV4vS46dOjA22+/zbXXXsvDDz/MSy+9RFpaGvvuuy/nn38+xx133FYft5GMChSN+Gfgn6gPZj66FPZGaYwg8VGJLokVaK2uDbX3sggIqzmdj37qV6Mwq5+ipO1NKL/j2yZhbya+x4ZhGIaYN28eQ4cOpSf1Gwze6LkCeRCOQZ6JN1Fo0YTI2LuRETMG5VwMRqUlEpc4/N4ecNt0QaujoDCNbsigKUXGkg+9ehgZOwegJZ5DkddkDVr6ucXtK5lh5sXF0278ZqCw0PLQjG1PfeIiWTJ0XeIimajA3fu4ha+QtRIjXlS8hwT+0cib9xSKs3gRWSvHRua6FjWgPAn9hvujGIr2aNEgeq746KOP2Hvvvbc8bt++Pfmuu3ybNm3Iz89n1KhRcZ9LLBbjww8/ZK+96ipM03yJBTu6tGoAJSUldOrUieLiYqsQVQvKt+gO/I5QSOyH1rtOQTVP/GVvLfr5t3HbZFIzmXsD+rlPQZfoXOTIX+Xm+x7Sxf2Jj2RuLJOB8xkzZrDlWBiGsQUvKnKRYV5fUivoPOg7X7+Plj1OAy5AhnwbJBBeQUbMamSkdHJjjqdmoKgXFQeiJnq/R2LkNOTtWIeM/zQkNJ5DHopd3f164B4kMJYQGmZ1Jc3eh7wa85GoyM6uK5zUMLaO6dOnk5eXF1dNrb4KS4nf4dpERZQAhT+tQcuU3ycUFccjUZHhxj6PxEVXVFnqUBRadR3wX+AE1I18CfKIvIbExSbkfVwJfPjhh+y9995b+oCA4iqWJ/w9Y8aMLeLC554sd9s3F3HRGDvYhEUDMGHRMEJxERUSE9FlcAQ6ZUxAid7l6KezAQkE36omhhyMtyCn/TB3v46wY3Y/dFrwZWe3hsuAmwgCq1hiGDsqRUVFcc2soqKiNkMnIyOD/Px8Bg8ezJw5cxgxYsQWQ8fHb9+DVjuvQbHaDxCKijXIGDkOdcp+HziXeHGRKCr8xfpy5JH4OQqxWovEhY9X35NQVNzvjn0O8QZabeIiWgHKRIXxXREVF40R8iOR1+AN5Bm8ntprTs5CQuBVJCYGIg9eoqjw+I72ZcgrOAqFSUVFRRf0+2uPfs+TUXC2x7+fp1AQ+DHIC7IReUHeRL+1GTNmkJuby0iU0O6rbTUXcWHCYhtjwqLhSFxkISejFxJvoEtzFfIwHEbohGyL1u0K3e0dVDV6M1rjq0biYzjwMTV/+lvDZuRV+XyHj4k0jB2VoqIihgwZsmVVsbCwsIao8ETFhV9VfOGFFzjqqKPIIayjD8qfeBkJgwrgUtTJeG/CYtknIIOmDAWSfoBWRY9HouIOFPB5E6GoqEDLMv+HVk5/hsTFZLQc8w0yro6idlHhSRQXJiqMpsSLC/9bbMh1ORaL0R3lG40CLiZ5rsYsFAblf5cvomXLc1HZl0TLwudUPIiaW6aiEMeTCEXFINTxfgkqpNAOCYLJKEcpxx1PR/Rb3A1ZPoVoMeEkZIXcgH5zOej8kfhbbA7iwoTFNsaEReOQuEhFl89+aK1uJPpJ34e8DyDnvRcPZSj/IuycPmDAABYuXOjm2wn9vLdlnsyT
6DK+2oSFYeyAeFHhVxWnodXGZKLCczaKr94HGQczkUFwHqoe0wFljwXIoH8LnfU2otCLd93rv0CJ2FWEjfMuR12593Pb5aBVzVy0JONFRarb5gq0FDMWGTRF6Ow6BtgfraDWJio8XlxEV0lNVBhNhRcXjbkmx2IxJk+ezEknnZT0txsVFa8gq+RJ9J0/HsVTRLM0vaj4ClVdm4V+G8eh39YylBe1S2T8Yje2I1oIeAeJhN7odzgJeUgK0HJqe+AxdwzXoxCqc5DQ8R6X5iQuTFhsY0xYfDskCEA/tbYoBMoLiY1ITFQAMHz4cGbPnp1kFj9XGlonmMbW5VR4SoDDgU8IAisUZxg7GlFRcREK4LwSeRUOIuwBEeVxtNq5L8pn+ACdkfZHtfBKCMMi1iLjv6sbcx0yPtKRAPmdmyNwz6WiM+N5SHzsiVY4H3fjT0SCxIsKf3b9JfJ+7IHq4T3h9t0JreI2dNU36rExUWG0RGbMmMHo0aPjxEWiqDgAGfkHIWHwUyQsBiJxkSgq+iKrZSaq9LaR0FOREhkPirW4B4mWA93Ni4rD3Jj57pgy3HZ/QvkYv0UezAD9dpubuGiMHWx1No3tRhAE7lZKnz6Z6DL3NbCCPn06EwTlW8bUJSpEtPLU1lKNqksV4IWNYRg7Domi4nTCUIozUG7C4wnbeFExCeU2tEN9H05AvtYpyEfbxW3fFomKLm5MCjIW1iKvxMPIoGiHzkIVKEl0Ikqc3g24HZ31HkNFtpcRLyoWoZCnq5HX4i10hi2j4aICN664fXsTFUaLJjc3l+nTpzMDGeN3ojClmYSi4iIkFi5CpWCmIC/lQiT8k4mKhWixYDkSFN1RCFM58XUs30Xhib687Rvo93ww+k1WIxGTQ5jgfbh73B0tMMRQc0o/7+nuWHOBvffem48++mirP6ftTavpvG00b5YsWbJV2/tKJjVL2DaWanTKeRAosRAow9jBSCYqQDkTOyGPQEfU0A4UKhEVFUehROsKVMIy091eceMPQQKjjPACeyRK/jweJY0uQp6FFLRi2gsZLwtRyNOH6Ay1DoVcfIn6XoDiwQe7OeYir8dSFH4xA4WS5ObmNvpzqatPh2G0FLy4GD16NDchb+J/kUfPWw0pqIA9yNP4FGE52WJUWMGLikWE+VS7oIWAUnSOqHTbtEX9ap5Agd8jUJ+MSUhUVLoxVei8kI2Ew3x0TuiGvJrfQ6Kk3I3zDRdOR7/tlRBXura5YsLCaDGE4uJvaO0uWsK2IZSgtb0HgRXN/sdpGMa2pTZR4TkUxVKD8hxeQsb7AuJFxUqUNZbtXve8jIyCSciYKAV+jFZNz0JhS4+gMKsKZIiAhMdKJGzeRYmehyID5Q5UZWo5obiY4MZ3A74gTP78tqLCMFoTUXGxCnnxUlBYkf/NR8XFDUhcHIe8B32R58AL/YHIU+h/6z2QdyEd/Y5fQuFPg9zYIcC/CL2Rae5WTujlGOrmfhOYjoo4ZCJPCISiAnQOeIWGJ7Q3NRYKZbQo9KNagZyc4wiLwdXFZjfucLediQrD2BHJy8ujB0q8TpaYDTLos1A89X7IuDgUGf8fEC8qQJVlhiNjpAsKR1qPVjR/RryoAOVKnI8MjbZIXDyPjIxPUGWn3YG/o14VZxHmZnhx8QwmKgyjLry4+CYW44svvtgSHnVfZEwK+h1uRkUOnkJhUG2Qp6In8lIEKNahEnkzNqDfeBkKd3ocCYoL0G96JvrNVhDmRFW7eVOR56IalYx+Hi0cHOyOI0C5Up76+nk0R8xjYbQ4Qs/FJ6iOgq8S70vYtsP1i0V5FK+gn3kJUNFifpyGYWxb8vPzGTJkCK8AZ6JEyypkRAyOjIt6LnJRZ52lSECMRMmVVWhVcw066/wHGRnZyPMwEZ11znU3zyrk0ShHORSgsIl1qL7+ENjSVRgkLmYiQbHe3aYgb8gbmKgwjNrIzc2luroagC+++IJdd901rsP3Hei3tsw9V46ERwZaMBjg
nvvSjRmNxIIP7C5FoqIjynEC+KF7bhYSLjlIRHiroy06b7yIfvfHIOtlsxsX7c7VEkUFmLAwWij+RyaB4ftgJFaeii9h25J+mIZhbFuiYVB7oZCmM5BQeAetGF4YGX8oqhL1BUqk/hB5MkChD+Pc368QxmAfhLrt7I6Exznu5nnO3Q5G4gIkLlYjn+ooFAYV5V63j2XAlClTmDRpErFYjLUoNMJEhWHUT15eXpy4mEF8E75YLEY3lJORjX7TPpH7K7Tw0Csy31IUEnU4EghFKJSpBGWBliAvSAqhdxMkKl5323wPVY5qS7gcWozExV20TFEBFgpltHCilafGjBlMtPLUmDGDCYLSLWMMw9gxScytuJswkfpBVD72XVRP3nMHMhaOQeFLJ6LSkbciMfKQG9MbeQ52Bv6NPCEZqHKUFxUBEhTPu23fdNsch8KtNqOk7EcIvRIgUXEd8lh4UQE6730Ti5moMIxG4MXFDOQBjIoK39L3Nyi3CeSR+Ap5IHsjURCgsrR9UDPKsagAwydoEaIYeTqOQqGRs1C3btz201Ci9zFuzAa0DOpzNlLQYkNLFRVgHgujFfHpp5829SEYhtHMSJawXU0YfvARKgt7AmFydHtUSeZwtILZBuVA7Ic8He1Q6cp30EX0DJR/8SJwKmqEB8rH2BkJiudRguYRKLTqSRT69A5a3RxI6BFZj0rM3khNUeHxIR6GYTScqOciKip893kI2/RmIhGxBAmM/oT5Fv2R8FiCzhOgHI0sdG4AnQemImHhG1xORaGT3yfMxfJ5FzEUEvkILVdUgHksDMMwjFZKbaLiY+TXPB0lWL+CwpFOQAb9FEJR0RkFVBajakwnIW9De+TFmOvGTEDhDS8iY6QzMlAmEy8qUlGy5t5IvKxEK5e3Iw9FlpujLlFhGMa3x3f2rk1UVBF6EIagwgwLUa5FKjKcv0QiYIB77mjgR8gbORkJkP+6+2x0vshCuVezgaeRZzMDnV8q3X47o/NKDtEmwy0L81gYhmEYrY76REUOuuAPQRf0Z1By9k7I6B+CLvJrURL2UGQE7Ik8GL5D94/d30eh8IZn3f6PRGEPryFjwosKUOx1JjCeMDcDJC5mI0+GiQrD2H40RFR4hiARsdA9DpCnYgAKifJMcPdPIeExEoVVDY+MORRlf76AQqWOdPusQh6NFFTsoQ1aXIjFYi3Oc2HCwjAMw2h15OXl0QtdyGsTFQEyGA5EF/JpyIMxAomJhShMaQAyAjagVccjUP7FG6ibziS0Ank0obgodLcfIeFQiYTFHBTm0N4d25coNOpgd9wvoNCs4cCxxx7b4owKw2judOjQgV7AYdQvKkCJ1A8h4VCKyscOR6LC/zp9+M8E1JfifdQsc1hknnIkHo50+5jq9nm4ez5aFcofV0sUFxYKZRiGYbQ6SktLWY5W/+9AAmEpqk0fFRUbUM7EeOCfyPMwEImJte5+FzcuDQmLaaiQ9XjkxdgfiZXnkbdjIhIVE5HYSEfCogCJii7u+cEo9noXJFL+jspdvoMqVLXkcAjDaK7k5+ezHFVnuo/6RcXNKFH7XrQ40R39hj0+7wJUavYTJBb2jrzuRYXva3E4EiG+7GxbZJCvi8x7BvArWt55wDwWhmEYRqvEx1H7vhC7odCjucg48KIiDRkU7VECZToSFDsDHdCFMgOtVk5BHbbHIWFRjQTFF8gDkYo8GmNRuJOfbxNK4qwiTO4ehLwVb6Aa+bu4G7TsFUvDaM5kZ2dTWFjI0KFDuRaJ/p9Su6gYhPKmpqK8iSHo/OHPDVVIPDyOCkAcApyChEJAKCyq0fnBP3cQYcU4LzY2I3ERQ+ebH7tjaUnnAfNYGIZhGK2WIAgoQI2wPke16GcAC4gXFWmEhkCVe9wRrTKWIWHxLjIc9kKiItU99wqKp94HGQMlhKICt30bZJC0Qd25QauV76MwqGJUBjfKWbTMFUvDaO54cTETGe2PoN+pJyoqrkILAMcBY9Diwnvo
t+w9FS+6OfZ12/wG9bsBnVPaoPNBGTqnFKIFhUGoCeeLqEpUqhuzHp2DSlGo5WHI29oSzgPmsTAMwzBaNd5zcSPqX5GNykdmIM+EX6n0K23eWEhzf1cgI+EFVEpyIOqYMwflU0xC3pAvE/abhVYqy5FhMRJddGcBf0MejKNQt94pqCIVKM/DY54Lw9g+RD0XN7jnTka9bbyoeAYtNPiy0T9BoY9PovPGUWhh4Ul0DliPijAciDwcvrT1OpQ7VYE8pnOQd+JR4DP0m89ACwxl6NzRAZ2DHkFFIL4GPvzww+3wSWxbTFgYhmEYrR5vQDyMmtftjxK026E8hygpyCDY4G7vohXFg1GYQymq/PIBMkQmoFXJEiQ4OrkxqSjMIoUwzGo48B9kLHQALnf7PBYJjRdRSES02sxZyBhZA2RmZrJ+/XoMw9h6EsVFAfIkDEaiAuRJ/Az9Xj9wry0D/gXMB/KRkOiJ8qRWogWMQYThU8vQ73pXJCrK0PngBfS7Xk5NUdHGvX4PEhUAa9as2eafwbbGQqEMwzCMVk9eXh7dUQjCFLRymIUMgy8J46A3ITGwEV3oNyEDozsSFVkojOp9lNxdilYpl6FeGN2QKGnv5om5W4U7jpdQcudBaCX016jC1PPIeBmHuvxGuQ8lmn6NEk8Nw9h2RMOiHgMWEZaOvQOJ/57AZcjw/x8SEmnoXPI9FGL5HPo9H4hCL0uQV+Mt5OV8HrgfnUOWAjegfKydgVvdvqOi4jE3z+7Iw5kLHHHEEUybNm17fAzbDBMWhmEYRqtn48aNrED5CkcgUdAWGf1FSCBUoAt6CRIVHZGgGI+8CS+48c8iz8NwVHWqwN12RuEMKYRVXjKQ56ICCY+pKBTreuAcFJJ1MjIyDgZ+QfyF+T7gOpQXUlRUxKBBg7bdh2IYBhCKi6/RQsF1wMVIVPQAzkcLAptQ+eofoLCo7shjMRWFRY1FeVH9gCvReeNMYD8URlmIFhc+ROeaj9H5pQR5Jp4kXlTkIdHxM9SPpyWICxMWhmEYRqvHN8T6FfB7tAI5D4U69XJj0pCXYTPyJnhvw4EoVOlNFAJ1GDII5ri/S9FK5gZgFTIavkQrjzE3byoKuzqKUKScgVY7v0TeCjBRYRhNRXZ2NkEQMGPGDGagXItUJCI+RrlR5yCh0AstEByMQiVXI0/lRchzsQ55Lfd0t00on+swNz5AyeDpaGHjVPTb/zvwO+JFRRt3fKcjT0p/JC7mzJmzXT6HrSUWWCZYvZSUlNCpUyeKi4vJyspq6sMxDMMwGkFil13fLG8xukgPQFWcKpA4yCDMmejo/t6IDIInUWhCCgqJ2A8ZCAHKg1jnxnYGRiGvhs/ZqEDC5Q0kLNqhajMz3HFGj9FEhWE0HTNnziQ3NxdQ0vVA1FvmR2gRIQ2FJi5B1eYmowWJY4E7UUjkmUh8lKFGeR3QOceLls1IRFSjc8II1GhzCRIgdxGKClD+1VSUx/URsCYtjbKyaC2r7Udj7GBL3jYMwzBaLYmiAhTSsBwlV3ZCF/4UtKqYgUSGx4uLjqic7EYUK+1DIV5GyZyHotyIDSgkahMw080xHAmPVOTBOMTt/yYUQjV16lSOOeaYLT035qKcChMVhtE0jBo1ijlz5jB8+HB6IZFwDPIwVKFFgt7I87kbEgD3o3ysbsC5yAvxNbDQbdcXhVWNQcb3PcAT6Bx0NMq72gWJjn4Jx+NFRSrKC/saCL4jUdFYTFgYhmEYrZJkouIjlGjdF61EdkHGvu++m4pWEFNQIiVIXLRF4uBQJDxeQrkax6NVxlIUptARCYL21BQXKWils8jt+xC3z4kTJxIEwZayuKuR4WCiwjCajry8PHqhRYTjUd4VaPFhI1qQqEK/81OQN/IpJChOROKjJzq/LHLb7ozONSeg0KoXgd8iUVGGQiU7AA8Af0Fhm9MIRcU7brvmHGxkwsIwDMNodWRkZNALhRRERcVyFJ7QDl3AO7nX3iTsSeErwpQT
ios16MLeCfWdWIbCF650455F4U+T3NiZqLzku4TiIgd5I2YiI+LfaNUy2qMiCAIyMzMpys83UWEYTcjmzZuJxWK8hhYGjiNeXKxGiw4d3N+7osWCoUhk+K7cPdw2C1Co5EAU6vgOqgI3ibAJZyYwEYmR+4HTCJt4tgRRASYsDMMwjFZIfn4+Q4YM4XV0Ef8hil3ug3IZlqNVx3Wor8QzKA76BXRhHImSqvuj8IRSdHHPRJWgZiLRkoU8FwuRuOiJQqVeQZ6Ps1ATrJnIKFmKVjJPQOETyXpUWJ8Kw2geeC/iTe6xFxdlyFORgs4TpcAQ9PuvRIsQaYQNMru7+8Xo3PA4qiD1Z+JFhe+rfQwqBHEDqmDXEZ0nmruoAKsKZRiGYbRCBg8ezPz585mBkqCnIFHxNarK1BN5LZ5Bhv+RKOzgKNQQ63HknVhE2KW7HAmPKWilcTwyIN5DK4m5SFw8jhI6b0J18E9AK5fzkag4njAp03pUGEbzJggCCtDv+UnkodiEjP1uyPDvhMRDVFTg7tPQ778X8lo8iSpF/ZHkogJ0rslGeV1VtBxRAeaxMAzDMFopXlwMGTKEG4ALUKWmWe71D1Ds8q7oQg8y+EtRxadRqFLLIvfaF8DDyFNxLMq5eBMZCnshL8hU5L1IXOk8C9WwH0y8qLDKT4bR/En8PZ9OKB4GIk/EElRhLq3m5qSgfIpXgJPc9ptJLipwc8xGixbLaTmiAkxYGIZhGK2YZOJiJMpt+BiVjzwIJVS/jTpqn4sMgalujhEoKfM95KnIRjkWs5GoGIPCFj5Cde3bEuZMJIoLj4kKw2hZ1PZ7HubulyJBMCCyTTnyUr6IPJkHA+e51yrRIkOiqABVi3oZeUEhPJ+0BExYGIZhGK2aqLi4HjWlewWFQg1AwuEd4F504Z8Y2XYqyo94FRkCu6BSkE8gr8ahSFS8hxreHYc69q6hdnFhosIwWib+9/wQytvq7p734mKBux9AKCo2ospxvQlFBdRugD8BPIK8qL9H4ZTRAg/NHRMWhmEYRqsnNzeXrujCvwpYiUTCTcjzkI9KQS5CCd2+tvzXaLXxICQsHgf2QAnf65E4mIFK2h6DxElizkRUXFiPCsNoufgS1icTigpPVFxUoTyuVJSHcRwSC7cQLy4SSRQVEHpGWoq4MGFhGIZhtHry8/MZOnQoQ1G37DtRB+0ClEA5GPWeOI6wBO0CVBXqIuTlmIE8GB8iwyKGQqoqkViZjDwRM93+Ro8evWX/1qPCMFo2yfriJDIMeSqK0PlhoHt+krt/xN1HxUU5CqF6AuVwZaOS1wFhmFRLEhcmLAzDMIxWT3Z2NoWFhQwdOpR0JAI+ReJiNsqpOJvQAFiAqjh1RzkZAZDnXit0fw9CguMd4DbkjVhKTVHhsR4VhtEyaYioAC0y7IS8mUuQV9QncycTF4uRl7QLyuPqAPwUJWyDCkhExcViVFyiOYsLExaGYRjGDkFUXFznnuuAqrLsj0QGhKKiB6oYVYFCGyAUF35FcpDbbiy66ANJRYXHelQYRsuioaIClIy9irCvRRo6T3gJEBUXpcC+6PyzEuV3TUOhksehBQwIxcWrwApgNPKypqenU1ZWtpXvbttjwsIwDMPYYYiKi/OQMLgYlYv1jepWEooKUJx0orgIkPgoduOPQTHVN9G8VxMNw2g4jREVoEpzpSi0sgdhX4tEcVGFwp42Ab9Gns4UN+5lNy4qLpYhj0Y6Kmf9DTC7mfa9MWFhGIZh7FBkZ2cDin8+H1VzqkYX/q9R9ZZdE7ZZi8IVqtzYHDd+OWp8lQfshgyDlhAHbRhG3bRr145eqG9NQ0TFP1DDzdOA76PFiHL3WlRcVKCGnIWostyXwFBUpc6HPUXFxWMo3LISeUULgdmzZzN8+PBv/+a2IyYsDMMwjB2K1NTULQbDmUgopCAxsZ4wedtzB/AQStg+I2H8IBTKkOLGnkXo+UhNTaWiomI7vhPDMLYX+fn5DB8+nNdR
iejT6xj7D+AB5KV4GeiKeuRAvLiocLeXkHcjmzBkqn/CnC8D81Bo1dtAZ7Tw8eyzzzZbUQHhudAwDMMwdgjy8/NZDryBBENbdNFPIbmouBH1qbgJlZNt624xNz56Ib2PmuVmDcNoeQwbNow5c+YwA1V7u6+WcV5U5KHS1GNRmNNT6LySisTFZiQqnnev56EKddOQJ7QK9cjpBwxHXo/33euDgZ+gfItjjjmGwkIfJNX8MGFhGIZh7FDk5ORQUFDADOB64MFaxnlRMQtVdCpA4uJuknfLjTa+KygoICcnZ1sfumEY3yH1iYub0WLDLu61KhReORYlaEfFRZV77u/I+3Al6qztxUYMeUO9uMhzt51ReNU/UT5Yrjuu5iouTFgYhmEYOxxRcZHMYEgUFRAvLu5IGG+iwjBaJ7WJizsIy0x/jbwQMeSd+CU1xcUj6JyyCXk8XwZGoHLWFcjbkUIoLl5FjTuPAC5w25yO+uqMovmKCxMWhmEYxg5JbeIimajwJBMXJioMo3WTKC4uJjxHTJ8+fcvzjyIRUUkoLh4GfufGp6GmnEeiZpsvEYqLcuAFZJj/EXkx9kfnmtTIsZwCXEjzFRdNJixeeuklrrzySo4++mh69+5NLBYjFouxdOnSbz3ngAEDtsyT7LbPPvtsw3dgGIZhtHQSxUXUYKitqlNUXFyMiQrDaA0UFRWRkZFR6+tRcfEoOkd07NiR5cvVzm4ROic8TpiofT6qBPUoEhUfol4XxwDjSS4ufgs8h0TFjcRXWfJV6U6l+YqLJqsKdeKJJ1JcXLxd5j722GOTfjkGDx68XfZnGIZhtFy8uBg5ciQrUVhDfaVigyAgFoux2o03UWEYLZeioiKGDBlCT+ouFZ2aKt9BpXucuWED48aNYwQy9B8DbnCvnYiEwWxUPe4RFCrl++FMcPdT3f14lKz9BqGo8OPbIEHhK9K1QXkXM1HVqGHDhjWb8tZNJiwmTZpEdnY2Y8aMYcyYMXTv3n2bzf2vf/2LAQMGbLP5DMMwjNaNFxd5eXkEDSwRGwQBqampFOTnm6gwjBaKFxW5wKHAKyQXFwsWLGDw4MHkAgchAdAR6Iaa4p2BciDGIXExA3gWdcvuCDyJ+lJ8g7pngzwXMSQu5iOhcD4qbd0WCYl85JlIIRQVoCISrwHrIu+jOSygN5mwuPfee5tq14ZhGIZRg5ycnEb3nbA+FYbRcomKiouQMPA5VlFxERUVZ6OE6pGozGw/1LhuV+ALJEzGuddSUTWnucD9wBRgNXAAcCsSChPdsbwAHIjKygbIU3EeKnW9D0oU96LiDhR2leLmuQ4YMmQI8+fPb3JxYcnbhmEYhmEYxg5FMlEBanL5KyAHiYtkoqKzG/cLYC0SFzEkLmJAMfI2ZKHO2n8HSpB34XDgM+DcyLFMBI4H2rvXUpCoyAeOQgnf5yCxERUV0wkrReUicVFUVLRNPp9vS6vsvH3fffexZs0aKisr6d27NwceeCDf+973mvqwDMMwDMMwjCamNlHhOcvdXw9JRUUX9/pp7v5OQs9FhrsNRqFVbwFPIJFxLhIWQ4F73Jy3Ax8BG4G+KGfrJGChO47TUWWpm4FDUGhVWyQqPKej0KuVQF5eHqWlpd/iU9k2tEph8ec//7nGc3vuuSePPPIIQ4YMaYIjMgzDMAzDMJqa+kSF52eoB0Vb5JlIFBWe09z9nSiEKQMY6OY+HjgM6IBExc+Q12K82+YeFPp0Auq2PRSJjSJCUQHwU1Qx6jpgHsrpiHIHCsH6GgiaUFRAKwuFOvLII3nkkUeYP38+mzZtYuHChTz44IP069ePjz/+mIMOOogVK1bUO09ZWRklJSVxN8MwDMMwDKNlk5eXRy/kTahNVAAUoopL44CjSS4qPKch0VCKRMUFKFH7PGADqhh1BBIVnVHp2SOBnyMR8SISFZciz8M5SHCUu/mrkUA5F5WmHQKUudfq6rvTFMSCRh7FpZdeynPPPdfoHd19992MHTu2
9gOJxQBYsmQJffr0afT8dbFmzRp23313Fi1axAUXXMD1119f5/grr7ySq666qsbzxcXFZGVlbdNjMwzDMAzDML47YrEYI1ETu7PqGHcp8F8kGs6oY1wVcLkbew4SFRegZO5fIo9DibuluPEdgHQUJnU3kAmsR96R05CoKEcJ4GnutWLU9+IWlNtxKQql2t6ioqSkhE6dOjXIDm50KNSyZcuYO3duow+qKeO9unbtyoUXXsiFF17I888/X6+wuOyyy7j44ou3PC4pKaFv377b+SgNwzAMwzCM7Y3vQ3OTe1ybuPgHMt7vRoLgtCRjvKh4D4mK41Hviv8ir8RP3bgsYBPKpchEoiINeU1SgMlIVJzqxqe5e++1yHT345GQeBK4BlhO8/BUeBotLCZPnszkyZO3x7FsV0aMGAHQoM7e6enppKenb+9DMgzDMAzDMJqAxoqLO93j0xJeLwJeRSFTxyORMAr1ungbuBeJh/VIhHQm9EKkujlORdWfdvLH5u5T3TYVyGDPRF28X0eCwr+P5kSryrGoi9WrVwOQmZlZz0jDMAzDMAyjtRMEAQWofOsddYz7BzAWiYv73XNVKA9jZ/faf1D1phgSBjcCe7p570T5F+VAJxQGlebGenZKeAwSFFVIYKQA96GE7+6oHG5zpFVWhUrGY489BsBee+3VxEdiGIZhGIZhNAca6rn4K/Is3ImSqeciT8X+wG9Rt+273NhTkLi4BFV5uhWVh61ASdzH+H0nHoubO8WNjeZY3IfyKcagHIu7kCBK1iW8KWlxHotDDjmE4cOH88wzz8Q9/+yzz/Lpp5/WGL9+/XouvPDCLQnn0dwJwzAMwzAMY8fGey4eQn0iEqkEJgH/A+YDf0Y5FeOAT1EZ2L8gz8VdwIPIU3EuYbO895Cn4WXgWfecx3sqSoFvUPWo2kTF7agD989RpaieQPv27bfq/W9LmsxjcfXVV/Piiy/WeH7ChAmkpSllZcyYMdx6661xrxcVFfHll19SXFwc9/xbb73FDTfcQL9+/cjNzaVz584sW7aML774grVr19K2bVv+9a9/ceihh26/N2UYhmEYhmG0KHyVqJOR8R/Fi4olqEP2U0A7JBqOBx5AYuIK4J/Ie3EVapB3CPAYEgLHASNRNaeX3dzHoBX+AImK9e7vdShkqiM1RYXnQeANYA1QvnnzVn4C244mExZFRUV8+OGHNZ7//PPPt/zdrl27Bs83ceJESktL+eyzz/jkk09Ys2YNaWlp9OvXj+OPP55zzz2X3NzcbXLshmEYhmEYRsunrtKzUVFxHtAehUDtjnIrUgirON0F/AZ1zG6HvBfdgEFAHpDr5tsZCYpn3XbHoF4X65EAqUBeig2oPO1k5Lm4DYmOGBIb17n5/u1eay4hUY3uY7Ej0pj6vYZhGIZhGEbzp7Gi4k3gx0A/oAB5FQ5FYU0PoCTvzcDvkSfhY1RFahQKo0p1fw9HlZ2eBL4PfA95J0rdmO7Is7EB9a24B3ksbkPJ415UfIFEyPZukrdd+1gYhmEYhmEYRkvm24qKcZFxBUggHAr0AY4FPkdejYXAiaiXxUfIi5HtxlW6bSqBx5EwOQF5QEoIO3QHqG8FSFz8AOWAREUF7vjnIjGTkZHRpL3jTFgYhmEYhmEYOwwNFRXnozyJPwOjiRcVI9x9AfI8pAAnobCnp5GomIRExXKUXzEQKENejVTkqZgDvOPm9n0sipGo6OIej0dlZ28B5iHPhhcVoNCo14Gvgfn5+Y37MLYxJiwMwzAMwzCMHYKMjAx6oYpKyUrLfgTMBH6IEq7bouZ1LwEvuL89XlzMQ96IHqiJ3f5Ab0JRMQIYRlhO1veneAN16T6SUFT4bmvFKJ/Ci4uj3ba3AUOBRe55n28xA5g/fz6DBw9u+IexHWhx5WYNwzAMwzAM49uQn5/PcrTCf1+S1/cDdkNG/2QkAsa721QkLqKMQGFNPVBORBbJRQWEgiINeMvNdSQwAVgdmTMT5W+U
IoHRBeVdnAv8CuVjDKD5iQowYWEYhmEYhmG0UmbPnk1aWhoffPABsViMwYMHM3/+fGYgozyZuPgr8hZci8q6liMBkExcVKPQpg1IDGSiHIklQC/kXcDNUYE8IK8BzxGKivtQ/oY/lhg1xUUq8nj8HIVNlaMSt81JVICFQhmGYRiGYRitkNmzZ5OTk8POwL777ktPwrKs8+fPZ8iQIVznxp7u7i9HORLzULnYq5ChfwoSAiBxARIaFUhUtCcMY+qEvBZfo4Z6/QlFxSvAFNSB24sK3/TujoRjyUQCohR5KdqiqlCvAytRPkhzEhVgwsIwDMMwDMNoZXhRkYNW+3sAByOjvDZxMQ94FYmEEShx+3XgSvd6VFw84+6PQEb/RiQAvLjYB/gAeRQqUMO8V1CDvUNQHsadqNrTKOQ9uYR4cVGKvCFRUXEdKitbTfMTFWDCwjAMwzAMw2hFREXFzsg4/wVK2M4mvqGcFxeXolKwWW782UhE5KAeEVe6uU8BDkfG/VTUCK8T8moUuzFeXPQAFqPKT5uQJ2Swm+MWdxzdkcBoizwXZyNxUeH23wGVnr0fuB4llkPzFBVgwsIwDMMwDMNoJSQTFeejUrAlwEQ3LrFbdTdCUXEWcAbqhu09FDcCf0PhTx8CC1DlqEyUsNzZjfPiIg31stgF5VwsAPZFTfPOBf6DKkLFkNfiLBQOdTvKobgJiYtzCUXFDGDRokX0799/Kz6h7YsJC8MwDMMwDKPFU5eoAAkHqCkuRiJhsYlQVEDoefgBynV4ELgIlYadRCg6Yu6+s7svRl6MQciz4cXGTsgDcg/yYlSjLtw3udfPcvtYgfpj3AYUoXCsmTR/UQEmLAzDMAzDMIwWTn2iwhMVF9Uo7KicmqLCEwMygGOQkf82cCDyXMSSjO3s/i52++qJkrszUbJ1D5S0HUMejDmE4mIuEhEbgenAQcBjSGgAzV5UgAkLwzAMwzAMo4WTl5dHL9SD4nPUyC5RVHi8uJgEpAOPu7GJoqIUiY9OqETsOuBUoB9hf4kogbvvHNl+MEq+LkD5EgcCP0V9KG4iXlw85sbMQqFUq1Gytq8YFQ3daq6YsDAMwzAMwzBaNOXl5cRiMb5AHou3gXupKRY8WcjgPws4FoVCRfGiIh14FgmLfZHHoRe1iwpQbkZn5OlIjYwtQCFNfYCfuW1uJhQXjyGxkxaZ6yJ3vwhYA2RlZVFSUlLLu2p6rEGeYRiGYRiG0eIJgoACYBUqGXsHEhe10cbd1yUqlqOGeLsQioq9E8Zvivz9InCxu0+NPD8CGOnmnoY6cJ+B8jcWAy8DPyFeVHjuQyFSX6PO4c0Z81gYhmEYhmEYrYIgCOISsn1fiNo8F4lUIKM/FeVepCLPw0soX2JiwvjZqP9FNsrBeAaFP/k+F0dFxo5A4VQLUd7Ea0gwrEFVo5JxHwqHmgEsWLCAgQMHNvCdNA3msTAMwzAMwzBaDfPmzWuU5wJk8FejFfc2yLOwFnkevkB5FV+gJnee2Si8aSPwEOpNcQRwmbt/Bnk7PHOBr1CY1CtIMMxEiea3EYogT0sTFWDCwjAMwzAMw2jGzJ8/n44dOzZ4bHZ2Nt2gweLiI+AN1GMiQAKjFJiCmteNRSLhMOAJJAq8qMhAidyPu8er3ZzHEi8u5iKB0AGFV93gHhcWFm4J4bqJUFy0RFEBJiwMwzAMwzCMJmTBggVkZmYmfc0LhayNG4nFwgKvmZmZLFiwgHnz5tGhQ4e4sb7Z3U7I2K9EYmEyEhoVhMnWHyFDvw/KYfgQCYUy4EmgEIUwpQDHIXFxA/AIEhVfouZ5C4AlSBzc6eb24uI21J8imajIzs4GiBMXF9MyRQVYjoVhGIZhGIbRRCxYsIDBgwfTk5rlVL1QyAUORZ4CLy56AYMHDwbYsi0or2KAG/+6m+dLoDcqKdsGCYgMlBuxHJV6HQbMR0KkAhgInIjCmy5185yOwqNmuG03uGOaBVuOOxaLbWl49wvU7Xsm
qvq0EXlGEkWFx+eHrHbH2NJEBZiwMAzDMAzDMJoALyoShUMQBFtExShUcvVUlBR9M/I6HIe8Bt3dtpORV2EAaoz3U1SF6Z9u/NHAD5EYaA98hsrCjkCiAmCIuy9w98e4ey8uZrhjXOqefxQJgKgY8uLAN7x7DVV9ApWT/ZrkoiK6fVZWFgvy81ucqAATFoZhGIZhGMZ3TFRUXIS8AXegsCLvfRgJnIMqKy1CpVnLgIdRCNNA4GTUpTodeSXORZ6GKtQf4ifIS7AW+BTYCxn865GgGILConyQVV3i4k4kTIIgYN68eeTl5RFs3FjjvSV6Hj744AP23ntvOnToQGF+fq2iwtOc+1TUhwkLwzAMwzAM4zsjmagANaurBv6AekacB/wIiYI2KG9iovv7bmAQKvH6MuozcR5wgptjKcp5OBEYBdzvxpWjUKcRSERUu32nUL+4uA2FPQFkZ2ezMYmo8Hhx4UUFUOf41oIJC8MwDMMwDOM7oTZRAfIyLESeiNOQh6IcCYkuSFisQB6MdOAB1GF7MBIVpyChsAgJi4GoTGxfN//9bvwZyFvhA5jWoRKwieJitZtnMGH/ituomQtSGw0Z09owYWEYhmEYhmFsd+oSFaCE6DdQ5aWfoNyIzYSiYj0SDl1RInYaEheTkFchQKFSS1FFqH6EYU5HIQ/Ehyhxu9q99iLwNBIORxOKi7kojKkv8oascvsoQg3tGioudjRMWBiGYRiGYRjbnby8PHqhZOtEUREgD8NhqJpTT+BIJCo6EfaWaI+8CxXAj4HDkadjrZujExIDS5DA6OPmfxrlWByJcjG8qHgKhVQ97cZNQCVm56DKU3shQbMZdd9+HQmO999/f2s/jlaJCQvDMAzDMAxju7N+/XpisRivokTts9zzAQp5CoAxqNrSfchzcBoKhUpBnoYNyND3XS9SkehIQSFNMVQZCtRbApTo/RgwDoVBeVExxT13GDAViYtvkKiJiopiN/5mVDr2/fffZ5999tn6D6QVYsLCMAzDMAzD+E6IlmMF9XrwDeteAJ5FpWWXEJaQPRUJic5uG18zKYYESYZ7/WvkuYBQXDwAvIvK0yaKisNRA7s1wPeBT1CDuhNQ2JOJisZjnbcNwzAMw2iRFBYW0r59+0Zt0759ewoLC7fTERkNIdpl+k7kcXge9Yg4BeVDHAPsjjpWP4CM/BgSFx2RGFiHci+ykEG7C0rqXovEx7vIC7HWvZZMVKxDwuQl4L9uP88hj4qJisZjHgvDMAzDMFochYWFDBs2LGnH5tqIxWL0AoYNG8bcuXMZOnTodj9OIznec3ED8DnKk/gRcAhhZabD3f2D7t57LtoiIdEOJXDHItvsgipLPYjCn36Ays0+BswG8okXFR3cdk+gUrIjkRfkJvf4TUxUNAbzWBiGYRiG0aLwoiIXha3kEDZVq41YLMZIVG0oF4kL81xsPTNmzCAlpXHmZEpKCjNmzKCoqIjZwHvAPkgExJBnYXeUZH048lA8jZKxV6Mk7i5IZFS4m2cF8jRMda//Dnk/vo8ETKKo6O5uJ6PvUQEKqSpAYsNEReMwYWEYhmEYRoshKiouAq4FfkXd4sKLil+68Rdh4mJbMGPGDEaPHk0P531oCLFYjJ5BwOjRoxk8eDC90P+iG6q8BPHi4lmUsH0SqvDkcyo6oMTtFPdcNRIVG1F+xWlIgFzl5voJ8E+Ua1GMEsK7R47rLMLv0dfArFmztlR/MlHRcExYGIZhGIbRIkgUFdGOzbWJi6io8FWITsfExdbiRcXWeI0AlgPTkTHfIToWdeAuRP+341DYU3fkiYghT0W1e34VEhWdUcnZk4Cfo4pQf0NhUz1RGFUnFHq1IuH4ot+jnJwcgiAwUdFITFgYhmEYhtHsqU1UeJKJi2SiwrMjiov58+fTsWPHRm3TsWNH5s+fH/dcVFRsjdcoBzWyG4sqMr1IaJj+AXgH+Bmq5uQrQPnZK4BK5LVYR7yoqHa3k1HVqXeBKyLH
0tndNlK3uGioF8YIMWFhGIZhGEazpj5R4Uk0CmsTFZ4dSVzMnz+f7OxssjZubJDBnJ+fTywWo9PGjWRnZ28RF4miIuo1Op+Ge42+B5yLmuJ1QrkPU5G4+COhqDgVhUgFyDORgrprV7jHxSQXFSnudrKb520kLla5/XfGxMX2wKpCGYZhGIbRrKmrY3Mi3nB9CBmVtYkKz+nADGCl28+mTZu25lCbJV5U5KLP8BXqrqSVn5/PrrvuSg5qHvcGkJ2dzdNPP82kSZOSCrx1KCSqGrg1Mn8yUVHk7k9ESdi3uceHI+M/FYkOLyoqkYhIJ15UBMBXKMk7majwnOzubwYeBv6EcjA6R459BTVzLuaisrYdO3Zkw4YNST8rIx7zWBiGYRiG0azZtGkTy2FLx+b6OAtVEapPVODmewXF+LdUUbFw4UKysrKSvhYVFdGwpREkX433omKkG/cP4EJUsnXSpEmMJLmoiCFj/yQkCmrzGnlR0RWJihOBc4D/AdOQcNgfOBaJCi8iEkVFGurQfSEqJVtMclHhS9Ee4+atAq4H7nevdya55+I+4HX0vcjPz6/xORnJMWFhGIZhGEazJ9pUrSHionvC40qUJBzlDuBG1K8g2ep9LBZj+vTErb4ddRn/UebMmUN6ejoAWVlZLFy4sN55Bw0aRPv16+OEwowZM4jFYnGiwouBHwA/BoYSLy5isdgWUeHFgA8nOgcYjiounRrZ/zpkuLdDydftCMXF/sSLCm/8ZyBR4TkR5Vl8DOyLGtVNRjkVaciDUYVERRv33APA3SiR+353KyU0bKvR/7waNct7FPgPcKDbxx3Ei4v2bvsyJCquQ56sefPmMWTIEIyGYcLCMAzDMIwWQWPFhacShdhcCvzePdcQUdELhUdtrbjwxn/HBOM/kTlz5jBixAi6lpcTi8XIWL+eQYMG1Sou/Lz9kAdiuDtunwfRGRiCch+8qFgALEYegZ8SiotYLEZnYBBwNkp6BhnyKajC0knAl+61auJFRbobn04oLh6MzAMSASnIgF8Xef5+lGB9BHAk8hzcjTpgp6KQpxihwLgXuAs4CHjBbXcfEhtevFS7uYtR+NM9wN4oXOs0YAyhuCgGNqF+GQ9jomJrMGFhGIZhGEaLoaHiohJVGvKiYg5wAGp4dizJRUW3bt1YvHhxjbKoDRUXPuE5ijf+ayvLGovFyM/P3yIqcoHj3bgebv/JxIWfdyRa8e+MhMVQYPTo0fQD8lA36yzU8M2LigEotOl0JC4Gun2Ndp9NN//ZuH15cXGp+0w+JBQMUVHh8eJiJ2S0+3lS3L5Aje7WIcP+bvS/2QuFsPn3fwdqUoebI9U9vgPYDbgaeSPORV6Y+wiFQgrqf/EocDvQF+VY9HT7vhKJi5vdNu1R4vj1mKjYGix52zAMwzCMFoVPCr7JPU7MpahERvMnaJW8N1qlnkjtngrvoejfvz/DCUN4fFhMXl4e+fn5jB49Oukx+dyEnoSJy1FR4UOR/P69uOgF7LrrroBExPlo1X4EMrg7E4qLBQsWMHDgwDhR8SvgTLS6fz8SEcOQAf09FPbTHgmLdkhQ7IzCivqgVf/NyJj+oTue9kiIgRNCSFyAxMUC4DXkNfhN0k8jFBubkaHfyc3jxcVCd7yTkSjYE3gJhUS1R83x3kSJ4KDQrQeR52E3YAJKru7o5rwAWO9eB4m4Kci70cONuRfoj/I7dgL2IBQqi1CSuomKrcM8FoZhGIZhNEs+/vjjWkOHvOfiIeKTbr9CBnwhMB4Ztm2QqChHq/onkrzfxXHI8O6JSpRCfEna2jwXXlQkeiUSRQWEpUy7uWP5MTLwhwLnIVGRhUKPTkVhQzsRios333yzhqiIIUP7YGTYHYzE1Guo2lUuMqir3WfQBgmLfBQCdDLwW+TJyAF2BfohcZHoubgX+MDN+zASF7WRjrwMoNwFT4o7tjtQaFUp8DIy9LsiAdQHGOeO51bkdboL2A95FVKRCFiNcjbWAIe4
Y78P+D/gTiSsHnfz3ojyOPZCYVPXofKzs1ECuImKrceEhWEYhmEYzY6PP/6Yvfbaa4sHIBEvBk4mTNR+GYXoFLr7i5HnYTlaaU9DxvNY5MGIVi46C7gEiYDNyMivcvPWJS6ioiJadWk4Eg/J+m4sQob+z4HBbt6fuL8z0Sr868gYPpxQXIwEDjnkkBqiArTa3te972Xutj9a9f8PEi79gPlIfM0DvkE5GO2R4OiLPBadgGxqios7UAhaDFVRGgrcQu3iwldxAokFj59nDjLs30Iemi7uODojYdSDUFy8gv5vF7hjX4XCoAqRN6YIiY1jkfh4FXmudgeeRR24C1Bo1CWEeRSFhYUEQUBJhw4mKrYBFgplGIZhGEaTM3v2bPLy8igvL98iKmrruxAVFT9327+MVrZXIeP6cMKV/1S0en0pKp/qeQEZrT9HHo0s1OU5HcXen4cM5zaE4iAaFhUEQZyoOB15BQ5DnoDJyPCNchlKVPbC5R339xC0cj4LhQ6VopChNLfdNDfncCQooqLidZRPMAyJojUoD2IMMu5fcO9pfzd+BhIKo9EK82K3/+5ICLRFxn22Gz8HhVo97D7Lz93zT7jP7yH3+e3s3lMKEgi+sV3nyPtf4cbPAjIyMuhYWspuhOJwP+SFKETCBRQqNZFQ8OW4Y8tAImixG7enm/8HwFFISExGImsOMHfuXIYNG8YaVEa2sLCQ7Gy9S+tTsW0wYWEYhmEYRpMye/ZscnJy4rwTdeUljERG5IdIMPRFCborUY7CwWiFvCMybE9z+4mKi78hQ/2XyAjNREYqKKEZkouLaDO9xOMEreRXopCozihP4jLgGkJR8TMkPn6LQncOI+y/8BEy3vOQUJgd+ZzeQ4JpJLAEeV8+RknQA1G5129QrkRb1G36auT5mOrm8OJiBRIvxW6enkhUpCLBErjPox9h+dflbn7Pj5HwORmJik3uvceQwEgUFSABcTKwFphVWkopEip7EnqXdnJjC5HXp8Idw7uEHpSzUKjYN26uTcgbszMSNV8gT80C91nNnTuXoUOHEgQBHTp0oDA/f4uoMLYdTSIsVqxYwbRp05g2bRoff/wxS5YsISUlhX79+nHYYYdx8cUXM2DAgG81d3l5Oddddx2PPPII8+fPJy0tjby8PM4//3x+9KMfbds3YhiGYRjGVuFFRS6KkZ+GvA5RY/0XyEj9KzJ890RG8V7IU7EUGTS/QqKgxL1eila1IV5cjEOJvmejCkRtiA/VgeTi4m7CZnpQU1Tg9pfq9n2Me+5ulDtRjESFP5bD3XwvI3GzARnzuyBDP0AhQp7RaEV/HVql/xKt4vd1c692285ChvXhyNCe4Laf6u7HoXCoRdQUFT4nArefF1E4VV/kjXkHeZF+jAz/85DHZxMSEz5/o5qaosLjk+19En0BYU+K0wjFRTUSDvNRnoSvJLUSJX5nurli7nOoRv+bRShnYoX7XLyo8GzcuLGWIzO2llhQWz/37chJJ53Eww8/TEpKCqNGjWLYsGFs2LCBjz/+mP9v77zDq6rSt32fAAklVB3pEEoIEGJARbErRbEh1lEUexl1LKNTv2lO05nfOMXeUHRUxDKCINjb2JUWOqGFXg2kAJJA9vfHs1b2yclJk0CAvPd1nesk++yy9t6nvM9626ZNm2jWrBkTJkxg2LBhNdrv9u3bGTZsGJ9//jmtWrVi8ODBFBYW8sEHH7Br1y7uuusu7rvvvhqPNz8/n5YtW5KXl1et5jaGYRiGYZRlxYoVDBgwgNzc3NJl0aLiDlTF6A00O346cA5K6C1GcfLPonyAEjTrfTpKJv4niq2/j3C2PQ95D5Ldw3dgfsbt/3JkxBZR1qj2nZ09v0DG9fmoytB8t7w9yov4KTLMY5NWc5Ch3caN/QUkVq6KWe8FJC4GotCfNGQwf4dm972hvwAZ4E1RUvrrbuzpKDRoiVsnC5jhxnY88tr4ikz3ITFwHwr7KnL7rkhUrAL+gETYeiS+
vqZiUdHIXYfdUftuThiyFUt0hS4IhdpVSFxsRyFYT7l1GjduXNodPbart19/Csr5mOv2GSsqjJpTIzs4qANuvfXW4A9/+EOwevXqMssLCgqCSy65JACCNm3aBLm5uTXa7+233x4AQUZGRrBp06bS5dOmTQuSk5MDIJg8eXKNx5uXlxcAQV5eXo23NQzDMIz9hdmzZweRSKRG20QikWD27Nl7dNycnJwACNrJ5g+CIAjmz5+v32wInoJgMQRLINgCwTMQpEDQAYIRECyF4BoIfgjBeRCMg2ArBHkQfAfBRRAcDsGjEJRAkA/BWgg2QLDGrbfbvRZAsMk9+8cOCArd8hUQbHbLn4LgaAiGQNA3auxBEARA0AmCUyAY6/YfxDyWu3PKjdpn7GMXBJdBMNDtJx+C1e78/Dq73XrzIXgFgvHueZY7z3y33pMQ9IfgGAimRO1rCwQTIbjeLff73enOe6e7Nv6R6675Kggec+feGoJz3f3KhOAJt4/tEBS4+1AEQbFb/p277nlR1z3ew+8f98iA4Gm3zcNxrns0QJDu9uHPv59bf+nSpXv0njVCamIH14nHojK2b99Ou3btKCgo4LnnnuPyyy+v1nZbtmyhXbt2FBUV8emnn3L88ceXef3Pf/4zv/3tbxk0aBBffPFFjcZkHgvDMAzjQMd3Y26HZp+r8/PvezusAyZOnMjll19OQUFBtY/ZvHlzJk2axODBg0sTsd8izBvwM9Tno7CVFqgS0COoUVo34HPkEWiBeh/8GHkripBnwfdLuBj1Nbje7S+JsqVVmxN/9rzYrbcdeQkau+fJyEPS3J1/vL4XPZGXZS0KqbqCyj0XreNco6UohKuNG9swFLrk8z1KoLTzdIK7dotR8nIf5E0odNfpLXedZqAwpOtRt+nJKD/jPOTtgPKeGs9WlLOQ6NZZ6vb3PGHo1C0ojCzaU5Hk/i5x42yAwq2au/3W1HPhk/Yr6ozu8Z6L01AS+xxg6dKldO/evcJtjJpREzt4vys327RpU9LS0gBYtWpVtbebOnUqRUVFdOnSpZyoABg1ahQAX375JWvXrq2dwRqGYRjGAYAXFRV1f45HdPfpdGDkyJEkFxZWuV309smFhQwePJh0JCDuQx2WOyND2ucn+FCdQtSj4APgJpTsPAAZ0p8gw70XEiFNKdvx+SXCJO5X3P6TkfGcQGiARxMrKlqhnIQ33X4KqFhUpKMwqKtRrsdLqIFbScwxUpCRnYsM9lh6AKciI3w3EgefuNdiRQVITAxD1aECZLB/jMqongb8A+WkbEUC7S8oiTlaVJS4c29A1aKiEwoHuw6VgO2Gwsi2UVZUQNihuwTlslzqxgW6lhXJgxtRWFs79/8ct11VogL3ui8ja6Ki7tnvhEVxcTE5OTkAtG/fvtrbzZyp4mdHHXVU3Ne7d+9OmzZtAJg1a9YejdEwDMMwDhSiRUV0n4XKxEV0/LoXAxnIMK2uKOmLjMUM4GTkZRiPyp9ejETAR1Hb9EQ5DJ8gMXM+MhQjyEBNcf93QJ6LtYSGaoCSmX+OPA5PuEciSure5dbJJ+xNEU9UNEc5GP9BBn8jKhYVPrb/BGToVyUudqO8j11xrtehyIif6cYy0Y01VlR4mkT9vcat3ws1+ANdr7boen2OvCpnRm2T4M5tN2GfCT++gLKiwieP3+r2uwwJjYCyosLTADWoexjdo0eBcW79YuIzFnkb1gOzZ89m6dKlFCYnV8urBro/hcnJJir2A/Y7YfHUU0+xefNmmjRpwhlnnFHt7ZYvXw5Aly5dKlynU6dOZdatiJ07d5Kfn1/mYRiGYRgHGrGiIrb7czyREG04X49CYLa77c8GLqxgu+jt+6K+A2e77XYAP0PG46XAr91YZqOZdFAozFfu9fNQcvIU9/89KLl7ATJq2yFxsZpQVKxAhnljt+xhlPTbDBk7xcjL4f+uSFSMQUJoAnAz8Tt0e1HhqUpc5CCDuyXly3H6Y45CXoBFKJG7BWEi
eqxQgdDDk4AExSY3ZlBjwOlu7KOB7oTJ1BCKAp+sXuyuRUskMOYgb090RaoE4Hfo/n+M7kOsqAAlTj+ChOMT7lgPoiT1xDjrjyVsVjd79mwyMjLo3r17jULuAAoKCkxU7AfsV8Jizpw5/OxnPwPgt7/9LW3btq32tv4N2KxZswrXSU5W0bmqhMK9995Ly5YtSx+dO3eu9jgMwzAMY3+gIlHhiScu4omK95EH4WqgH+q7UJG4iBYVA5C34irgFNRvoRMywFsjgzcdxe93R8aqFxWTgaluP2cio/xy1NthOqoA5cXFXEJR8Yo71h2oatE4NGvuS58mIo9JPFHxLTJ+W6HwoXjXKJ6o8JyAKlgdi7o+e3GRQ8U5Fv6YLdFsfVt3/ocgEZVAxeLCh3YlI+/OSHfNzkWdrLug/IoM5DHxRn20DyARiYsp6D3yOQoFa488IdE9NCAUF8e4bR6Mef1JJOh6oepei5GHZj4SG4/HrB9PVBgHNjXuY/Hzn/+cSZMm1fhAY8aM4YQTTqjw9dWrV3POOedQWFjIiBEj+OUvf1njY9QWv/rVr7jzzjtL/8/PzzdxYRiGYRwwVCUqPNH9BCoTFT5+ILad2KuEHbH99tciA7kjMjCXA13R7P+HaFZ9JOra/B0SE2+gXIYLUViPFxVnEIYDjUHx/5cio/ZmlAexlrKiogvyjvR0Y2+CZv99eVXfwG2He80nFx+CysGOQR6VaHEB8iyMJL6oADVw242SuN8A3kVGfSsqTtz2x/wTEiB3oc7f76LSsiCvgc+18InR0aLC9+k4C3k7vkK9Llqh+5eKQqE8Xqjgnt9z4+2IQtFw2xMzBs+zSCRuQaV+W6BGddGi4mX3eBOFruW6fXkhciMmKg5Waiws1q5dy6JFi2p8oMLC2JSpkPXr1zNkyBBWrFjB6aefzssvv1zt5DBP8+b6aqisJbsfQ1UZ7UlJSSQlxXPwGYZhGMb+T2ZmJu1RZZ2rkUH6LZqNjsUbys8hL4IXFe+g8JzYoORUZCgWI4P5CcrP5r+DZrw/dcfujsKLDkUC4F03niuRR6IbmtEe6tYdGXPcMW6fF7r1U5GR+hPU68KLituQF6QTqlp0LqGoCAhn6CMoRGo7yivwFZiuijperLg4FjXum+eOEc2n7rU+yLjPQsZ8KyoWFZ5G7nyOQkIE5NV5Gc34pxD27vB5ItsoKyoiyLvQzY31XZQLcRLKYznM7ZOo9QNk+E9EVagGI2FRmbgYi5LrS1C+zAPA/Sik7X3KiopxSLQtQ96YWbNm0b9/fx5018hXcDJRcXBRY2Hx/PPP8/zzz9faADZu3MjgwYPJzs5m6NChTJw48XsZ9b5T98qVKytcZ/Xq1WXWNQzDMIyDkZKSEiKRCO+gWf3eqJJSW2Tgx3IjCsE5DHkA/otCngYQegw8C9067VESdmNCUeJFymnI67AKeQ68z38YCnmaBPwSiQJQxSFQqEw/JB78rPpTlBUVDdGs/ndIjPzO/f9nN572SBzFiopI1APkLQAZ6lC1uDgceRWWuf+9uIgVFa+ixnRDqVpU+OZ4N6PGeAuRN2Ar8kykEiZqJyBB9x3KbYgWFUuQ8d4HeYfy3X4ykHga79b14mK3O+5E5Onwnbn9/fC5GtHi4k3krQB5iJajXI5/uvWPoryoyELiYdasWWRmZpZ6tr4lTNQ2UXFwUac5Fps2bWLw4MEsWLCAIUOGMGnSJBo3bvy99nXEEUcAMG3atLivL1u2rLTb54ABA77fgA3DMAxjP2TmzJlEIhEOO+yw0mW+DOc/0OxwV+Ql+Ni9noNmvr0B77dcjHIKFqLSp+uj1lmIZsYPRZWiQGLiNcqHCJ2JPAQrcJWVkHfjPeSRuCxq3QZIHPwceTrGuOXxREVjt8++qPxsW3deFxJ2gx7p1kumrKiI5RBkCH1L2VKwVyGx8zESF54Ut8165OmIJypORbP/yVQuKtai65uJvDM9kQDMdmNJp2wI0hvu+nyG8kS2
uXPajpLXD3HnvBXloByPxN1x7u/xyJORhwz/F904R7hrtM3t9zyUdD8R5Vx0dWN9GgnFacjL0su95sXFy27cr6IwrVhR4QmCgA2RiImKg5Qaeyxqi82bNzN48GDmzZvHkCFDmDx5Mk2aNKl6wwo488wzSUxMZOXKlXz22WflelmMGzcOgEGDBtGhQ4c9GrthGIZh7C/MnDmTI444gnbA1k2bSnMevv76a0B5BTNQxaK+yMj/HZp1zkAGva8a9BWaFb8SGcqvuGMMRwZptKjYirwBCYSiJJYLkaE5F3kpJiBvxp9i1vMlYC9CRut4d6xVxBcVOcio7Y8M+L+i0KY7kSDIQ8atz0OInUXd7M4jD4X17HbnDaEYuMo9R3suViIR0g6F/3xLWVExDAmmFchQ34rCoXDrei8Jbh8nI/Hyjrsu36Fr2sE9PG+ga9cDeQ5AyeK48+7sjrkW5UosQYb9KUgA+OL9zwBfuLEnuTE3d+PId/tqTui5eBH4Gr1XOqL8lK+ijt3LPa8mbJ43FIVIzaO8qPCUlMSrc2UcDNSJxyI3N5chQ4Ywd+5chg4dWiNRMWTIEHr37s2ECRPKLG/dujU33XQTADfffDPffvtt6WszZszgb3/7GwC//vWvMQzDMIz9nfnz59OoUaNK1/Gioh8KA0rFlSmNRDjmmGPIAO5GYUr/RbPhi5HReQzyQDwNzEJN1Lyh3BuFEl2EYudfdtscgkTFTDSbPZP4pVCjuRAZp88gAzieqPBJyQmowtElyEiOFhVJhKKiC8onaIU8BIeimflL0Sx+S2Qk+yRnL1xw53sxCrvKd/v1SemxTeyucuf7FhIAS5CH5AfABmSwR4uK0e4curpx5CJx8Yw7p2ei9p2HDP+jkZB6CnlC+iIDvRDlsXhRMRx1HR+GxMVn7vy2IW9Hl6j1xyChMtQdvyVwBBIr093+16L7/zDyTjRDIsx7dvqgXJUvkdjKRWFqx1CWXm69ZUiwpCJPUl+gf//+GPWLOvFYXHfddcyePZtIJEKbNm1KBUEsI0eOZOTIkWWWLV26lBUrVpCXl1du/XvuuYevv/6aL774gtTUVAYPHsy2bdt4//33KS4u5s477+Tss8/eG6dkGIZhGLXG/PnzSU9Ppx1h1aVYokXFLShRtw0SAcuQ4R1bEepPyBi/Dhns76FE7edRcnI/ylZ+OhfF6j+HwmZORIbpZjTjvgmJiwFUPFP5ODKEFyGj40mUAwHlRYXnAnc8n2idhLwFOYSiwoc4fYkE0efuPH7klrdy+/LdEJJRjsDjyKh/AHlpbnWv93Bj3IryGhqjkKFPkTFdjISED086DF3nl5DnZ3TU+L24WIHExPNIkPgQr3ORod4Q5Vd8hhKjb0ECKd+N921CUXG6O68RSExMdfvy8RmpSPy85K7XYe4a5LlrtwTd4wboHl6CBN97qDRuK8LcliUojOkkN+5x7nxepGynbk+056Ij6kjekrDaWHUb3RkHPnUiLHyuQxAEvPzyyxWul5KSUk5YVEbTpk356KOP+Oc//8kLL7zA1KlTSUxM5Nhjj+XHP/4xF110UdU7MQzDMIw6xIuKfmhG+wPKG2fxRMUGlFsQQQbgYW57zxwUanMKMmwT0Ox3QGhYDiP0QDRARulqNCM+DxkN6cjQ7okM/UVULC4eR8blfLddIpohB4VgxRMVfsa8NTL8i5ABv5pQVHhB8gIy1i9FHpVH3LbxxMV45K1ojrwB8934ElF+yGpkNLdw+3/evZ6BvCheVPixnoBERz+Ud+AT2D0JKMTpcSTEbkEeo4eQV+Q85DlYiK5zT2TkJyIh8QllRUUD1ODvMXfeGZQVF+8gIdIHJVY/5q5BW3SPGiMBNtVdu83u/2JCzwVIoMxx12G9u26ViQqPD7nyDQljSxmbuKgfRAK701WSn59Py5YtycvLq7JUrWEYhmF8X7yo6IuM5V7IAH7FPQdBUKGoSEGhLiuRETgBGb1/RF6CN5GBPBoZiiCj
vRgZtC8Ag1AfCm8cNkC5Ea+ixN6taKZ7BGG+QA4yXA+lrLjY6I73GZQpRXsKmpH/OQq1akDZpm1QttfCdpQX0AzNuHtR8Zw7r+NQgvpulHS8BnkBfuS234mSxh9DxvV3UdfSl8m9HuUVtHaP55Ch3ReV3I0VFZ4SJC7eQV6AUYTiIlpUgYTAULfvQ1ETvxvduP+CvA3t3LoDUVjRcPfwouIJygq1E9359Ubeh5NRfszTyHPRxF2fVMKQrM5uLNORV2SB219nJNpOcPvZgERQZ+KLio1UnFsTTfR1MJPzwKQmdnCdJW8bhmEYhhHiRUUaMnL7orASb9C9QtjpOlpUrEeioiMyyDsiUQISF5ehZnCxogLCvg4jkfH6H7fciwuQiPgK5RkMQUZqdBJyinuO9VwchgzULcgAvhGFaaW4477qjn0BZYUEMX83dtssQzPpvZE34UmUN3CLO0ZrlF9wHBIFScjQfwqJioVuTOsJDVwvLp50x7kRhf34src/pGJRgVsW3fp3nHveTlljevny5XTv3p1NyFOwmbLN4jogA38NCgGbjXIkoj0V0aJi7ty59OvXD9y1XYDeMxe4a3sbypuZgsKxRiBvxlokuG4E7nXX0QufjSjEaj0SnP+jYlHxOOVLDFfEjei9kYv6iOXn51exhXEgY8LCMAzDMOqYaFFxPuoJ0BvF/beMWm8cmnkejAxoLyo6EXoYEigrLuJ5KjzRZVjPQeFOnyAB0RfNyE9CXonRyACOFw6T4p6jxcV77rkLmk3/KTIuL0ShO88goxXiiwvc8Uuixr3Mnc8E5L34JzKcfcL1W8iw2Ykauc1G3oQOyIvx99jzdx6LzkhU5bjzHwjch0KmmlN5pZtYcfGkG+caQgHTrVs3li1bpl4OzrCORCJlmsWtc4+GyINyCarC1ICyosLvM1pcXEkoKhJRvsY0QhEzHr1XgDLH9CKladOmdO/enY7Im/IG8ohUJCr8WLa6ZZWJi7HuWOuBZVlZlaxpHAyYsDAMwzCMOiSeqMhAxnQxCk+J5gkUJ98aeSM6EQqKwD0nIXExDHkYBhBfVETzX2SMnoIETSKqFvQqCuE5ExmSC5AHI7Y6UAoSDuuR8b+DsBdDI+Q5GIzClQJUcWkJ8sQMpnzPBy8qfA5GCsqVmIDyIf6JBIUXI+OQWBnixv9T5CHpi/o3ACxFxrL3/ESHaP0MCZNTkKhoQFlRV1noT7S42IUEQizdunUrM1sf2yxu1qxZNGvWjNTUVJ5COQ5XU3EoUXp6eqm4eAYJoB8hQ/5fSCQuXryYoqIiCZriYnDn7o85d+5c0tPV6m/ZsmV0796dZkj8/Y7KRYUff7TnJRY/ljlu/926xb6bjYMNExaGYRiGUYekp6dzKCo76kVFdGWmWHFxFYqf/xh1a+6CjGCPFxfrUD5BP2R07o6znhcX/0U5FiehMKjmhLkVXlS0dQ+ILy6WoipRycjw70aYSNwDzcJ/hsTKSHfMDSjPoiEKv/GGrBcVEUJvQQPU/foKFOo0FBm6u9CM+OPIWN6JkriPQt29fbfp6JlzUIjTaYQG8d9Rv40+MdcJqhf648VFH3cuLwIJCQmV9mzwxnl0v4fFixeTmppaapC/TcX5CdHi4iGUHP6e227x4sX07NkTgGInKvwxGzZsyNysrFJRAaFXpXv37oB6m0RXFIsncCoTFyYq6icmLAzDMAxjH7N48WIyMzOZO3cuoNn9WShePlpUJLrnYhSuU4QM6XQUcvQyMr7Pd+t6obADNZdrjkrQJlJeWIDExWsoB2ELChnyouJlNPvvRYUnDXkuVqHk8tZu7BvQjH4mCj9ag0rAFiNvw8mo/8NEZPxuQP0kTndj2+nWbUzF1aJ2obyJAHgU5Vf8EAkJ31fiNTfGqYRGTrSRm5WVRSQS4fDDD+c995o3oPtRnpqE/iSgMCIvYGZXI/QnVjD07NmzVFxsomxOSDzS09PLeT+iRUU8du3aFXd5tLj4l1tWmdfE
jz9WXJioqL/USYM8wzAMw6ivLF68mF69etFyxw569OhBT2TQ90RegDdi1k9ERvcGFJqTjgz9a5Gx/jIypovc+hFknDdBnoNClPxbTPkcBi8qNrp9TUEJvZOQwBlMWVEBarC3HoVatURG502oypBP3D7NjSHHHduHYZ2FxMfHlK14lOjG+jnycMQTFTvdowHy2lyN8it+igzeb935XUTlouLwww8nIyOD2bNnM8e9Npb4xBrU81COwuMVrB99rNmzZ5ORkVHBmpXjxUV+06bVrqQUBAH5TZtWKSqqwosLf23upOqqTtHX5k5MVNRnzGNhGIZhGPsILyp6Ik/A28hA74sSnDuiGX1QLgKox8ESZJA3Q4a+z4E4BM3i+45Q3nOxCnktfuD+ju4G7YkWFdeiSkLjkVHeCQmI/yGR4MXFYmRgHobCoJ4kNDoL3HhudMc7wx1jBcoXSEN5Fu8jQXGhWy8ZJTuvQEbJEjfO7lFj9aIiAQmVle6aXYZCuHIJZ85Boih25tyLCo8XF4cffniZ2XnPnoT+7Imo8PTs2ZNt27bVaJuarl8R0Z6L6nhNoHzOiImK+ol5LAzDMAyjlsjOzqZJkyZxX/Oioo37vwgZ9KtQGdCjkbhogsTFG0hUjEF9DiYT5kqADPndqCJQO2TEf4sM9OVIpPRETeJWIY+HJw8lOi9B1YfOc8suQYLHN6N7G4U1baByURFvNr8BEjoBMrb/4bY5EfiNe92LiiUo1OtEFFq1GDW889cpVlRkE3Z4vgOJDN+ELd7Meayo8FTkuagq9Cf2XGtbVOwPeHGxrXnzGnlNtjVvbqKiHmMeC8MwDMOoBbKzs0lLS6Md5TsNR4uKbih34SNU9vMQVOL0aEJxcTRK/p2Ich+OdeskojCgtchgP4ywlOjFqH/CKmR0t0BGeSoSHMvcWDojEXEMEhALUNJ1BBn6l7j1phKKi3x3rE5uu1dQXkNVs/leXPw/JIz6EF9UdEQhXrj9/wUJqytRTkWsqGiP8iEilO2T0LBhw3Iz5xWJCk+s56KqhOnYc/WlWw8mUeGJrWRVHaxPRf3GhIVhGIZh7CFeVGQg0fAWobiIFRV3AJejMqcfA0eipOD3UeLz0SgEqQB5G36BEpQno8pEq5GB3hB5J75EouJslOfQnFBUtEQGfLIb5zJ3rFzCUrWPI2/Cb9z/UFZcNELejWHIoJ+AhMUAFIYVLaLiiYv/oNyNBUggvAJcR3xRgRvPOGTY56NE7pvcucaKCihb7WmuS5b246hKVHiixcX3Df052ESFYXwfTFgYhmEYxh4QLSpuR0ZyEqHRDZQTFa+hHIibkMeiCzLY5yFjeiWqpHQjmvHfjYRDMTLUI8g7kY1m9M9HAqArMtZBnopkwrKy3dzzSne8rqh0a4B6Y9wL/JGw2Z0XF1NQGdXZKB9jDnCNG5sPGapIXPjZ/HnA6NGjee6553jAnXsa8UVFbJ+Eh1EoVDrxRYX3MkT3ZPDjqAleXGRmZhJUUiI2miAISEhIYHZWlokKw8CEhWEYhmF8b+KJig0oZ6EB6jexEhnxdxCKiqmoQtIQ5IFIRGLkJWRUp6FSqpchI7oE9aQY6v7/P5SLcZ3bjy8j2x4Z4atQ+FMzyvaCSEH5GEnu/wLkiShGRvrvKC8uTkFJ4K+7464DBrnXffJyReLCz+YvXbqU7t27c+qpp3LNNdfwKMovOS3qWlaWLP04Cov6JdUTFd+XjIyMSvtOxKOm6xvGwYwJC8MwDMP4HlQkKvoib0FLJAbeQnkDIygvKhKB3kgYRNzyEuA44FLkTdiNvB+73fpFyNAfhMKoAncc38eis3te7sbZAiVEe3ERLSrykafjDLcsnrho5563IQFyBmX7PVQmLpo3b87SrKzSpmtXX626S9dccw3PIk9OdfskPO/Wj632VFuiwjCMPceEhWEYhmHUkHiiYj1qGOcb3LVF+RLtUQO5v6Gk6xGEoqIBEgy9CPtQ9EAdqqcgz8d3SCgk
ovyIyciLMBQ1oWtH2GCuMRIKPlfiORSKdIV7+B99LyqSkVdjKxIMi1Hi8mdu/E3cvl9E3a6PQknbsY32KhIXBQUF5a7d1VdfzdFHH02/fv32OFnaRIVh7F+YsDAMwzCMGpKZmUl7JBC8qPgBykFojPIZGiOjvwGqaPQlEhWDCT0PuW5/SSjXoh2alS9AydzHE3bOTkTN7nqiErC7UG6FT9T2vR68uJiJKk8tR6IgQOFEOygrKopRkvhzbr+tkDekMaoe9SIqeXsk8UWFJ7o6U9OmTdm+fXuF1y89PZ25c+fSr1+/750sbaLCMPY/rI+FYRiGYVTA0qVLSU5OLrd8x44drEMlUd9AoqIYiYMZwBco5GkFMuIPBW5GXoaGSADkIk9EEjL2vTh4BSVKn+32l4RExTKUO9ESVVgaj5KiA/d6Q+RdKEGds18CDkd5DNuQOBiDvBNNCbtxNwL+i3pMbHSvf+zGPQ54yB27PxWLCihbnSkrK6uSNYUXF5saNKhRn4RNDRqYqDCM/RTzWBiGYRhGHJYuXUrPnj3j9qXwrEdhPM1RdaZ3gDdRKNJxqPpSHupFsRJVcuqLhERDJBIaEYYyfYaqQx0PnIqM/wRCUZGNStGeCHwK/NWNYzjyYDREomIcKknbyI2vBImYR5CAGY1CrLyouN/9/yDwAfJwZCGvyTx3jIfdWKK7TXuicx6ys7NJTU2Ns1Z50tPT2bVrV7XW9dR0fcMw9h3msTAMwzCMGLyoyEBJ1L6zs2fFihWADP9FyNj/Bwo9OhKJiBUod6Iv8mh8iqoqPYZ+fFuiHAafWzENJXqfivpZLENeg2WEjeHeQqJjLOo6vQ6Ji0lun28iUZGOhMVc5PH4DoVX+epPz7llLyNB8B3qY3G+O3YO8njMA7766qu43aY931dUGIZx8GEeC8MwDMOIIlpU/ISyVYsikQg5OTmkpKSQjnIs/oeSnjeiEqrnIDHxOjL0z0OJySuQKJgOvADchTwY24CvgHeB090621GFqRko1GkpEhUnoLK1vmlcMTLq/4EEzgxUsamXG9dwlDg+HwmRzu4Yj6Pci/+hcrcXIVHxpDtPn/vx1VdfcfTRRwPxm9+ZqDAMIxoTFoZhGIbhiCcqoGzVIy8qfJ+JXwGfoFyGdsgzcDbyRowHPkd5C6NQ5aV3kLB4FLgQiYq3kSAZjrwUoPyK5cjj8C6qLvUTwr4UecBIFAL1MPIwnEcoKo4GNiOPx8XAmW6/vwf+nxvbVuQx+R8SHt+3OpOJCsMwwISFYRiGYQAViwrP9Sj06FXkVbgQJWjvQCLjEGSkbwC6o8Zzq5AX4ERCEXAaSoJ+AXkcslFDvBHIO+FTxRPc8lXueP8DnkZeEZ+30Qj4EfKGTEGiI1pU/M/t90dR5+FF0v8hL8v777/PkCFDSqst1bQ6k4kKwzA8JiwMwzCMek9FoqIEhSo1Q5We+gMdkMH+TyQ0LkAlZBcDS5Cx3whYiJK3b0EejdeRxyIJeTQaAM+jfhGLUH+K4e6Yye5RgpK6fU7GVCQ+fMfqQ1A52G9Q+NVq1IyvIlHhuREJoseAIUOGkJ2dTWZmJkElJWKjCYKApk2bkp2VZaLCMIxSTFgYhmEY9R7fl2IoZUXFPNSEbpd7pCPPw2Goi/a5KGehEBn5iW79L92yHm6fuSjM6ES3bQSFPkVQInVblIC90+0zAQmI1W7bQ5AXoiUKYSpCoU1PIc/HESgMqiEKvfofypmIJypAXpABwDXAs0CvXr2qXfLVU1mfCsMw6icmLAzDMIx6T2FhIZFIhHdQYvP1SFSsQ8b8ctT0zs/Nj0RN5JqgsKQi1KiuHfJILEeiohsKUZoDnOVeD9wD5LmYB3yNvAvvIG/HWUhULAdau301R2IBJC6mu+1w400CJrp9HIPCp+KxCuVddAJOcucX2zHbMAzj+2DCwjAMwzAIcwd8VaQjUSJ0CkrIXoMSm/siYXAoynP4Dhn9zZAHogsKl2qMRMVE5F04
zx3HF60NUM+K6SgEajTqsj0ZhTJ1p6yo8FzgXn8ICZpDUFWon7t9HYY8JzcAV1G270S0qOjuxhLdMbtZs2Zs27atZhfOMAzDYX0sDMMwDMOxcOFC5qPwohwkKkA5DS2QsJiPRIHvC9EceS5K3LoJaNbOi4ozkKgojjpOxL32IgqVuhZ5KFqikKe3UPft7u640Sxzy092xx7plv3djfNb1GH7S8r2nYgnKqDmHbMNwzAqwjwWhmEYhgEsWrSI3r17kwEchXIiDkEeApDR/g4SFiXIm5BE6KkI3PKGqH/FROSJGI5EyiokVDqj/IzxKOn7MsKwJ+8JSUXCosgt8/svQuJgOuqMPRSViW2LBMRu1PF7HfJA+KZ2O1BORTxR4ftQLF68mJ49e+7BFTQMo75jHgvDMAyj3hMtKn6CyrqegrpY/zdqPe+5WIhKte5AoVDeUPfdrychQXEG8gQsQV6NHORNGI/K0V7hXl+G+lREkHD5AAmMFkiseHGRiErLjkPJ5Peg6lJXIQ/GHCRebkVhXA2RuHgM9cswUWEYxt7EhIVhGIZR74lXFepWlGz9Cmok5/HiYinyCuSjClAJ7nkiyos4A/W0WI08BR1RWdiHkYi4DImT1e7/FOTVmIISuBehErW7CcXFG8iTMhAJhN+5MY0DPgZ6Iy/IzcBtSFxE3L6eBZ5w65uoMAxjb2DCwjAMw6j3fPfdd6xDRvtjbtmDyJtwEdAqZv3TgKbASmS4FyBR0QxVd1qMjP1VKJm7Fwpjeh31xdiKvBYrga6oetS7wIeoUlNnFGY1Dnk/drvn54AhSCDcgMTESDfWjsDfCL0SNyBx0ceNeb5b705MVBiGsXewHAvDMAzDAObMmUNGRgYPol4Q24BRhDkW0XyNQqB6oYTr7Uhc+OZ1O5FwiAAnuOeuyEvxMDATeTyuAo5FAuE1VCL2PPf/q267F5BQmY76YFyDPBij3XEeRGFRzxOKCl/S9noUtpWLRNI8KO2YbaLCMIzaxjwWhmEYRr1n7ty5ZGRkkIEM/ulIIFQkKlYjUdENGfAtkbeiAIVGHQtc4tZ9w21XjITKzSgvIkJYuWk86mlxPsrbOAV5IhqivIrPUFL22W7/uOfTULnY1ii8KVpUADwDvI+ExLRp0wiCgPymTU1UGIaxVzCPhWEYhlGviRYVhagi02VINPjSrp6vgbWE/S0CZPjjnpORMGiKhEFjlHNRhDp0N0BehESUIF6AQpSOB05F4mQXSvJujQTGR6h3xgnuta1IpGx3z5e6Mb2JksG9XHgG+DcKeZo2bRpHHnkkgPWpMAxjr2HCwjAMw6i3xIqKZsCPkRdgHqrmBBIXX6Myrn1QPkMeqtzUAIVFNUCei+bIc1CCPAolSFzsRh21C1DFqN0oofpolMdRgATIGvc4BImXbkA2Ejl90Q93vttvCfAS6kMxlFAEPUN8UWEYhrE3sVAowzAMo14SLSoGI6N+GGGn6nTkeVgGfIFERW+UWP028FvU1wJUxWk38ig0cPtKQB6N4UAGMv43oG7dIJFxJRIsf0OiZiVK+G6HelNkI09EAurm3Q9VnDoMiZovUM7GciRCEjBRYRhG3WHCwjAMw6h3RIuKn6Cu1Yeiykxj3TrzULhROxRq1A6FGb0JTEaei9eBqUhMNEShSQtRXsMSt/x9t6/BqNJTQyQ4GiFxcazb5zcoFyKCRMRnwAQUFjUdhUz5H+1Wbgz3AwtQQvZDwF2YqDAMo+6wUCjDMAyj3pGRkUELwr4Vu5Ax/zkSGbnIy9ATeQK2Ie9BFjL4T0TVmz5A4gLgTJT4Pd/9v8Dt7wskKk5EYqIJquZUhATFe0i0fOe2S0VC4iVUDSrXPR5CouNGyvahWLFiBV26dCESiZRWfzJRYRhGXWDCwjAMw6hXLFiwAFCY00OEXoh1qJTr88BTaPb/NLfNcuBJ1PV6JErEzkOCIUA5FGtRiFKi2/c0JA6ORaKisVteQpjU/QgSGeuRYDkHiYmn
kZgZgkKl5rnHg6jZ3XuUFRUAQRAQiURMVBiGUWeYsDAMwzDqDQsWLKBv375kIG/F2yhZ24dEdUThSlOBF4EfoCpPz6P8hkLUGbsPaoS3HeVl/A+JhEtQOdmJqKFdMfApClu6Bv3o7kLlZZ9Gid6tUfnaD1BS9hLkzTjf7evnKNRpU4MGzNu9u7QPRbSo8ARBgGEYRl1hwsIwDMOoF0SLip+g5nTtgP+g8rHtkRfiCtQz4v+A3yBxMRgZ+segUKn7kNfhHCQ6vkQiYSaq9DQbGIS8IOvdMZq4ff8XGIM6bPcDXnbLC5EISQAuRKLiKSR+1gPBrl0AHHrooayYMaOcqDAMw6hrTFgYhmEYBz3xRMVyJBRaoSZ2bwEXI29EH+SFeAeJjHOQMOjmth8D3IvyKN5HYUvXoYpO/0O9Jc4CNgIt3LpjUBjTp26fR6FQqcGoOtQW4Ah3nNORqHgA5WxEeyI2b95ci1fGMAyj9jBhYRiGYRzUVCQqVqG+DycjcfEcCmFqg/IjNqAwqfYoPKkpkIaSuZNRmdeXkCcD4GPgpygXoh1KtG6HwpruBv6EvA9DUXjTj4Eu7jlAIVg93HEfQAnasaLCMAxjf8aEhWEYhnHQEisqrkY5E6uQgOjs1rvYPT+HBMAu1LRuMErYXo9EQlNUvelYFAr1AqrYVACMQqFNu9z+l6DE8ERUdvYvKOSpG/rxvRDlcTwNXI6Sv79Flaeex0SFYRgHHnXSx2Ljxo385z//YdSoUaSmptK4cWOaNm1K7969ue2228jJyfle+01JSSESiVT4GDRoUO2eiGEYhrFfk5mZSXvCsrKgUKMuSBCsiFr3ImA0Eh6noXClHUgcnAp0RcnaSajD9lHI4zEdlZo9CzW42+b2v8HtvwT92EZQg70Gbtn5KGTqXZT4vRzlaExCSeR9gUgkUnsXwzAMYy9TJx6LO++8kxdeeIGEhAT69evHiBEj2LZtG9988w0PPvggTz/9NBMmTGDYsGHfa/8XXHABycnJ5Zb36NFjT4duGIZhHEAUFRURiUR4B3icsKt2N/ec4559GvRFSITkIVHRBnXELkaeh1ZIXASoed37SFRchkTFaiQ2OqME7jWoBG3EbV/itvVC4wLUsXscqgj1hTueH+8DSFyY58IwjAOBOhEWbdq04Q9/+APXXnstHTt2LF1eWFjI9ddfz/jx47nkkktYsmQJrVu3rvH+77vvPlJSUmpxxIZhGMaBiu/v8KD7vzJxUYSM/4Yo7CkBhT6VEDa3a4DCl8YhT8avkWdiDfJqdHL7zEcio5nbb+C2TSAMFyhCwgRUHSoF5WjcgTpog4kLwzAOHOpEWDzwwANxlycnJ/PUU08xZcoUcnNzmTJlCpdffvk+Hp1hGIZxsFGRuPCeihyUJ9EUGf9tCPMpipGoSHTrvohExcnA75CgWIWESlckMlYjkdAV2Or2VeS2b4hERpHbd0OU49HA7Tsd+IpQXCxCYVsNGzZklys5axiGsT9SJzkWldG0aVPS0tIAWLVqVR2PxjAMwzhYCIKgtHv14ygECSQuSoBslDydiISE75Cd5P4uQj0onkLlaP+MEq7XoNCntkhgeFHRxa1/E2qQ18Ad04dDFaNwqMZuHBegvI3NSGh8BZyBumyvB7KysmrzchiGYdQ6+11VqOLi4tLk7fbt23+vfYwdO5bc3Fx27dpFhw4dOPnkkznppJNqcZSGYRjGgYj3XHi/+XUoF6IIeSm2AIegH8cWSGA0QALgDfc4AgmQqW6blijZej0SGSlIVLyGOmx3RB6OYuA8t6+I23cxyuVogsTHdJQ4/iPk5fB9LObOnUt6enrtXxDDMPYq8+bNIzMzs0bexoYNG5KVlXVAfub3O2Hx1FNPsXnzZpo0acIZZ5zxvfbxxz/+sdyygQMHMm7cOHr27Fnl9jt37mTnzp2l/+fn53+vcRiGYRj7H15cPIf6UuwEWqOwpzQkKAopKwB2oGpNnVAPihdQ2NIZSEjM
dftJoayoGAwMQ523X3H7GokERZIbT7Fb/0WUs/EjFJZ1mhvTE0C/fv0sx8IwDjDmzZtHv3791NfG5UllZ2eTmZnJ7NmzyczMZPv27WW2iUQitEefeT+h0KRJE7Kysujfvz9ZWVmkpqbWyflUh/0qFGrOnDn87Gc/A+C3v/0tbdu2rdH2Z511FuPGjWPJkiXs2LGD5cuX85///IcuXbrwzTffcMopp7Bx48Yq93PvvffSsmXL0kfnzp2r3MYwDMPY/5g5c2a5kq2RSIS+KGk6WlSkoJyIFqgB3g6UgF2MPArnoDCn91COxonAFGAZKi0bQSFRXlSc7h7LgB8iofAiCqX6AuV1JLn9PYd6YHhRkeuOeZvbRzs0i2kYxoGBFxUZqKy0Lx+dlpZGi+++o1evXrTcsaPM91MkEiEduATIQOIiEonQ+rvvSEtLo+WOHfTq1YvFixfXyTlVh0hQwymQn//850yaNKnGBxozZgwnnHBCha+vXr2aE044gRUrVjBixAgmTpxYa/W7c3NzOfLII8nJyeH222/n3//+d6Xrx/NYdO7cmby8PFq0aFErYzIMwzD2LjNnzuSII46gHQpT8p6Kvqg87JGUFRV+Cilwj3zkuWjsHgmoDOxkJBJORCLiE+SVOAxYikTD6chbsdQtPxTlbvwe+AwZDoOQuHgThVdluvUORaKiLfAM8C9gDhYOZRgHCtGiwjfmfBy4H1gHDAA2AScA/0PhjqDCDbeiiYuxwD9R3pZf/0TgU+Qhzc7O3meei/z8fFq2bFktO7jG0x9r165l0aJFNR5UYWFhha+tX7+eIUOGsGLFCk4//XRefvnlWm0K1KZNG+644w7uuOMOJk+eXKWwSEpKIikpqdJ1DMMwjP0XLyoyUF+Ktwk9FTcBPVBYUjxR4X99WrjXfVhUExT6VAK8joTGLUgwvIuqQk1FJWZPQaKivdtPA/faOnesKe7v/6Eu3j9HDfIWIy/K8ZioMIwDkXiiIkCTCd8hcbENVX3rgb4vHkOeUS8qQA00dwJ/R99Bt6JQzTTk9ezVq9c+FRfVpcahUM8//zxBENT4MXz48Lj727hxI4MHDyY7O5uhQ4cyceLEvWLU9+nTB5BnxDAMwzh4iRYVP0GzfodBqai4GYmErW65LzkbLSpACdq+mV0emolLRN6I85CYmAxcDwxBs4hHAxtRxahWQHNULvZVt+7hwD+Q8PgMCY9NqNleDzeefNSJ20SFYRxYVCQqvPfzB+h7aDiQiiYjOqEqcOnIWwn6vtmGvkP8+mlIhPQHrkWhk/tjWFSdBmxu2rSJwYMHs2DBAoYMGcKkSZNo3Lhx1Rt+D7799lsAmjdvvlf2bxiGYdQ9saLialT5aTvKVzgTGf4RVK2pGfoRb04oKvJRF+016Ic+QNWiGiFvQyIKfUpAVaJAYUutUYhUW+TRmAQcg3IsfKL4EJSnMRq4AomJscBLSMh0BbJQFam5mKgwjAOFqkTFx8CH6HuoCwp/ykXfJ6OQ5/J15NU4FXkz30d5V73Q98EyJC5A4mJ/9FzUmbDYvHkzgwcPZt68eQwZMoTJkyfTpEmTvXa88ePHA3D00UfvtWMYhmEYdUc8UfENynk4G7gBJUbnoTKxuehH39MSiYGngYFolnAtClHoQdih24dNDUHhTo8jUTEQCYQ84CTgS9STYiVqprcY5WMMRHHTg1CI1HVu3QeQcfElJioM40AjMzOT9ij0siJRcRGafPAsBnojsdAdhVm+hsIiV6AKcme5dXPdsn7o+wj2T3FRJ1WhcnNzGTJkCHPnzmXo0KE1EhVDhgyhd+/eTJgwoczy119/nenTp5dbv6CggDvuuKM04fzOO+/c8xMwDMMw9iviiYoAzQwegX7YX0D5Di1RiEEL5JXId4+XUazzKtScLhvogLwaLdy+ctzrjZBI+ADNMF6MjIDjUaWn9u7/FchbciRwJfC5G4vvXQHyWHwNzEPhUiYqDOPAY9euXaxDBR4eB3ah
cKbtSFRkUFZU9EYipDdhU85hwIVoMmIkoahYiL6r2iEvamfUb6c/KhLRDgmb/YE68Vhcd911zJ49m0gkQps2bbjpppvirjdy5EhGjhxZZtnSpUtZsWIFeXl5ZZZ/+OGH3H///XTp0oWMjAxatWrF2rVrmTVrFlu2bKFhw4bcd999DB06dG+dlmEYhlFHHHHEEeVmCzejxncPIrHxkFv3cvccQSJgM/AR6jOxG/g1EhCvoDKRA5FA6EIoLj5xr29HORsZbvlwJEamIC/Jk0ikTEQeikEokXw3cBzykPhcihkzZjBw4EDmHqCNsQyjvuMrzz3o/r/EPQ9BYU1TCMUChJMLIHHREBiBcrB87aWFKGzqUOQJBX0ffYs8su/hqt7F9MOoK+pEWOTm5gK6AS+//HKF66WkpJQTFhUxcuRICgsLmTFjBtOmTSM3N5fExES6dOnCD3/4Q26++WYyMjJqY/iGYRjGfob/QfezhecjT0IrlD/xJEqyjhYXzVCexb/cNknAz1BCZRPk0n8TlZrNIBQXK1BllwbAr1CZ2CVo1rAVmnVsCfwXiZeRSOC86Pabg/peFCGjwIuKAQMG1Kg7r2EY+x/xxIUXBBPd81nlthI+jKgqUbEciYpn3Ov7U/PMGvexqI/UpH6vYRiGUXf4krI3oITIH0S9tguJiwXAj5G4eBr4KwpZuA55PAqAPigPYwJKojwPiYv/Am+hZO4RKAyhEFV2aYNETBLKy/jYrX8KyrG4H4Vb+dqEvr+GFxWGYRw8+GZ3tyJxsY0wIXskFYsLz/4kKvZqHwvDMAzD2F/xs4VPuP+vQt4D0A9etOdiDoqH3kiYVJmHEigbIpFwgdt2AkrU/hSFMd2ESsPmutcTUQJ3dGjDyUjEvEUYblUQM04TFYZxYJGTk0P//v3ZunVrpetFey52o6ac1fVc7E+ioqaYsDAMwzAOKqorLt5AORJNgdnIw3A+CnFa5tbvRCguPgZ+iLwZq1HYQneUdJmDwp66IpGRiOKpZ6KwhvEo7OoolFfRuHHj/dIoMAyjYnJycujWrRvtkEci9jO8cOFCMjMz2blzJxB+Fz2PvJvtKS8uznTPvtz1DlQtKoEDT1RAHVWFMgzDMIy9SRAEzAeeQD/E0eU+vLg4FHklhqEf9feRZyIBJWAvQwLCey7uQlVYVrt9dEKhTKDu3auQAVCExMMEJCpmuNeHoqTv9UBWVlbtnrBhGHsVLyoyUFGHvkhceBYuXEifPn1oVVRUutyHQ12OcrW+QxMRJwE9kbjIQ8UmvExogprnlaDQqSI0cRGgPhf7s6gA81gYhmEYByl+tvBlNFvYhdBzcR7KkxiAvBb/D/3Av+Rev5hQXOD+jiDPRgtUIrIzKgG5HImMFGQAfIQMgrYodKoZEi/3o9CohQsXkpaWVvsnbBjGXiFaVPhy1o+j3jORSIQFCxbQp08fuqDPfRJlcyxuRN8vm5FH9EtU8OFcNHERLRMi6PsFJCK+RN8ty936W4jvLdlfMGFhGIZhHJT4H/YrUAiCb4aXhfIreiNRMRolYntixcV89KO+FkhGoVCdkQHgn5chcbEMGIMqS21EDe9mA3cjEWKiwjAOLOKJCpBYAIkLLyoykGfyN8ijcWvUehEUIvmRe5wHnOGWR3ss/LrR4gL0neJ5hv1XXJiwMAzDMA46YmcLQcKgADWrK3T//4SyomK0e/biYjCaadyEREVfQjHhgyA6uecXUDhVRxQnfRZwAvAsYSUoExWGceBQkajwXI++S55CleBuQRXnUtB3z3UocXube3yBvJnnAqcTfoccTOLChIVhGIZxUBFPVGxGP/DJqFdFe5SI3QGFL6VEbT8aWISMgE5olvEQoBcKc9jllkXTAiVWrkVCJQ2JkMVoZrI9YdjE/mYIGIZRnqpERQn6fI9AeRPPoepyXVHVuOuRUChEouJT1LfmPCQqvOBojPK6DhZxYcLCMAzDOGioSFTsQMb/BtQJ+xQUppSARASE
4uI5FP88EomDXCQqOiFRUezWixYXTVBC58Oos/ZxyMvRHoVHZLr1TFwYxoFB//79aY9CmyoSFQkon+JqNBGxDomG85CggLKiYiQKfwpQ+eolKIm7J2WrKeUTNsmLoO+fPFQcoi9lxcVY9q/vFKsKZRiGYRwUVCUqWqIf6OGoDOwsZCAkIXGRg0TFFNTTYphb/2SU+J2AxEQjJC6K3DGKUbWXAW4bXwmqG/KI+FnIG4HbKF9NxjCM/Y+tW7eyDvW6eTzmNd+/phHqXzPJ/X0M8LXbpgDYioTFe0A/JCpK0PfNgqi/l6Lvk+3o++dnwFR3jN1IxKxFvXPaIK/IIcgzOghVuNtfvlNMWBiGYRgHPJFIhPZIEFQkKjx/QcbAWGAaobh4EhkIw9EsZVMUOtUo5liNCMXFd2gmcbF7XILCIGYAryBDIdcdw4uL01HX7eTk5No4dcMw9hJBEDAPeJCy4qKBe26ESsa+C4wCXkae0DFuWYC+W04lbMiZjRrgNUdlZ9ujMKclyNv5KhIOE5HIWIJESDNUOCKCSluvROGXi90xAJYt83Xs6g4LhTIMwzAOeGbMmMERRxzBe0gwnEN8UQHwNKrYtAV5KEAiZCFqkHeeW+YTLpvFOZ4XG/koPGGt+z9C2FBvHDJARrn/26BE7vdQL4sl1svCMPZ7ojtogyYHWqNQx9dROdhL0YQEwF+BXyJxARIVA5EH437gaDS5kI6+P5qjEMvxyJN6AQrV/BB4FDgCeTr6Ig/IXPSd8xUKv7oUCYv7gR49erB06VK6d+9euxehBpiwMAzDMA54BgwYUCou/olExUXEFxWPow7YX6Hch3+jBMkfoRjoRORhgLBErRcXEeShaAHsRDOHG932JYR9Ly52677g/h8F/BflYMwFlixZQo8ePfbwrA3D2BfEiovr0ATBF8hLOQx9/hPQZEK0uPjOvb4TmIcmMY5HgmItmtBYikrQbkKTGQEKgZqPJiGORt9lG1Do1DfAdDQR8iN33ATgX9S9uDBhYRiGYRwURIuLR5Bhfzn6AYeyouJht+x4ZPD3RLOKEcKwJR+o5MVFMgqVeg04E+VfrAd6oByMXW775W79i1DYwyfoR/95JCqmTJliosIwDjCixcVW1A/nSOR9KIlaz5eivguVlX0EhSt9ivKutgEPIRHRAX1fvI48E18CTyCPxOeooEQ+8BjwLfqeWYZETV/gBsKcBp9gXtfiwnIsDMMwjIMGLy7mIfHwPApBiCcqHkd9Jzai2vLjkTjwtVUiyDPRDP24v4Lin1uhEKrXUTJle5R42QiJjS7IWHgK5XAcCkxw2/UCzjrrLBYt8rWoDMM4UPA5F5PRd8kslGSdgMSFf0xAIZWbUQjke6gnzli3PAe4zy2fj74z3kbiwa9/PPoOuQT1wfkX8CZquHka8sqOoayouRqVxs1A4qIuci7MY2EYhmEcVER7Lh5GoQafoKTKaFHxAPIogMKb3kPJkedQtgFeC+ANlJh5HPrR/hr4mFB8tI86fg9keDxF2CfjJOAqd/x8IDMzk++++64Wz9owjL2Nrzw3GuVaPIYmHEBezF3os/8oqgKVg75PTgO6o++LS936LyGvxLHI8zkMhUSdB5yIPBINkOczQJMkX6EGfCOAfyBRczZhk06QuJiDPCKZmZkUFBTU3gWoBiYsDMMwjIOOaHGxGf3g3+pe86LCN5xKRx1z81ByNcgYABkCryLjIRNVaylAyZRN0CwjKIfCi4txbj/foWTM5sij8TqKo14PBCYqDOOAIl456x+551dQTkQREhunou+ELOQJbQakunU6Ia9CL+BvaNLjdOSZWIS8He3Q9xKEXbx7o8mKTW6d6UB/lLsREE6EPI6+l9YDwT4WFWDCwjAMwzhIiRYXoFCCOehHN1pUeENhN/B3yoqLV4EXgRPcYz0SEOvQj/oWJBZAhsTHqDRlW+Sl+Br1wXgPeU58NalFixaRlpZWuydsGMZeIZ6o8PwITVzcj0pU
n4SEQg/3AIkLkNdyN/oeSAQ6osTsH6JwqBRkmL/q1h+Jwqwao++j+cjT0QxVjzodeTlAHpEnCCdN6qphngkLwzAM46BlwIABioueN49+/fqxCYkDoJyh0AA1pvLiYh6qvjIIhUBtcNt0RzOL81HIAkhcrEEJmvloVvIb4HpkEHREP/oN0axm7969WbhwoYkLw9jPqUxUgCYMIsjYPwVNOKxHYZa9UOUmKCsuXkf5Ejcg74bvi5GAQqKgrLiIoKIR7yPP6hWoMpVnKSpC8Sx1KyrAhIVhGIZRD0hPT2fu3LlkZmbC7t2kAddS3lCIFhf/Q6IiBZWFzECiYhcKiYog42Go2/ZjlEjZGCVb/hjFP28BzkKGxxMoORNCcXH44YeTlZVF7969a/28DcP4/lRHVLyPch9uQd2wdyCP5UK3Tqy4WI4KRpxL+YpSvmRtrLgoQSFOG91YokVFZyRS/oNExfz586lLrCqUYRiGUS9IT08nMTGRdig8qS9hadhoEtCP9/+hH/T7CUtD7kTCIhElevdG5SOHovKSRyKPxMVoprEAiZXmKKnyx279du74vXv3pk1REX369GHhQm+KGIZR17Rq1Yr2KPE6nqhYhLyTnyMvRQvU1+YwNCHRAYkLXyDifJRHsRQJijPc8mhD3FeVAomLfiiv4kHiiwpQUYkpbt0MoG/fvixYsKCmp1trmLAwDMMw6g1ZWVmsR8ZAFvrRjxYXAUq6LkGhB6+inIrPUG7GbuTqb+jWTSMUF9PdPn0iZq5brw3qjLsNuIxQXOxG4mQkMgpMXBjG3mXBggUkJiZWa91Zs2axDlV5ejLmtW/d82HIKzkBVWjqivIstqK+OPHExb2oEpQXEL65ncf30XkT+AB9j/jJi1hRsQV9RzVH3pChaNIiMzOzWue4NzBhYRiGYdQbUlNTyc7OZi7qRREtLryo2I3qzb+Aki/HIg/HJOBDwpr1uG16IY/G2yjB+59o9rKBezRFwqKEUFwch0ImLkWJm9di4sIw9iYLFiygb9++HFJcTCQSqXL9lJQUQF7HX1FWXPitE1Fe1UpUqGEDobDYSnlxUeC2SSYUFBH0PZFA2EPnLRQ2uQl9N/zKHWNMzBhbAxe6/V5MWA2qqKioyvPbW1iOhWEYhlGv8OKiV69ePBfz2qFIUDyFQp5WIQFyLjIYJiMD4HS3fgKaqfwEVYe5jrADdwOgECVbtkQ/uMWoxv17qMHWb1Glqv7AbSjkoU+fPixYsMByLgyjlvCiIgPN6r+N8icqS3L2+RXDgHeRcQ8qyNAKNb/7LcqvuAF5Hye6dY5GwgIUHrkZhU41RmKgGeW9FKDvjKkoETsXhVRdG7WeTwCP9lxcjDwXdV0NymPCwjAMw6h3VCQuJqJZwQRkPOQgj0USCnloiCq6gMTFWyjG+XTkeYAwVKoZMhTykJHRGDW5ehzVtP8vsAKFVXRGFWUaorK4Ji4Mo3aIFhU/QblOvpdNReIiNmn7aeSJjBYXf0Wi4gIkLFq41ya654HI4J8DbEdFICoSFb4PxVvIM3qWW/YKmuS4lvLVpby4WIFCMq8CnqnknPYVJiwMwzCMekmsuFiKKjstQs3vegC/RvkYPgThbPf8Ogpt8DkVPyQMj/AEhMZGHjISnqCsqFiORIWvd3+1ezZxYRh7TjxRAWEydqy4yMnJoVu3buUqQV3jnr24+ApVgxrm9unb0J3lniegkMrmyPPQDX3Gm1JWVASUFRUTCTt170YTE15IxBMXw9B3SBc0MdEizjnta0xYGIZhGPWWaHGxCcVIg8rK/hjNCp6BQh6ixUUJClkYRigqooWFNxhA8dTjUYx2T9SZO56o8FyNZjk3oSTMnTt31sq5GkZ9oiJR4YkVF8uXL6dbt26koT4RsZWgosXFOJQj9Qn6XhiKErcBhgOzgYeB0aj6W3ckKgLCkrLRoqIQiYoe6PumCBno0UJiN/KM+GXjUN7FVW7/8c6pLsSFJW8bhmEY9RovLryo
6IoqNV2Dfsx7AX9CImAM8AaaHfyley6M2V+0yAhQffn/IBHiRcUyt78uhALE8zhhEqaJCsOoORWJit3II+m5EeU29YVST8WPUP7TYrfOIrcd6DvhTsLJgEKUvP0R+lwXoNDID4BjUN5FF+S5aERY+CE6/CniXj8dNdV8ComKRPfaBSiP4k3kyQQ4DzgCmOHWi+ZGt692QPPmzatzuWoV81gYhmEYhiMdiYUNqALUqZQVF6OBF1GoQjL64c932zanrLEQQfkTL6DY6ntQzsYqFG99KKpNn4xCGCKEsd/7QxKmYRyoZGZm0h55EqJFxf9DidhnAH9xy29AoYoT3Lo3oM/pMuDfwJduP/eg0KRrkADIQ0Z8e8JGdg1QYYaTUcW3Q1CluR3o+8F32I72WhShog6D3LoTkHHucyiKgDPdNi+6ZanATCDTbRPNWDeG9cDSrKzqXK5axTwWhmEYRr1m8eLF9OrViwzU5O4BNOP3ChIXIKPkBTTreAlKxN6JfuybIXFRgMRBdBhUG1ReNhcZNatRUvihaLazsXvOR6LifsK+GsuXx2vfZxhGVRQVFbEOeAd9rryo+Bx9tj9G+VMB8kwMRKGKN7jtU5CBPw2Jis/d9rtR48z/oZLRv0ahSBciT+Z7wPFICHRCYiIZiZACd7wG6HuiBAmYD1G+VqLbzyUo32IMoeho6F47GomYvyMP6PXoO2WZG/dY5NWYAyxdupTu3X2Q1L7DPBaGYRhGvSbe7KaPY34JGROfImPkR8AIVD4ykbD53S4kDiLIkIj2NVzhnp9EYmQkShRPRYZHIao68yiaZfwRMlC6d+/OsmXL6NatW62er2HUB4IgIBKJ8G/U7HI7cBMwClVPGgPcjMKMeqCQJc9vkEfgRpRDNQEJlKHoM3o48nisQx6LRkBb5HXoSziB0Ivw+2Er+l7wnotlyDO5FX13NEYTDhe4MYxHouIaZKxPQSFXq9F3TCskgCJoMmIS+h6pS1EBJiwMwzCMes727duJRCKls5s3Inf+cDRj+P+QiLgZCY5cZAS0QkZDMQp18j0roLy4GO2en0RC5UIkTpJRTPZY9//FwH3As2jm0cSFYXx/Fi1aRFpaGg1RrsUot/xK5DF4HImCB6K2+Q0y4K936xWhnIbvkEcRFBaVjMTKGOTB+BHKqViIQiDnIwHRBVWZg9BzsRNY4P733osF6HunN5q8KEbeiQYoX+IhNCFxMfKSjnPbXowmIp4C5lK3ogJMWBiGYRhG6ezmg+7/y9GP/RxkeIxG1aByCXtceFGRiH5MWyEjJJ+wrKQXFwmEnosxyFi4ENW5f869/lcUcvEKMmjAxIVhfF+ys7NJS0sjA4U4nYw+b63RZ9v3nXkaiYk/U15UgD7/24DByPPwHGpk+XckEj5EDS5HoM97R1QVKht9f+xCHpFmyAP6HGqa1xt5NFJRA813gHluH13RJMYKt/4ulI91HvA39P1xNwrPXIxyPtL8sXr0sD4WhmEYhlHXeHHxAPIqrELlJG9BP+hbiC8qGrntl6Mf1ZaECYw+58J31o0WF6Cwh7ORmJiHZh9fdq+ZuDCM70e0qPBVoZahiQGQuGhIWXFxDvqMR4uKAE0UFCIPxTWo0MKjwC+Ae1HOxrsox+Js9L2xHoVJbkNVpRq55/8gEZDt9nUpYQW501E1uPnuf//9sxx5OK4kFBUgYXG3W2eg+/tnKLQqMTGRoqKiml20WsKEhWEYhmFEsQpVg0lAguIKZFg0QvHRzVCVlwaEP6KPo67ao5Fh4qtDeaKrRV2JjIx33P6PdPvxTa8ykIFxKmUb5pm4MIzqES9vqjv63OUhkZBAWXHxIqrEFC0qdiGvwSEo/DEBhVNloZyHM5A4AHkj1qGJhyZo8qEPCnH8DxIA69DnfQUSI51RcQf/3XA68AVh4YjlwBrCBnteVHjuRhMSvd0xfDWo+XVQDcpjVaEMwzAMAzWUao+Mi75odvM9FMtciAyF5uiHsxHyahQj
UfEIEhv/QfkSO9xrIIMhgVBojEfJ4EOAE5HRMgB11v0f6s59JppVBRlGQ1GcdWZmZoXjb968OcuWLavwdcOoL+zYsaNMVSiQx8J7FHei/AhfcemHKOToCuRd3ISExUTUEG8KMtjXIe/GR6gqVCMkEo5CYuI+VElqKxIyPdBExbtuvQuRgLkVeUeeRuFMPnAp4o49EYVTrUTV4Zajhnv+XKJJR987vhrU/Pnz6dOnTw2vWO1hHgvDMAzDAGbNmkX//v15D4VPzEPGxKPIADmdMCSiETIGxgCPoVjpl4DbCX/8/UyoD5UCiYpHUElKX1VmC5q5HIASRJ8GJqNykhFkiPiGeUF+PvHwoqhHjx51nrxpGPsD0XlT21EORBv0eSxxz7tQXlQimjQoRtWV3kQVmnJR/sTnaHKgAH0+T0bJ1KtQcYev3d+HAF+hSYjDUGWp8ciTOQx9T0BYde4hlHTdCH1fvI4Sydej74Xly5eTkpJSLgcsuit4dInZuhYVYMLCMAzDMAB5A7y4uB2VcvwpCkd4yq1zuntuikpWPgb0RF6NNShnYjcyPqCsuHgRiZRoUeE7cPu+Fle6fT+KRMrRaKaysoZ5kUiEdNS07z1MXBiGxxvkT6HP1WXo87kNlYktQt6LYvQZfQ19hjoB76PP1K2ESdfzUc7FWiRKHkNVoE5A4uJYJCCeRd8H05EH9JaoMW1BHk1f6GEL8lIuQR275+MmEWI+7/HExf4mKgAigbX2rJL8/HxatmxJXl4eLVq0qOvhGIZhGHuRSCRCX2QMXItm4H4NfOb+Pw15FB5GguAtFOawCoU//ADFPn+BEjR/iAyTB1Gs9CvEFxXJ7gFKDH3N/b0R11wrzs+1FxW3Ut7QMHFh1Hf856MzKgH7Q1T+9V2UaH0OMvC/Q+FO7wMnoUmFFYQiIwcJicVu/R2EfWyGoQ7YW1Ao4zDksXgSiYV0wm7aW1CY1CQkJnyidgfk4VhPfFER75z8RMK+EBU1sYMtx8IwDMMwHF5U3IbirXcig+IvqPnVGBRz/ShK5jwEeStWoaTNEjQzejeavXwa+B1KBi90xxhLKCpK0KxltKh4ARk4uSiR++8o5yMSiU4HLy8qQPkYP3Hb9ejRw3IujHqL/3z8GHn/GqA+FH8H+qEqTpORp+JtZKSfiERFd5SLsQz4L/JuJCBjvgPycESAU1CoVB/UgfuHSLTsRB6O36GGduehJO6tlBUVQRAwf/581iIPR1WiAvf6POQB3Z88FR4LhTIMwzAMyoqKG9yybchI2I28Fn9E4Qo9UXJ1PjJWrkM/qB8DZ6HKMXcTlo9NQuES21Fc9VVIjExFM6TXueO9gDwOy1Ec94So8T3gxuhDImJFhSe6kpSFRRn1kcTERNoj70EKEuqtkfcgB4mDnihJ+gtgAwpRTEGiYgYKdTwSeR2ao4IKn6Jys4egggoDUN5ELzRJcJo7/stIeFzu/r8fhWGdjsIqo0Mb+/Tpw/z588nMzCSoZonYIAhITExkflbWfiUqwISFYRiGYZQx1G+IWt4MiQtfPeYeZPS3QaFRbyLPg/cqDEGznxEUMrEJGTP3o664U1ADrQdRY6xhbj8JSHw8jOK3D6WsqPDiwYuLikSFx8SFUZ/Jysqib9++vI1CmgqRgD8DeSHeQaFQxyIv5EkoTLE1yot41r12CeqI/Rz6TB/h/h6IvBs9UWgV6DPsxUUCYfnoy5Fn4RXUvyKH8l6JPn361LjvRF31qagKC4UyDMMw6jVNmzalPTII4hnqzVDYk2+G9z+UpL0RdetdhUTBQOBcVFLyP8CfUH35/0M/tqe79Tu4/R2FjJtLkIj4LRIbv0fhVBfGjONG5E05HomKq1BIVgkKs4glXpnaSCTC7NmzadWqFTk5OdW5PIZxwNGnTx/++Mc/sgB5B45FoqIH+lyNRCFJ65B3cTPyTExGuRFeVHRG3wujUX7FBuRdLEHexy7oO8GXk05A1eKGAYejSYb7UZjVNuKL
ioMNExaGYRhGvSYrK4t16Md/LGUb24FyLCIoRnsM8jYsRAbD0ShM6hcoVGIe0BaJhiS3XTLydjRFoVSzgbtQ+cuFKKxiIBIbs1CC+C3I0IknLl5DoqKz298hqOTl6zHrPk5YpragoKC0JG1mZiYN8/Lo1q2biQvjoOTVV1/ld7/7Hb3RZ+hs5JHwacdnIHExHzXMOx7lUryP+lNciD5fHi8uVqDP6UXo++JNNOHgG2A2QIb1ZPRZboY8HJ3QZzRertTBRp0Ii/z8fH77299y1lln0aNHD1q2bEliYiIdOnTg3HPPZcqUKd9730VFRfztb38jMzOTZs2a0bp1a0455RReffXVWjwDwzAM42AhNTWV7Oxs5qDQoWhxsQsZDdGioilqXLUMGSp3oOpPA5F3YDWqNvNTFEbxJ6ArcDOqa38j8iYMREmhq5Eo+QX6UT4SCYeKxEVLZPS0Q8miHYBvUJiVFxePo7ApH8vtw6cuRomrP0DeFBMXxsHGq6++ykUXXUQ68iwOQ56KZsjTELjn05FXYiEqFduIsJN2W8IGl55h6PP2HkrC7uf+/pbw+yJAn8PXkOD4HOVfPI2+E3zzzYNZXNRJudklS5aQmppKcnIy/fr1o3379iQkJLBkyRKyXBvym2++mYcffrhG+92+fTvDhg3j888/p1WrVgwePJjCwkI++OADdu3axV133cV9991X4/FauVnDMIyDn8WLF9OrVy8yUGWlKwm9FU8TioqPkHHQAlWbuQIJggTkvchDFaJAsdqPIWFSgkTFNe41/+Ob5/YVQQmj/3LLp7v/H0ax3K+iRHIvKm5FM7GTkOhJcuNq5MYYKypuQo253kTCowTFni8kbMRlGHvK6tWrOeqoo1i/fn21t0lOTmbbtm01ChNq1qwZWVlZ9OzZs3RZVlYW/fv3pwMSCaehxOt0JOJLCMOVvHfhTZRsPQQVXmiN8qN2oc+Sb4bppcADKEejORIhI6LGNBl9ThOBL1G1qCdQmVncPmKrQh0I1MgODuqAgoKC4IsvvgiKi4vLvfbBBx8ETZs2DYDgrbfeqtF+b7/99gAIMjIygk2bNpUunzZtWpCcnBwAweTJk2s83ry8vAAI8vLyarytYRiGceCQnZ2t3xEInoKgCIJHIUiHYKD7/xkI2kPwEwgCCIrdcv/Y5R67ISiB4DEIjnfPgXuURL1eErP8JxB0gOC/btkTEGRCcC4EP3BjewKCtRAUuP3cD8FREAyCIFV2UBAEQYAb+0Nu/Xy3z4fd+RwFQTe3/vLly+v24hsHHHPmzAkSEhJK/1+1alUABO2i3oOxJCQkBHPmzCn9HwgOg6B5JdvEgvsMAsHixYuDIAiCWbNmlX520yFIg+AfECyH4GMIVrjP53fusSvq87cSgqVunXy37DsICiHYGbXeFAjOg2Cw+wx+F/Xa6xCMhuBad/wLIciN2m/g/l8Gwb8h6FuD861ramIH10koVHJyMoMGDaJhw/JFqU499VQuueQSAN55551q73PLli08+uijADz66KMceuihpa8deeSR/OIXvwDgL3/5y54M3TAMwziIiQ6L+jfwc+SpWAb8CM1ejkbegXfQzL//IS2J2VcEeQRGoPCqEbhGd27d3e45OijiCbffQ1F4FMD1wKkoX6ItoaeiOfKgTEQVp45Fs6ytUFJprKci2W3zOor/Ps4d+xC3fm2FRS1fvrzG3v0WLVqwfPnyPT62se+YO3cuGRkZHFZSQiQSYfXq1XTu3JkMNJNfUe+VtiUlZGRkMHfuXCKRCL3QZyMTeRWqChPy7+tLUL+W1NRUlixZQv/+/WmPChZkoc/CWNRXogtKnF6HPsOgz1+APIa7UB5UMfIuFCKvQ4JbFiDPxhhUYvpSFNrk9+U9FUciD2AJoaeiIWHlqNbo8zkCVZ87GMOi9svkbS84kpKSqr3N1KlTKSoqokuXLhx//PHlXh81ahQAX375JWvXrq2dgRqGYRgHHdHiYjwKWdiBhMZY
9MO5FFV5eRBVkfFhFYF7JCDjxHfUTnXPhUhceBHiBQZIpDyIDJGPkEFTgsKwnkXhULcRioomqPrUWyje+zskLvzx+6BSl9Gi4jkUA94PVbXx4qItobh48803v/e1W758Od27d6eZSxavDpFIhOSCArp3727i4gDBiwovInpDqaj4CWoieRtlDWffJ+Z8FJqUkZFBL7f9r4GfufVjxUUkEiESidCsWbMypZb/SdgMMjU1lcWLF7MOCfObUYnXHSh0KVpcrCBMtM5H+RJNURno5ijccKt7LkHi4S1CUXEJ+gweQihMXkMJ2pehKlMN0WcvWlR4WrvjnY3yNtpB3In2A5X9Tlh88803vPTSS0QiEc4555xqbzdz5kwAjjrqqLivd+/enTZt2gAwa9asPR6nYRiGcfDixUVekyYEQcCKFSvKJHcnoGTrbEJx0ZBQXBQgEdGMsKN2Mqook4eMel+msoSyouJjtzwRCYe/Ig/JUGSMJCJR8Utk7JyIDK2RhOIiBTXYmw98ggymX6F8j0Fu/YvcODoig6k9cBhw5plnsmDBghpfMy8qKpuxjiV29tnExf5PtKj4Cerh0gWJVV+YAMLyyP590Be9R71nrRe671eh9+uIqPW9uIhEIjRGwrdo+3Z6oU7a8TrNe3ExD3kQelJeXByG+tCsRZ/RrcjIPwyJjYbukYiEfSOUoB0tKoajAgr+s5vszms18mpcjrybK1EOViz+8/82qkK1Hkrziw8G6lwi/e53v2PlypXs2LGD5cuX880335CYmMgDDzzAscceW+39+C+iLl1itWFIp06dyM3NtS8twzAMo0pSU1PZvn07oN+WFStW0LVrV/6FDJa3kfExD4kCUHhDvnt4L4FPzyxChkozZFgkuNfHIMOnEJWiBQmQF93yvigR9F1kLI1E5WqnoQRzPxN7htt2Agor2Y7K3r6HklPXoxnVHii0ayjqwXE/oWdlOBIiffv2Zf78+dXu6hstKn6CDD5fmcp3C48lttHfWCTcunfvzrJly+jWrVu1jm3sO2JFxRUoXLDQ/X8Kek/5Uq03IsP+STRT3x94BL33L3EP/xmJECZCP4A8eQXI2zcINZLsjTxxu5HHAco2g0xNTaUvStp+D31eliDhcb/bbiCaEOiOiib8wB17K/rMtCAUGO+gz1MBcAwKf2qFPvdeeJQgwZ+AwqFAXgtQn5nzCJtd5gG5SIA8ikIY586dS3p6egVX/MCjzoXFpEmTyii15ORk/vnPf3LNNddUslV5CgoKAFUJqIjkZM0b5efnV7qvnTt3snPnztL/q1rfMAzDOPiJFhebkKEeBAHz588nPT2dB5Hhch4SD40IjY+iqL+TkfAoRKFJDyPPQh/gBdTHwouKLshYaYhmcR9GRsoOVI72UjQzmuPGOBz4AoVS3YiExL9RKNUpKN9iNRIW65CR1A8JmhHIi9HFjaO64iKeqIDy3cKjxUW87uHRBqKJi/2PikTF5+geXow8ZlsJxcUq9L5KRMLxD0hEeE/FTpQP0RF5MLy42AX8DX3GLkbC4ghUJe2XqIzsTygrLj5CRv7lyJvohWpPt+0HbllryooKUC7ENuQJTHSP7Sh/qSfKd5oMfIXEBYTlaBPd89nu2YuLy9yzFxfPcPCLCvgewuLnP/85kyZNqvGBxowZwwknnFBuuQ9Lys/PZ9GiRfz73//mhhtuYPz48UycOJHmzZvX+Fh7yr333ssf/vCHfX5cwzAMY//Gi4sjjjiCYLP6Xfft25d58+aRnp7OC8hz0A4ZTUVIbOxGosJnDrZA9e9fQKICZPA/hvI3PqSsqADFhjdBxtQdSASUEMZwL0czrBuQgOiEjK3uSIR8g4yjc5ARNdctK3Kvt0MdiP1sa3XERUWiwhNPXMQTFR4TF3XPvHnzyMzMZNeuXSxdupTMzEy+/PLLMqLiavS+fQeJ2XORod0QeSDykacgAQnpy93fLyBP2XnoM7EWFSpY447dC4mTHsjblo08
dj2R1+Aw9L6e6tb34uLvSKhfi/I3thGWdf4HCi8cgErKdkN9Zbyo2Ep5UeEfI5GQT0UCZ6Lb5hzCkrQB8jBGqFxcjELlbA9mUQHfQ1isXbuWRYsW1fhAhYWFlb7eokULBg4cyAsvvECrVq145JFH+MMf/lDtvhNegGzbtq3KMVRVreJXv/oVd955Z+n/+fn5dO7cuZItDMMwjPpCly5d2OxEhSc9PZ2+qGKUN4O9iPD18GPLkXRz629FlWT+jsKdXkOhTNGiYjcy5rogQ24wYfgSaHbYx2xfhgy9RcBiFD4yAs0KT0RG1CAkZBai2d/RqEnYCmTInYkMp9epWFxUJSo8seKiIlHhMXFRd8ybN49+/frRjjA/ph1KtPYVl65GgnYr8rK9h8TExSgXYpfbl+8239gtG4FEdwkKH0x023cgTKoOkCB/A70/b0TG+FtITNyA3s+vEIqLEuRJOA2J6E6En4tLUBjh58hjd407H+9l2Io+D80JE7obEoZmneWeX0ei4Ww3tjXoM9cNfSZ9dbeA+OJisft/IfIWHqyiAr6HsHj++ed5/vnn98ZYSrn66qt55JFHmDBhQrWFhW/ss3LlygrXWb16dZl1KyIpKalGFakMwzCM+ktFxnIEiYlGhCEbsdzo1nsAVcbZjEKZBlJWVPwShTwNR+EhnwI+BqAZMnamoZnho9x+mqOqT63demcgA+9VNNu8Chk9R6OO4GuQkbfBvZaBjLh/EV9cdO/enXaExmZl+OvyHBIxFYkKz9Uoj2UTkJmZaSHJ+wAvKjLQzP5b6H10KRKt3kPxGHA4sBG9TxchcQF6T25A7+kOSCDvQl6I5ui9ugJ9JtoiAfAtMvTbIm/dlyg0bzSqIPWGe4xEgrcEeetAItyL7u5IPPeOOqf/IvFwIxIB2whFxS4kzpsgD+I2t8yLCygrLsYjIXSkuza93Xnhlnsx4sO5lrjrttldn41IrB/MogL2gxyLePg8iY0bN1Z7myOOOAKAadOmxX192bJl5ObmAjBgwIA9HKFhGIZhVD0D72dBK+MG9/wACgvZHPP6YhQffioKcXobGXjFyADcgeK2Mwh7ZRQgQy45aj+B20cxMtTOQ56N2e4YPwDaoDCNIiRSoKyBX1RUVHre7ZDIeRslalclFm50xzysivVw+3vb7T8wUbHXiRYVd6CKYl2B55HhfRt6f85DoUU/RJWRvgJ+i94fryBDfSAy8Neh90435LVoiozt9ShcqRESFY3ca4F7fQryPoxAwuAd9L7xfV0S3D4vQp+vj90+ByER7d/7z7t9neMey5A474IETkP0+diGxEcjQm8LlDWQT0T5ERNRWNaphNXbEtBn3OeIRNDnMQsJlrFITNUHUQH7YblZgPfffx+AXr16VXubM888k8TERFauXMlnn31W7vVx48YBMGjQIDp06FA7AzUMwzDqLdUJ66lyH+75BmS8tUXiIprebvmbwCRUo7+H+/srJAjOQoJgKjKq/Gxw9HF8CdthwD3I0zAPhWc0QiEisaIi2sCPFhV9kWHXD81kP+DWrYpYUbEbiC1s66tJzYe41aSSkpJYuHBhNY5mVId4ogKUXP1jZECvRO/PPui98Boy+C9BnrC+6H33LvJY9AHS3N9L0cz+5+het3frFqL73xSJlG9QblGaW+8+5Bk4Bzg9arwlSISkoNCmy1Euxgz0Pt6GjPm30efiTCQKNrnj7CL83LVyf69Exv9cN+adhPkTvvfMULe/Bci719M9l6D3ajahqBiPxMvnyJNyFfp8HX744RXdhoOGOhEW48aNY/r06eWWB0HAa6+9xm9+8xsAbrjhhnLrDBkyhN69ezNhwoQyy1u3bs1NN90EwM0338y3335b+tqMGTP429/+BsCvf/3rWjsPwzAMo35SU1FRmf+9KnHxODJ65gNPoRniw5FAeAcZUOei2O6JSFxE/7hHd5JIco9maAZ3HjKMehOKiouQYRXPwPfnfQvKq7iJmosLz25U
vecy1GPDn2tloiISiXBIURF9+vQxcVELVCQqDkFG91XAdUhc5KD3elP0njoJvQ+nIZG7Ec3yv4k8BR2RSNgETEeCpB3yXmxEnrnVqATrFPc4C+Vq9EdC4TwkXHwBBC8qQMb+d0h0HIHCA0Hv3ffdPs5Fn50FKPeiExIZWwlLzL6Nqlu9gLx5a5G4KCIsHR1xxxrq9vsRKoTQEHkNWxF+PqNFRYo7p484+PpVVESdhEK98847XHbZZXTq1InDDz+cVq1a8e2337Jw4UJWrFgBwC233BJXWCxdupQVK1aQl5dX7rV77rmHr7/+mi+++ILU1FQGDx7Mtm3beP/99ykuLubOO+/k7LPPLredYRiGYVSXJk2a0B6Fa1RHVDxO1bkFPvHzBuRBGI96YyRS1tD2CbUdkdHXHhl1CYRlMCe65zMoKyo8SWgWeQEyntqgHIsAVa4pQb01HiS+qPBiKo+wd4avdPOA+7+q6+JFxZfIMPyf29dKKhcV6YQ9Cvr06cOCBQvo3bt3uXWNqqlKVIDeCyPd32OQwdwJvZcHIYN5IcoLao8SpF9AIUxHo/dpMQpxSiYMOSpGnoDD3GvPIYF6JXoPBMgL0pewEd1u93eEMD+iIXrvLHDjbOPGPhx5T152/yegsKSOyMuQiwTIFFSKeQH6TCQgIbDOve4rRX3nXvvIHW+1O/404CX02XvaHS8FhUGluH09iELC5syZQ79+/eLciYOLOvFYXH/99dx22220a9eOGTNm8Morr/DJJ5+QlJTElVdeySeffMJDDz1UZcfOWJo2bcpHH33EvffeS8eOHZk6dSpffPEFxx57LC+//DL/+Mc/9tIZGYZhGPWFrKws1iHjdmwV6/oZ+M+QgVHZjH7E7c8nei6gvKhIRwnZn6NZ2fNRbPdENIN7DqHn4s0KjpOLvBVJaPZ5pzv2Ce75KeAhVL62dGxxPDQtkSF3BjXzXESLipuBe1FPhOqIiluBfxJ2W+7Tp0+5CIb9gdmzZ9fYholEIsyePbvqFWuJzMzM0kpPFxJ2kW7lXi9B4UnvIwP/OrfsVhR+9CZ6r0xFosKXOG6PREcRek/+AOVUfIf6RcxDuRXdCcXD0cAs5AVoj0rDNkbvhxWEgsILDF/W9l1UjOBc9N4P3LqXIVFwP8ohykTv+TVIXDRyx3oWFSr4M/KuvILExndIMH3rziNAXpunUT7Syehz2hJ9rv/PjWMTEhEp1E9RARAJ4n2CjTLk5+fTsmVL8vLyqixVaxiGYRz8ZGdnk5aWVmmp1diwnqrCp3xDrznuf58cHb3tTUhMfIQMvg5unZGE3gNQecyphJV0PAVRjw0o6dRX8dmJZmKfRUbd5cATbvyVjTteN+HjkZG2HeWDeGJFxSg0+7wazXKPQQZdVc30QNf3IXe81157jfPOOy/O6PY9s2fPJjMzs8z9i2bFihUMGDCgtKAM6Bzbo5nyrKyscrH4bdq0YebMmXTt2rVWxxp9bYchA7oNYc6DD19ajwz+HihB+gXkVWuPvF3nImHp33O9kQegDcq32ISqRzVBIVBtUPhSE/TeW+f29ynyxI1En4Mc9F5MRcnku924E9D77WV3rEuQUAnc41nU4dtXgboGCe7Vbr0v3OvZyEPzO5R0fgX6PJyHSjonIrHkvXGLUAjYc+7hP6/t0Gfq/5AomsvBJSpqYgfvl8nbhmEYhrE/06tXLxYtWsQcZFzEei7i5QoEQcA84nsuokXFokWLWLp0KYXJyaWioj0y/M5Hxs6lKJzkA2QwRYuKPGTcdEaeCx84vBsZP4nIy5HiliWjWdkk1PF7PcrdGIFmqY+n8lySaM/FGjQDfRnwH5Qz4ovGxxMVIMNyI/KYXOTG5mf7KxIVX6Fwl9uQ5+L888/fLzwXXlRkoHvUF8p4LlasWEFKSgpJW7aUO8dL0LlkZmaW8VxEIhEab9lCSkpKabh4bRH9nnwX3Ytc4BPC6k3HION9HQpbmojeH6cjQXE1eh8+iwTB
aShPqCt6vy1AXoFuSCAcTlgBars7Zid0vU5A75unkLegHwphWobe7w3c+lNRCOBF6L220q3jx/+8O8ZcVAXqCSTGO6HP2r+RgE1DYuJRd9wn3blOQaKmMxIv97nz6EJ5UTF37lw2RCJsQ2LlFQ4uUVFT9stys4ZhGIaxv+PFRVpaGv9yy66m8gRkLxQedP/fSHlR4SsiFhQUAJrBzszM5D1kqJ4N/BTNCF9HmFsBEhFbUJhGNgpxSUYhJAkofMR7LDqjGd15hD03BiPD6BwUKnI+Mharak/XEhlp7VEloXwkII5Ds9AlSDzEigrfo+MdZFTfjgxDqFhU3IGMxFNQ6dMNbj/nn39+nXou3nrrLc4444xSL9aV6BqPQeeSk5NDSkoKGSj86G3CClv+HB9GfSIyMzNL73t0XklKSgo5OTm16rmIfk/6aks7kKjo6dZJI/QELEYJy2lIPGxB76tByDtR7Lbpit4Ha5CgaIHei+3Rey0bvYdTUe5GR/S+2I3yFi5w6xYiT0mSG8PbSFScg8Svx4fudUfetkfReyQViaSH0Ht9CkrQ9jke65DIWIsE0evIY/GIW/4WEkaFKEzs5+heeFGRnp5OSUkJoPv5rTtefRQVYKFQ1cJCoQzDMIyKiA6L8gZjRbkCnthE5FhREYufCe+HZl3zUKL3lVHreFHxIUowPQd5OZLQLKKvse/LZyYgA/JbZJQlI2PwORSW0hEZ8NWZgWyPvBbRouIqNKP9CpoJXo6Ezr1uGy8qPkMz1R8jo3QnCrcpRknCP446zh2oQ/hJSLBsR96WRGQgroe4XcKjiUQizJw5k/79+1fjzMSCBQvK9PGIxYuKPoTVvd5E92QemuWfD2VC59LQ/bgKeXLeRWFCi1BY2Dw0e34H5QVoPHFR2XnNnTuXjIwMEhMT2blzZ9xziEQi9EQ9KkYjgdHAPQJkcP8XiYoO7rEAGd7nIlGxEonQzuh+ryMsL9vc7acZEprrkEhpjbwYDdA9n4OE5kzksbgMiQXc9fwpEgu/iBl/jjtmF7f+E0jgb0DviyNRSNY6oGHDhsyaNau0y/h6t49oIXsUEhvr3Gvz5s0jPT29dP2KelI0aNCArKysg0pUWCiUYRiGYewjosOiXqRqUQFhCMqLVC0qQPXvs7KymItmT69EBleJez1WVJyPZnxLKFuTP0ACogkyykuQgZiGElZzkVF5CZppPqUa51+ZqACFq1zvjvkm8AxhWNSHyIj7I4p134XEVpEb10BCD8YdKETneuCvbn3fJLDY7bMP6hK+YEFsdwzhw8oGDBjArFmzqnF2EhV9+/blkOLiuAnZ0Z6KW1AY2kvonrQhLHvqez1cjWb3k4AfIYP3v+jadUPi40x0T3zyMG47n7QeGxYVfV6RSISlS5eWvuZFRTsgUlRU7hwaNGjA2LEK5itCZV7nu7+L0D3xOTuXIw9WaySAJiBRcAbKM+hMWN51HRKqPVEC9/+QKJiKBFU7JF62ovd0ERIHecgT4YsB7EBiuASJkpGEIVnRpKD3xAok0EagnKS2SCTvdGMKgoDi4mLS09OZO3cumxo0IAiCMmFhd7rxrANeeeUVgiCgb9++BEHApgYNKm10t3v37oNKVNQU81hUA/NYGIZhGFWRnZ1NZmYmO3bsqPY2TZo0ISsrq1oNYSORCF2QoX4LCstIRkbXVsqKihGEM7gdkRHnPRe7kJG1kzDnoiPyBKxAhno2MnYnI6PQ9wiIpSpREc0rwB/QjGZHNFudggzmWSih9lrU1diXAU0B7kIx82+h2ehrUOz8J0gEDUOC5Uk0670DGbaxnot4XqKqPBdeVPRDIusDygrHeOFPU5Fo60pYcWs9KkH6NjJ6m7trdgPy1GxEuSWHoypG2SgM5350zxLQ7DmU91ykpKSQ7q7Ds+g+rweWLFnCjh07yMjIoB/ypr2H7nEBlMnfWYfeI5cSen/+jqqGbQH+hITRj914troxvoWM/ItRGF0EiYQ1KHyp
IxIEbyNxkoYEyVluvI3c/9+i99kGJASy0HtgCHov70Tv9WTCJnQT3X5Guuuyyo3lMCR8mrv1H6d86eTKiO4qP2vWLDIzM6vc5mCnJnawCYtqYMLCMAzDqEsikQh9gGPRTOoZ7u+mSFh8iIx/LyqWIQOyOTL4OyHD0QuLQpQX8R0yMrciIXIIas431722DcX9t6S8uLgBeAMZle2pXFTg9v8sMopboBnpIcA4ZCzeiWLst7qHFxeFyLDshYzR8e4cbkNem0I0y/0Wyk8odOe8tVGjMt3Co8Ncoo3zisRFtKi4GBnF85FAmg+sWbOGjh070sWN+y8odGwrCmHqggTDKiQMNqC+B58gAXIDMqDXu3VbEvZoKADuRoby8agCWD6qakTM+Pu6a/ExElQDkUib69bth3JXRqME5QcIxUW6uxcrkHDp6JbdgnJ5it3jHWTMj0DelC1uvKtRhaiFyMs1FHmOiqLOZwrybJyF3p8vIfF1ASoDu4KwY3Z7QlEx2K3T1J37TsI8jcCNx5eaPZLyoqK5O/+AMCyqJuLCREVITexgS942DMMwjP2Ypk2b0h5VQboP1dL3PSqOQUbX/1AIzghkPM1HQiIDJch6g7QtmrV+AyWonoMMymzkeYgWFaOQByAbVcZ5B832gzwhWWhm+0233ZlULiqWEMbU90FJ3KuRmBmMhBKEfRTOQkb2o2gW/iZkqLcGTkQGd8T9n+CuT4CSbpcCS1w4VLwkcF8e+F8ofChWXMSKin4oNMsbTa8AHTt2JAMJnnkod6QPMvRT3dgPc+uvdtfoCJQfciahqOiJcgK2ukcTdI+XEybnpyLR1Mnt62pkKHsvwsfoPh2FDPTjkKdggzvv0e4aXu/G48XFfEIBgDv2QMJk7UbutdPd65OQZ2sDErfnInHny8/uRveyGaGomOjuzelIkJzkXnsGhYSdikROa5RX8QYSkBe4MRcjQeFFMEhc9HHX5Qn0XrjILU8iFBW4Y92IPCO5QFJSUoV5Jh6bc//+mLAwDMMwjP0YHyr1HprBv8YtfxMZcGeg2eX3UJnO5sio3YiMqa7ox3418jp8gQzHN5CB6A3zjpQXFWPRbHknZADi9p2AZqi3IIO2EIUJ9UIGXjQ5yINShGb0e6IZ8ZVum1tRCFCOWz8FGbsgAznPjftjt80NqKGa75TcndD7cpob94NAz56qaVRRD46KxEU8UdEPGbEL3TY+xCkD3ZNjkPi6xa3nq2xFkLgI3DVojwz/j914vagoQkZ8K+A3qIfEdWiGvxglVEMoLnq4bW5CHpBsJCx3IvEyCxnbM1FoVbSxd517/gW65z1QbsfnSBSchTwfoPdJovv7dDeuDwnDz0Di4ofIGzLeHWs4oajwgnMrClNriQTANuRlKkHvmbnIA5GKxM1uwmZ4iVHjKEDv5VXuXLchYZ2B3qO+OEG0uBiLPh/rgQVZWRh7DxMWhmEYhrEfk5qaSnZ2Nr169Sota+vFxX+RQToSGen/RsboD5ERuBAZuGnIqB+LjMe7UOjR/cjou5z4ouLfaBb7E3cckOHQwh0zAYVKLXavPeaevbjIQQZ1OzSz3AZ1LW6JPCDHIEM2ErU+SFxsQUbjlUgcvIrETHQx2WXuuQ3Ka2jq1l+AchkaU3kPjlhxMW7cOEaNGhVXVIBCnKIrV32O8hJw5/INMtbPcsu8MdwWXdOVSDwUUFZUFKPr+kcULnWN228LlH+wk7AC2GMoBOqP7vjRouISJAAmA68hj8B0lP/xL8KKPVlu/L1QXkt3JEC/dGPsSiiivLh4A1VV8uuvRuJiF7o//d05TkDvhzmEORA7Ce/PJ0hwXIM8M++5c5+PPFHtUbjUAhRaN4Sw8lcyeo9kI+EQQWFQG1A52eMI845w60SHjS1YsIDevXtj7D1MWBiGYRjGfk5l4sJ3LP4fmsX/GM30+jj/BUhUfIpmeFei+P2PkEfjEzSj3oeqRYWntXv2fQTiiYuBlBUV36Dysj1RCNUg
ZERPQGKoi9suB81u+ypJXd1rQwg9GQGauQcZmZvdOu1REvd7yFi/m4pFhedqZHRuAkaNGkU75BGJFRWeDDe2Rki0fYbCgfqia/s6Mnh9x/NEdE07I0Gxxe2/JbpPxW7936F7dzUShi3c+j5xuQWhEHrGPaCsqBju9nk2EhGvIsM7WlzcjkRdHyQSjnJjucqd01jkCelAKC4WIe/C2Ugs+NC6E935znJ/X+2ONRUJHJ+nUeLOYYo79oUoxKs5ErfvIzF6vhvzRPQe2OWOMwTd8+XoPd7AnWdXFGI3G4U5ve7OCSQuxhM2qzNRsW8wYWEYhmEYBwAViYsvUIz7SrcsC4WJ/AYZ739EM8DHIQHwI1RlaR2a0Z/j1v8F1RMVnsrExb+RYXkxMg4/Qkavb4z3CZotBxl/UF5cpCDDEWR4e1Hh/w9QmNEGFPaVjkTFw2iW/X3CHIfKeBxVLVoPrFy5ki5duvAV8kDEigrP8UhQgHJFuiJD2HeGnuhe8+KiAcpp+NadY4uo5Y2Q+HsDhXL9EF1b38jQiwpfKvgW9/wcEmleVPhjBciQ916TV1HFpY9RrsU0dO1vRSLJ5yQ0RwIJlNdyk3vtBZSHcwJhwnZXZMgf5sY8Cd2v05C4O96tuxMJnEboffCOG/sAd5wz3HhfQp6cT1FX9TOQkJmHGgyCvCSL3DgPR2GAH7jrcD7y3jzmrv81KA/mAeSJM1Gx7zBhYRiGYRgHCLHiYg4SFitRwqnvWbCVMPzDN+zzlZc+QUbwP1Hi71hkXD+MZv/fp6yo2IwSj+NRkbgocNu1RR6DIcij8gwKvermxpPgnmPFRXvCMCKPFxOeYjSDvRkZtS+64/dExnR1iO2Sfthhh9Heje9T5G0ZUcG2Xlysd+c5wO0HN66JyBhvge7PCndundx5+PCvJBTKVYIEWH+UuBwrKjwNkHdhO0qkjhYV/vVocbEOzdpvJRQVt6MKTvko3GwbMtS7EoqL+914d7l1u7jxLEdCrhAl3vdGouAN9D45B4kK34G7EaGxOcyt9zwKv+voxh5BAmIbEk7D3D5eQO/Jf7nr3Q+951oj4fEc8rj8zJ33uUgULURCqrE73z59+lhC9j7CGuQZhmEYxgGEFxe+Id8Cwio2/fr1IwgC5s+fX65h3zxkqK1A4SK+o/XVKLa+JUpAjhYVTyPD9ZlKxtMaGZYjkFfiEGQUZyPDbyPwD1T16UFkVLZARrdvzjcQiYvX3D5jRYXHG+O+mVoOMso/YM9FBcDGjRtZh4znJm5fkyrZx/Fo9nwTEg990XkvQh4bLypyUChUp6hz8GZuAbrWv0XJzf9GCeHxRIXfdj0SlJmEs/6eEsIu69lI9FyKxFIK8ioMc6+3QILCCwyQuDgD9e0oIhQVHZGHw/e5+AHyWPgCAv1QzsVmwvCnRCQqCtwxfHL4LPQ+WOOOmY68a02RkJmKEs+vRiFWOUgoFCAP1Z/ReylaVIBCoToQ9l/53B2vHcRtbmjUPuaxMAzDMIwDDC8uMjMzCbZvL/d6nz59mD9/vl53vRwaNmzIrl27SEPhJuMIQ5NAM/TvoMo6jZDh/RBhHwDQTHk8WiMj/AMU7gMyFkcThiP1RDH2/0HhLFchQfEmMshbUNZzAaEhHU1A2PyvK/KEPIpyF2ZVML5Y4omK0v27xnEg4eCFSkWei1R0vXLQ9ZuFwpmOQTPnG5Go6OjOx4dLRZChXIhExGXuHP7gxtcUhfREe2q8p6MDEnHvIiP6HMLO6n6dJe74vhnfjcjQfgMZ7t5T4MOy8t2zDy0bge7DRjf2ruiaFyGR4sOnmrp9ZqGciuaEoiLB7XcLEp+JhKVrp7jnM90xzkHvgzFuDJejcKxthKWKJyHh+hbywFxAKCo8r6P31FAkXnw1qJkzZ2LsfcxjYRiGYRgHIKmpqWyPIyo8ffr0KW0QBypbm4sqP7VAM/zjYrY5jVBU+Ph03PpPULHnYiyaQZ7n/o8t8ZqDQrKucscY4/b1
NTJUU1HuRxdkyOcRGsmxrEJx/r5M7kvIOE9CnpGqqExUeLyH5zOq57lIQWFGb6NwnIuRwFqAZsvjiYrdbtyJyBjfgc7/cuQRuB95jKCswPIj9l3HX0XiIlpULHbHbotCq3yy+TnI+J+IxIXft/dcLCLsgdKK8qJinTvXQ936ie6cJyMRdhJhUnuJO7+lbixb3DbJSFychbwSr7vjtUClbzu47c9D97c5CjM7mjB/JduN7WF0P2M5A4kKHw5YVYd1o/YwYWEYhmEY9YC+ffsyb9485qPwnIrERazhPQ8Z8hWJi+hynhC/b0RnlGcxHc3kn4xEwCQUo5+GxMZKVDHIz6LHiosVSKR0d9skoXKmzVH4zRxCr0c8NqLwrOp0YI4WF/koTCuvgnUfQ96AS5Dh7kOjWiJDPFZU4P5PRsKqAAmYVGSgX0GYG1ETcREtKg5DCfuHumMVIDETKy78vlYjz8AOt/4yJEw6EYqK7shbkYyu91RU8ngIYQ+JnW4fO93+Vrtzy3b79OLiVOR1WeDGvAm9F6cjT0S7qPPNQWF80911nTJlSun9eZDy4iL6PWmiYt9ioVCGYRiGUU/w4iI9PR3QDPnD7rVRxJ/Njw4N6kTZsKjqiAqQoXqz+/s5ZABvcdu1Q6LibcI+FfGM6GJkYLZAxq0XHWe757Fodvwdt/4lcc7/MGSMb0Ex99URF/7cb6VsZSrPYyjc5lJkEG9BRnUqCsFZTSguIkikeOHkm7j5vgt93LYlbqz/RvkWI5BAiBcWNdr9/190jRoj4+44t3wzEhQtCMWND+ua6J67IW9TEsqjWIXE4GFubGvc3ylu/RJ0v3yX7FPccXeh8sG+R0lnN5YfuP0scdv/wK17PBKLL7gxTHTr5brr1oVQVLyARMjy5ctJSdFI/P150O33RkxU1DUmLAzDMAyjHlGRuJiBYtOryjvw4mIuMiCrEhUeLy4mo5CbfiiJ/BXKi4roBGdvTDdChm0OmkH3VYpA4UcNkECZ6dY/CYXVxOLH9wBVi4tIJEI6Ei8d0Ex8k6jXZ7ixH0coKrYhI749MrZXovCQFJRX8Lobry8HGy0uCpGASEQGenvU36FV9JioWFxMQsZ6W+RB6eXG3MI9ogWbFxePoFCj/u71EiQKmiCRAxJ/G925NEcCYBLKm+mKBE0Ht2wKyn94BoW39UW5HqluDPPdefVE77cZ6N6+joQhqOhABHmjJhNfVHiixcUiJPJMVNQdJiwMwzAMo54RKy6aImNuLZXnHXhxEb0+yFA8jaqb0f0/FPbSGxmdG6lYVEDZsqwBYZ+L5cgATkFGdwQZ/yWol4XPE6iI6ogLLyouRNWXUikrKnJQeNFwwkZ/AwmN+BI0Yx9Bs/IfImM/ldBTEC0udiGDvClKcF+CvEIXUT5BOVZcgHIzFqFqUbciEbA9akzxEuGPQyWI84CfIgHgSxP3RB4M3HmAxGQzJCiPQpW91rptPkYFALqjkKc0d01y3Bg2IE/FDnT/ZqHEey+ClgBLly6le/fuRCIRxrlz+JyKRYXHvze/JUzUNlFRN1iOhWEYhmHUQ2JzLioTFR4f177KrT9lyhRWrVrFOjRTPLaSbeehsJ7OSFRsoHJRsRPN/hcRhhB5cdHNjWGle60VMvrPQl6RnSiBujJuBG5DM+qxpUhjRUUfJIY8Oe7Y3YDrUfiQzwHwYU7+fDohQ/spFE70cxQ6NJGwMtJONGvfCrgH9RK5EoVX7UYCJpp4d+l5N6bz3Zh7Iw/IIuKLCpDR3tuNZw4SAO3QvfJhS0nI81KCQpTWofyIZUgYdkD9Nx5wY12DRN4At5+XkbHfHt1z78H4N0pw/xIJBy8qQO+zRe76VCUqSq9JEJio2A8wj4VhGIZh1FO8uMjMzCQoLq56A2TANWrUiHlZWfTt2xeAVatW0blz59KO4FfH2e4naGb+RDRL/R8UclORqNjplu1EYTITkKF/HmU7dIN6WWxHs/0/Qkbxa2j2/thKziWe
56IqUbEDGfCt3TjWopCjXcgQbkrYJwKUh/AhClHahDqcLyfMKdiFkp8TgXuRsX8FCm9qgYz5Qrffhsh4j50Vfh6Vnr0QiasSN6YEZMR/jaoqRTPJje0mVI1pgTv2ccA3hBW+eqLrvMQd+zt37oNQ2dckd+wtbrzXuHP6F2E3+Ifc9RiFEs1fRt6PTUhsRIsKTxAEtGrViuWzZlUpKqK3MeoWExaGYRiGUY/p27cvxdUUFZ7Y9Tt16lSpuBiOZrJ/jEJoNiBRMR0Z4z4B2wuJQjRL3hgZouPQTHpsh26QwbvR/d8OeUXeQx6OmVQuLEDiYhEy4CORCO3d2OKJCtyYOiOPybOomeCRKGckGeWMgMTFZJRUPRR5BR5FIinDHTPVvR4gY/5dJCouR0b6Nnc9mhFWdvLJ3ztRT4doUXE2Mv4jSFT0cmOJFRdeVJyNKnQ1csvjiQsfXtQZCYalKHSqjTvnpwhzI9aia+7zHL7++msGDhwYNwdi0aJFHHnkkSzNyionKjxbt26Nu9zYfzFhYRiGYRjGHlORuLgbGZIXALej2fQvorb7j3s+GxnLOSgnoQeKw38ZGfkDUMnSaHHRDs16b0QhNs+imfLlyAh/BBnkleV+jCVsojZu3DhGjRrF18i4jhUVIKM9BYUrPY2M92nutR7u+RU0Iz8LNYC7GCW8T0Zem3HuumS5bZ5BeQpnIlHRComKfCQqWiCxlY88G2ORIOmDPDUXomTsAAmKEvfw4mIrEkK9UF6DFxWD3XiLkQcFyosLX52pBbqPRSj8KQFd/5Fuu/no/uS7a+lFBZTPgVi0aBG9evWioCA2yMs40DFhYRiGYRhGrRArLuagyj9rURz+WCQ2jqW8uNiFxMFyFPbzHPIGnA/cgoxdz3g0M98flVPtigztaFERrxRpLLGlSU877TTaEnb07oCarcUy2b2ejMTB8ZQVF8Wou/lZqAv3WFSWdjMST3932/iu0oe58/nUXa8RxBcVye7cH0Oz/9+iRG1f4cl7KgIkLEC5EOvdfj4m9BANdusmuvHGExcZKAl7g7tG+ShXwnfP3oTCm7oQlimOFRWeIAho0qQJi7Ky6NWrF8bBiQkLwzAMwzBqjWhxsYlwhjotLa2MJyNaXHyH4vBPQUbyl6jqVCGhoXIcobjYiYzrk1DVpG8oLyqgcnERr9/B9OnT6dKlCzko9Md7R6LFxSQUntUEzeYvQKLoZCQuCpFH40YkHsYioxsUujQPeTquRE0CL3LXYaQb/7+QAX8p8UXFA4QdvQehfJVFSABA2UpaS9y6c1GifBN3LtGioqF77CAUF3ko/+Nxd41bU1ZU+ByPtlHXtTScrH37cqLCs2PHjrjLjYMHqwplGIZhGEat4sVF0LYtQRDQq1cvFi1axBxkOI9FBsixaLZ+BzK4x6GwoAdQWM1aynZWPg4ZsztR3sB4ZKTHExWeeB2aK2qi1rlzZ1auXMk85C1p6o7xptsuWlR8ioz2NWvWMBd5A/JQ0vgCNJv/IKGoKHbn+Mknn5DtzvVOZJDPQ4nQh7rzeNwdK56omI88AuuRV2SeO94i5KXwwmK5Wz7DjQnCBoIfEYoKT2MUNrYEeWQ+QWFPD6Du47GiIprocLJvvvkmzhpGfcE8FoZhGIZh1DqdOnVi/fr1pf97cRHruZiDjOgN7rEVGaheIMR6HBah2fR1yPB+GRn08USFpyZN1Ly46NJF6eEnIqN+AcqZ8KIiupHgmjVr6NixI/ko72EJmr3/ABnrXlSsXLmSzp07l8s58NvnovCm3cA/3fmNoqyo8Mf8+uuvOfrooxlP2GU8QHkUS93xZqF8j40oKfxZ5L14BFVzGkrZnhgrUTL4K+48Frp9bUcekYpEhRdpq1evpmPHjnHvgVE/MI+FYRiGYRj7hFjPxZ1oVn8hMpiXLFlCQbNmZQRCtMfhTrfdfOCll16iABm9lYmK2P28SNWdmeN5LmYSX1QAdOjQgTVr1rAShUc1QfkUfSkv
KqLH07BTJ9asWUOHDh0IgoCNCQk899xzbHXX5BHgD5QXFQADBw7k66+/Zi4SHvPcNr6a0ywkEBYikTPeXavV7vz/jQSW32M2Eh7jkZiY5rbdgfI0HiL0+HhMVBixRAIr+lsl+fn5tGzZkry8PFq0aFH1BoZhGIZhVEh2djZpaWm0o6x3ojIikUjp+r6q0KxZsxgwYECN+hdEIpFqN1FbtWoVXbp0IR11mf6E8gZ+vP379T8lvqioCn9eQJXX6JtvvuHoo4+mH/JcpCGvxCtRY507dy4ZGRkkJiayc+dOIpEInYDDgTtQ8vWzSHTluG0aNGhASUkJCxYsoHfv3qXndSvyHJmoqD/UxA42YVENTFgYhmEYRu2SnZ1NZmZmjRJ6mzRpQtY+rirkxcX3FUE1FRXRLF26lMzMTAoLCytdL1pcnIJCsKojgDqhKlbdURO/nGpskw6cRhhOZqLi4MeERS1jwsIwDMMw6i+rVq3iyCOPZOPGjdXe5rDDDmP69OnfW1TUFC8uaiqAWqE8iy012MYfw0RF/aAmdrDlWBiGYRiGYVRC586dayQqADZu3LjPRAWEORfVFRW49bYC25OSarRNpH17ExVGXKwqlGEYhmEYxkHAwIEDa5RvAtUXIdGsXbu2xtsY9QPzWBiGYRiGYRiGsceYsDAMwzAMwzAMY48xYWEYhmEYhmEYxh5jwsIwDMMwDMMwjD3GhIVhGIZhGIZhGHuMCQvDMAzDMAzDMPYYExaGYRiGYRiGYewx1seiGvgaz/n5+XU8EsMwDMMwDMPYd3j7tzo9T0xYVIOCggKAfdpB0zAMwzAMwzD2FwoKCmjZsmWl60SC79NysZ5RUlLC2rVrad68OZFIpK6Hs9fJz8+nc+fOrFq1ihYtWtT1cIw9xO7nwYXdz4MPu6cHF3Y/Dy7sfspTUVBQQIcOHUhIqDyLwjwW1SAhIYFOnTrV9TD2OS1atKi3H6KDEbufBxd2Pw8+7J4eXNj9PLio7/ezKk+Fx5K3DcMwDMMwDMPYY0xYGIZhGIZhGIaxx5iwMMqRlJTE73//e5KSkup6KEYtYPfz4MLu58GH3dODC7ufBxd2P2uGJW8bhmEYhmEYhrHHmMfCMAzDMAzDMIw9xoSFYRiGYRiGYRh7jAkLwzAMwzAMwzD2GBMW9ZipU6dy9913c84559ChQwcikQiRSITVq1fv0X6Lior429/+RmZmJs2aNaN169accsopvPrqq7U0cqMiCgoK+H//7/+RlpZGkyZNOPTQQznrrLP44IMPvtf+TjnllNL3RbxHu3btavkM6h+vvPIKp5xyCq1bt6ZZs2ZkZmbyf//3fxQXF3+v/U2fPp2LLrqItm3b0rhxY7p168att97Kxo0ba3nkRjxq634+88wzlX72IpEIb7311l46C2PRokU8+OCDXHXVVWRkZNCwYUMikQh//vOf92i/7733HmeeeSaHHnooTZo0oXfv3vz617+msLCwlkZuVERt39O77767ys/owoULa/ks9n+sQV49ZtSoUeTl5dXqPrdv386wYcP4/PPPadWqFcOHD6ewsJAPPviAjz/+mLvuuov77ruvVo9piI0bN3LiiSeSnZ1N+/btOeecc9iwYQNvvvkmb775Jvfffz+33nrr99r36aefHldEVLdhjhGfO+64g/vvv5+GDRsyePBgkpOT+eCDD/jFL37B5MmTeeedd2jSpEm19/fqq69y6aWXsmvXLgYOHEi3bt2YNm0aDz30EK+88gqffvopPXv23ItnVL+p7fsJ0KNHD0444YS4r3Xs2LE2hm3E4dFHH+X++++v1X3+61//4s477yQSiXDiiSfStm1bPvnkE+655x7++9//8umnn3LooYfW6jGNkL1xTwEyMzPp379/3Nfq5W9kYNRbrr766uCee+4J3nrrrWDjxo0BEADBqlWrvvc+b7/99gAIMjIygk2bNpUunzZtWpCcnBwAweTJk2tj+EYM5557bgAEQ4YMCbZt
21a6fMqUKUGDBg2ChISEICsrq0b7PPnkkwMg+PDDD2t5tMaECRMCIEhOTg6mT59eunzTpk1BRkZGAAR33XVXtfe3Zs2aoGnTpgEQPP7446XLd+3aFVx++eUBEAwcODAoKSmp1fMwRG3fz7FjxwZAcOWVV+6F0RpV8eSTTwY//elPgxdeeCFYsGBBMHr06AAI/vSnP32v/c2YMSOIRCJBgwYNgqlTp5Yu37ZtWzBkyJAACC644ILaGr4Rh9q+p7///e8DIPj9739fuwM9wDFhYZSyp8IiNzc3SExMDIDg008/Lff6n/70pwAIBg0atKdDNWKYN29eAAQNGjQIcnJyyr1+7bXXBkBwySWX1Gi/Jiz2HgMHDgyA4M9//nO51z755JMACJKSkoKtW7dWa38/+9nPAiAYOnRoudcKCgqCli1bBkDw1ltv7fHYjfLU9v00YbF/ceWVV+6REXrRRRcFQHDdddeVey0nJydISEgIgGDBggV7OlSjmuzpPTVhER/LsTBqjalTp1JUVESXLl04/vjjy70+atQoAL788kvWrl27r4d3UDNhwgQAjj/+eLp27VrudX/tJ0+e/L1j943aY82aNXzzzTdAeG+iOeGEE+jcuTM7d+5k6tSp1dqnfw/E219ycjIjRowA4LXXXvu+wzYqYG/cT+PgoaioiClTpgDx3x9du3Yt/c30n2PDOFCxHAuj1pg5cyYARx11VNzXu3fvTps2bcjNzWXWrFl06NBhXw7voKaqa++Xb9u2jcWLF9O3b98a7X/ChAlMnDiRHTt20LZtW4477jhOO+00EhJsbuL74O9XmzZt6NatW9x1jjrqKFatWsXMmTO59NJLK91fQUEBS5YsKd2uov0999xzpcc2ao/avp/RLFmyhN/85jds3LiR5ORk+vXrx4gRIywW/wAiOzub7du3A5V/Pj/55BP7fB6AzJgxg1/+8pfk5ubSsmVLBgwYwDnnnEPz5s3remh1ggkLo9ZYvnw5AF26dKlwnU6dOpGbm1u6rlE7VHXtW7RoQYsWLcjPz2f58uU1FhYPPPBAuWW9evXi+eefZ+DAgTUfcD2nOp+Vzp07l1m3MnJyckr/rmifNdmfUTNq+35G89lnn/HZZ5+VWda4cWPuvvtufvGLX9RwpEZd4O95q1atKjQ27fN54DJ58mQmT55cZlnLli154IEHuOKKK+poVHWHTTcatUZBQQEAzZo1q3Cd5ORkAPLz8/fJmOoLe+van3jiiTz55JMsWrSIbdu2sXr1aiZMmEB6ejrZ2dkMHTqUBQsW7Nng6yG1fb/8/irbp3329h574/PXrl07fv3rX/PVV1+xadMm8vPz+eabb7jiiivYuXMnv/zlL7nnnnv2fPDGXsd+Gw9OevTowT333MPMmTPJzc0lNzeXTz/9lLPPPpu8vDyuvPJKXnjhhboe5j7HPBYHID//+c+ZNGlSjbcbM2ZMhWULjbpjf76ff/rTn8r837RpUzp27MgZZ5zBiSeeyDfffMOvfvUrJk6cuFfHYRj1jeHDhzN8+PAyy4466iieffZZMjMzueuuu/jjH//ItddeS9u2betolIZRfxk9enS5ZccffzyTJ0/mtttu48EHH+QnP/kJF110EYmJiXUwwrrBhMUByNq1a1m0aFGNt9vbDXi8i3fbtm1VjqFFixZ7dSwHErVxP/f1tU9KSuLXv/41I0eO5K233qK4uJhGjRrt8X7rC7V9v6LDK7Zt2xa3drp99vYe+/rzd/vtt3PvvfeyefNm3nnnnbgGjrH/YL+N9Y+7776bRx55hE2bNvHVV19x4okn1vWQ9hkWCnUA8vzzzxOoVHCNHrGzX7VNSkoKACtXrqxwHd/V269r1M79rOra5+fnl7rYa+va9+nTB4CdO3eyefPmWtlnfcHfg1WrVlW4jn+tOvcruhJYRe+BmuzPqBm1fT+rokGDBqSmpgLhd6qx/+Lv+datW8uELUZjn8+DizZt2nDYYYcB9e8zasLC
qDWOOOIIAKZNmxb39WXLlpGbmwvAgAED9tm46gNVXXu/vFmzZvTq1atWjvntt9+W/l1fq198X/z7/9tvv60wWdPfM39vK6NFixalHbWreg9UZ39Gzajt+1kd/OfPPnv7P2lpaTRt2hSwz2d9Yffu3eTl5QH17zNqwsKoNc4880wSExNZuXJluSomAOPGjQNg0KBBVmq2lhk5ciSgCjLxZqz9tT/nnHNqLWRp/PjxgDwXPvHQqB6dOnUqrabl7000n376KatWrSIpKYkzzzyzWvs877zzKtxfYWFhadWS888///sO26iAvXE/K2PGjBlkZ2cDcPTRR+/x/oy9S2JiImeddRYQ//2xYsUKPv/8cyD8HBsHNpMmTWL79u1EIpEKSwwftOyrTnzG/g/V7Lw9ePDgIC0tLXjttdfKvXb77bcHQHD44YcHmzdvLl0+ffr0IDk5OQCCyZMn1/rYjSA499xzSzsvb9++vXT51KlTgwYNGgQJCQlBVlZWue1Gjx4dpKWlBQ8++GCZ5R988EHw4YcfBiUlJWWW79y5M7j33nuDSCQSAMGYMWP2zgkd5EyYMCEAguTk5GD69Omlyzdv3hxkZGQEQHDXXXeV2ea1114L0tLSgsGDB5fb35o1a4KmTZsGQPDEE0+ULt+1a1cwevToAAgGDhxY7n4atUNt3s9t27YFDz30UJCfn1/uOB9//HGQkpISAMEJJ5ywd07GKEd1ujQ/+OCDQVpaWjB69Ohyr02fPj2IRCJBgwYNgjfffLN0+bZt24IhQ4YEQHDBBRfslbEb8dmTe7pixYrgueeeC3bs2FFumwkTJgRt2rQJgODyyy+v9XHv75iwqMf88Y9/DI455pjShxcWAwYMKF120003lduua9euARCMHTu23Gvbtm0Ljj322AAIWrduHVxwwQXB8OHDg0aNGgVAcOedd+6DM6ufbNiwIUhNTQ2AoH379sHFF18cnHLKKaUC4P7774+73cknnxwAwe9///syy//1r38FQNC2bdvg9NNPD0aNGhUMGzYsaNu2bel75ac//ek+OLODl9tuuy0AgkaNGgXDhw8PLrjggqBVq1YBEBx//PFlBGIQBMHYsWMDIOjatWvc/b388stBgwYNAiA45phjgh/+8IdB9+7dS+/j4sWL98FZ1V9q635u2bIlAIKkpKRg0KBBwcUXXxycf/75Qb9+/Uo/exkZGcHatWv34dnVL6ZPn17m9/HQQw8NgKBTp05llkffg9///vcBEJx88slx9/nPf/4zAIJIJBKccsopwcUXXxy0b98+AIK0tLRg06ZN++js6ie1eU9nzpxZOpFw4oknBpdccklw7rnnlv4GA8Gpp54aFBQU7OOzrHtMWNRjvFqv7BHvC7IyYREE4Yx2v379giZNmgQtW7YMTjrppODll1/euydkBHl5ecEvf/nLIDU1NUhKSgratGkTDB8+PHjvvfcq3KYiYTFjxozgpptuCo4++uigffv2QVJSUtCkSZOgR48ewRVXXBF89tlne/ls6gcvvfRScNJJJwUtWrQImjRpEvTr1y/461//GuzcubPculUJiyAIgmnTpgXnn39+8IMf/CBITEwMunbtGtxyyy3B+vXr9+JZGJ7auJ87d+4Mfvvb3wZnnHFG0K1bt6B58+ZBw4YNgx/84AfB0KFDg8cffzzu/oza48MPP6zy9xEIli9fXrpNVcIiCILg3XffDYYPHx60adMmSEpKClJTU4Nf/epXcb1TRu1Sm/d08+bNwS9+8Ytg8ODBQZcuXYJmzZoFjRo1Ctq3bx+cffbZwbhx44Ldu3fv2xPcT4gEQRDUPIDKMAzDMAzDMAwjxJK3DcMwDMMwDMPYY0xYGIZhGIZhGIaxx5iwMAzDMAzDMAxjjzFhYRiGYRiGYRjGHmPCwjAMwzAMwzCMPcaEhWEYhmEYhmEYe4wJC8MwDMMwDMMw9hgTFoZhGIZhGIZh7DEmLAzDMAzDMAzD2GNMWBiGYRiGYRiG
sceYsDAMwzAMwzAMY48xYWEYhmEYhmEYxh5jwsIwDMMwDMMwjD3m/wOpFIo/RhllQgAAAABJRU5ErkJggg==", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAxYAAAJOCAYAAAAqFJGJAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy88F64QAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOydd5gURf7GP7OZXXbJOUeVYATMCfFERcB8Zgx3J4oBwx2emM74M2AC8ymenqcYQDFgQlFAFASVoJIl57CwLBv798dbNd0zOzO7C0uu93nmmd3p6urqmp6q7/uNIc/zPBwcHBwcHBwcHBwcHLYDSbt6AA4ODg4ODg4ODg4Oez4csXBwcHBwcHBwcHBw2G44YuHg4ODg4ODg4ODgsN1wxMLBwcHBwcHBwcHBYbvhiIWDg4ODg4ODg4ODw3bDEQsHBwcHBwcHBwcHh+2GIxYODg4ODg4ODg4ODtsNRywcHBwcHBwcHBwcHLYbjlg4ODg4ODg4ODg4OGw3HLFwcHBw2MfQr18/QqEQLVu23NVDcXBwcHDYi+CIhYPDHoK8vDyee+45TjvtNJo0aUJGRgbp6enUq1ePrl27csUVV/Diiy+yePHiXT1Uh70ULVu2JBQKbdfr66+/3tW3sUdj+PDhEfPZpk2bCp23ePFikpOTI85duHDhjh2sg4PDPoeUXT0ABweH8vHdd9/x5z//mUWLFpU5tmbNGtasWcOUKVN45ZVXaNCgAStWrNgFo3RwcIjG119/zYknngjAV199xQknnFCl/c+fP5+JEydy1FFHJWz33//+l9LS0iq9toODg0M0HLFwcNjNMXv2bE455RQ2bdoEQO/evTnnnHNo3749aWlprFmzhp9//pnPP/+cr776aheP1mFvxmeffUZhYWHMY5dffjlTpkwBYPr06XH7aNWq1Q4Z276IjIwMtm7dymuvvVYusXjttdciznFwcHDYEXDEwsFhN8ftt98eJhWvvPIK/fr1K9Pm5JNP5pZbbmH16tWMGDFiJ4/QYV9B+/bt4x7LysoK/92pU6edMZx9Hr1792bEiBGMGDGCJ598krS0tJjtpk6dyqxZswDo06cPb7311s4cpoODwz4EF2Ph4LAbo6SkhI8++giALl26xCQVQdSrV49rr712J4zMwcFhV+P8888nLS2NdevWhdeJWLDWiq5du7L//vvvrOE5ODjsg3DEwsFhN8bq1avJz88HoG3btlXS58KFCxk4cCAdO3YkOzubzMxM2rVrx9/+9reELiwLFy4MB30OHz484TVskG8sIhQMPl24cCEFBQU88cQTHHHEEdStW5dQKMTdd99d5rwJEyZw1VVXsd9++5GTk0NaWhpNmzalV69eDBs2jA0bNsQdz9y5cxk4cCCdO3emRo0aVKtWjdatW9OvX7+w+872YP78+Tz22GOcccYZtGzZkmrVqlGtWjVatGjB+eefz5gxYxKeHz0npaWlvPDCCxx11FHUqlWLrKwsDjzwQO6//362bNlS7nh+/fVX+vXrR7NmzcjIyKBZs2ZceOGFTJ48ebvvdVuRl5fHW2+9xVVXXcXBBx9MjRo1SE1NpV69ehx//PE8+uijbN68OWEfdo7s8zF27FjOPfdcmjVrRmpqaswsV6NHj6Znz57Uq1ePzMxM2rdvz6233hqOQ0r0rAYxdepUrr76avbbbz+qV69OVlYW++23H/3792f27Nll2tvfi42vADjxxBPLBLOX91tKhNq1a3P66acDPnmIRnFxMf/73/8AuOSSSyrV/6hRozj33HNp3rw5GRkZ1KxZky5dunDPPfewfv36hOdOmjSJwYMHc8IJJ9CwYUPS0tLIycmhQ4cO9O/fP2xBSYTZs2dz3XXX0alTJ7Kzs0lLS6Nx48YcfPDBXHHFF
bz11lsUFBREnPP1119XOFFA9PMUxN133x0+DrBx40buvfdeDjnkEGrWrBn3u9ueOduW+3Vw2O3gOTg47LZYu3atB3iAd9BBB213f6+++qqXnp4e7jP6lZyc7D3wwAMxz12wYEG43SuvvJLwOi1atPAA77LLLitz7JVXXgn3M3nyZO/ggw8uM4677ror3H7Lli3eBRdcEHfMsc4J4pFHHvFSU1PjnhcKhbw77rijgjNYFvPnzy93bIB38cUXe0VFRTH7CM7JzJkzvZNOOiluP926dfM2b94cdzxvvfVW3O84JSXFe+mll7zLLrvMA7wWLVps831H4/jjjw9fp7zj8V6tWrXyfv3117jXCH7X//znP8ucH30/11xzTdxrNWzY0Js6dWrCZ9XzPK+kpMQbOHCgFwqF4vaVkpLiPf/88xHnBX8viV7l/ZaiEXxWvvrqK++9997zAC8tLc1bu3ZtmfYfffRReIyrVq3y7rrrrvD5CxYsiHmNdevWed27d0847vr163vfffdduWNMtNYMGzYs7n2OGDHCS0tLK7ef6dOnR5z31VdfRcxPIiRaO4LzNHv2bK9ly5YJv7vtnbNtvV8Hh90NLsbCwWE3Ru3atWnRogV//PEHP//8M//3f//HrbfeSlJS5Y2NH330Ef369cPzPKpXr87NN99Mjx49SElJYeLEiTz44IOsWbOGf/7zn9SsWZP+/fvvgDuKxJVXXsn06dO59NJLOf/882nYsCGLFi0iPT0dgNLSUvr06cPnn38OQLt27bjmmmvo0qULmZmZLF++nIkTJ8aNK3nkkUf4+9//DsCBBx5I//79adeuHTVr1uT3339n6NChfPfdd9x7773UrVuX66+/vtL3UFJSQlpaGqeccgonn3wyHTp0oHbt2qxbt47Zs2czbNgwZs6cyeuvv07r1q255557Evb3l7/8hUmTJnHZZZdx3nnnhefk4Ycf5rvvvuOHH37gvvvu48EHHyxz7uTJk7nooosoLi4mPT2dgQMHctppp5Gens7333/PAw88QP/+/enQoUOl73N7UVxcTOfOnenduzddunShcePGeJ7HH3/8wciRIxkxYgQLFiygb9++/PTTT2RkZMTt67333mP69Ol07tyZgQMH0qlTJ/Lz8/npp5/CbR5++GGeeeYZAJo1a8agQYPo0qULBQUFfPrppwwZMoRzzjmnXAvQddddF+7nuOOOo1+/frRu3ZrMzEx+/vlnnnjiCWbOnMnf/vY3GjZsSO/evQFo0qQJ06dPZ/LkyVxxxRUAvPzyy3Tt2jWi/6ZNm1Z6LoM4/fTTw8/biBEjuPrqqyOOW0uGtdqUh4KCAnr06MHUqVNJTk7mwgsv5LTTTqNVq1YUFRXxzTffMGTIEFatWsVpp53GtGnTaNGiRUQfxcXF1KpViz59+nDcccfRrl07srKyWLZsGVOnTuWpp55izZo1DBgwgP3335/u3btHnL9y5Uouv/xyCgsLqV+/PgMGDAhbNPPz85k7dy7jxo1j1KhR2zV3FcU555zD0qVLue666+jduze1atVizpw54fve3jnb3e7XwWG7sKuZjYODQ2I8+uijERqrli1betdff7335ptvevPnz69QH4WFhV7jxo09wKtevbo3bdq0Mm0WLlzoNWrUyAO8zMxMb/Xq1RHHd4TFAvBeeumluP08+eST4XZnnnmmt3Xr1pjtSkpKvCVLlkR8NnPmzLCl4q677vJKS0tjnnfxxReH52XdunUJ7ysWNm/e7C1btizu8dLSUq9fv34e4GVlZXkbNmwo0yZ6Tl577bUybbZu3ep16tTJA7w6derEtH506dLFA7zU1FRv3LhxZY4vWbLEa9q0aVwN//agPIvF7NmzE57/+eefe0lJSQmfieAcnXTSSXGfh+XLl3sZGRke4LVt27bMs+x5njdhwoQIDXGsZ/Wzzz4r9znNz88Pa6pbtGhR5nupjAa9Ioi2WHie5/Xv398DvKOOOiqibW5urletW
jUP8EaMGOF5nleuxcJagmrWrOlNmTIl5hiCa8WFF15Y5viSJUu8vLy8uPewYcMG78ADD/QA75hjjilz/N///neFNPRbtmzxtmzZEvHZjrBYJCUleZ9++mncfrZ3zrbnfh0cdjc4YuHgsJujpKTEu+KKK+Kaxhs0aOCdf/753gcffBBTePY8ucfY9g899FDca73++uvhdg8//HDEsR1BLLp3757wvq0Q3LRpU2/Tpk0JrxkNO2ddunSJOy+e53nr168Puw698MILlbpGRbF27VovOTnZA7x33nmnzPHgnJx11llx+3nuuefC7X7++eeIYz/88EP42IABA+L2EXwWdiaxqAj69u3rAV6vXr1iHg8KevHceDzP8x566KFw2w8//DBuu4EDByYkFpYwnH322QnHPWvWrHA/n332WcSxnUEsJk6cGP5s3rx54bYvv/yyB3g1atTw8vPzPc9LTCw2bdrk1ahRwwO8p59+OuE4nnnmmTCJTeSaFw+jRo0Kj2PNmjURx+6//34P8GrVqlXpfncEsbjiiivi9lEVc7Y99+vgsLvBBW87OOzmSEpK4t///jefffYZPXv2JCUl0oNx5cqVvPXWW/Tu3Ztu3boxb968Mn188cUXgIIVrVtGLJx77rnUqFEj4pwdiYsuuijusZ9++oklS5YAcg+qXr16pfoePXo0AGeffXY4ADMWatasSefOnQEVItxeFBUVsWTJEn799VdmzJjBjBkzWLZsGXXq1AHg559/Tnh+ojk57LDDwn/Pnz8/4ljw+7r88svj9nHmmWdSs2bNhGPYGVi9ejVz5swJz9GMGTPCrjrlzdHRRx8dM1Dbws5F3bp1OfXUU+O2u/TSS+Mey83NDQf/nnPOOQnHc8ABB1C3bl2gap6hyuLII48MJ3d4/fXXw59bN6hzzz03oWuZxbhx49i4cSNQ/j0fd9xxgJ73H3/8MWHbvLw8Fi5cyMyZM8PfdWpqavh49PfdqFEjANavX8/7779f7rh3NBL9Jqtizna3+3Vw2B64GAsHhz0EJ598MieffDK5ublMmDCByZMnM2XKFL755pvwxjZlyhSOPfZYfvzxx/BmBTBjxgxAxckS+VmnpaVxyCGH8PXXX4fP2ZE48MAD4x6bNm1a+O9jjz22Uv3+8ccfrF69GoDbbruN2267rULnbWvF8qKiIl544QVee+01pk2bFreIHKhSeiIkSgdau3bt8N+2tomFzeiVlpbGQQcdFLeP1NRUDjnkkF1STHHChAk89dRTfPHFF6xbty5uu/LmKNFzA/7zfvDBByeMR+rcuTNpaWkxv69p06aFK1VfcMEFXHDBBQmvabGrqt5fcskl3HXXXbz++uvceeedLF68OEyMEhGoIIIZ0oLrR3mIdc9r1qxhyJAhvPvuu8yZMwfP8+KeH/199+7dm5o1a7JhwwbOPPNMTjjhBM444wyOO+44Dj74YJKTkys8tqpAouetKuZsd7tfB4ftgbNYODjsYcjJyeHUU0/lzjvv5IMPPmDlypW8/PLL1KpVC4Dly5dzxx13RJxjhbj69euX23/Dhg0jztmRsGOOhaCwUZkNG2DVqlXbNJ6KpHKNxrp16zjyyCMZMGAA33//fUJSAYTTB8dDZmZm3GNBIbmkpKTMOEDkozxBpEGDBgmP7wjcfffdHHPMMYwYMaLcZ6u8OUr03ADhtJ7lBSsnJydHkLUgduYzVBW4+OKLAZgzZw6TJk3i9ddfx/M8WrZsyTHHHFOhPqrqnn/88Uf2339/HnzwQWbPnp2QVEDZ77tOnTp88MEHNGnSBM/z+Oqrr7jpppvo0qULtWvX5qyzzuLDDz/cprFuCxI9b1UxZ7vb/To4bA+cxcLBYQ9Heno6l19+OY0bN6Znz56Asua88MILZbS1iVyCdgV2lCYuKHTfeeednHvuuRU6L1g9uqK44YYbwm4Nffv25YorruDAAw+kfv36ZGRkhOe8efPmLF68uFwha3uxu33HAF9++WU4G
1br1q255ZZbOOaYY2jevDlZWVlh974777yTe++9t9z+doYGN/gMPf/88xx11FEVOq880rOj0Lp1a44++mgmTJjAa6+9FrZIXXzxxRV+JoL3PHXq1Ah3pUQIZrYqLCzkvPPOY+3ataSmpnLdddfRp08f2rdvT61atcIZ3+bPn0+bNm0AYv4mjj32WObOncu7777Lxx9/zDfffMOSJUvIzc1l5MiRjBw5klNOOYX33nsvIRmvCiR63qpizmD3ul8Hh+2BIxYODnsJTjnlFJo1a8bixYtZv349a9euDWtsrVZ25cqV5fZjTfTRmtwgSbEuIvGQl5dXqbHHgvVZB1lhKlMx2MYzgFx/OnXqtN3jiYXc3FzeeustQH7YQf/2aJRXHGt7YQXatWvXUlJSklAYqshzUJV48cUXAY1x0qRJcS0JVWUlq1WrFitWrAi7w8VDSUlJ3O8l+AxlZmbusGeoKnHppZcyYcIEXn75ZbZu3QpUrihe8J7r1au3Talwx44dG47/eeaZZ7jqqqtitqvId52RkcFFF10UjnFYsGABH330EU8//TSzZ8/m008/5fbbb+fxxx8Pn1PRdaoq1iiomjmz2Jb7dXDY3eBcoRwc9iI0btw4/HdQS2mFogULFiQUtoqKisKxDdGCVHZ2dvjvRELyunXrWLt2beUGHgOHHnpo+O9vvvmmUue2bt06HIQ+YcKE7R5LPMyZM4eioiIAzj///Ljtfvvtt3KrSm8vbAB6YWFhwuDn4uLiiHoPOwMzZ84EVHk6kXtSVVRBB+jYsSOgBACJhMvp06fHrWR88MEHh39D2/MM7UwL0nnnnUd6enqYVBx++OG0b9++wucfcsgh4b+39Z7tdw2JfxPb8l23atWKAQMGMHny5LAAH13DpqLrVKxq6duCqpizeKjI/To47G5wxMLBYS/Bli1bmDVrFqA4jKAmrUePHoBcDl555ZW4fbzzzjvhQHB7jkWtWrXC2YQSCQVvvvlmlbj7HHTQQTRr1gyAl156qVKCeXJyMqeddhoAn332Gb/++ut2jycWiouLw38n0oA+99xzO+T6QQS/r1dffTVuu5EjR+5w60k07DwlmqNp06bx/fffV8n1TjrpJEBxOp988kncdv/5z3/iHqtXrx5HHHEEAG+88Ua51o94CGZjikdiqgo1a9akb9++pKenk56ezmWXXVap83v06BF2s3nqqae26Xdckd9EaWlp2Iq1LcjJyQkXGowO/A5mC0u0Tv3vf//b5usHURVzVh4S3a+Dw+4GRywcHHZjbN68mcMPP5wPP/wwoea1tLSU6667LpwpqHfv3hGa0r59+4atGffff384g1AQixcv5pZbbgHk+hErZalNl/j+++/HTGv7+++/lwkc31YkJSVx6623ArBkyRIuvfTSuIHRpaWlLFu2LOKz2267jeTkZEpLSznnnHPCqWtjoaSkhP/+978J28RC27Ztw/P86quvxhQqRo8ezdChQyvV77agW7duYSvPs88+y/jx48u0Wb58efg73plo164dAOPHj2fu3Llljq9evbpSLjvl4bLLLgv78t94440xhbHvvvuOYcOGJexn8ODBgFzezjnnHDZs2BC3bUFBAcOGDQtbCyyCiQdi/WaqGm+++SZbt25l69at9O/fv1Ln1qxZkwEDBgAwceJEBg4cmHDdWblyJS+99FLEZ/a7Bhg+fHjM82677TamTp0at99PP/2U5cuXxz2+ceNGfvjhB0Ba/SBq1aoVzuL0yiuvxHS5Gj9+PE8++WTc/iuDqpiz7blfB4fdDrumfIaDg0NFsGnTpnCRpiZNmnjXXnut9/rrr3vffvut99NPP3lff/219/jjj3udO3cOt6tRo0bM4mEffvihFwqFPMDLzs72/vWvf3kTJkzwJk2a5A0ZMsSrX79+uI9nnnkm5njGjBkTbtO0aVPvpZde8n788Udv3Lhx3p133unVqFHDa9u2rVevXr0KFchLVOTM81Qk7+STTw63b
9++vffEE09448eP96ZOnep9/PHH3p133um1a9cuZpGrxx9/PGJebr31Vu+TTz7xpk6d6k2cONF74403vOuuuy5cETdR1dt4OP3008PX6NGjh/fuu+96U6ZM8T7++GPvyiuv9JKTk7127dpVyZyUV6Rw0qRJXkpKigd4GRkZ3m233eZ9++233g8//OA9/fTTXqNGjbzU1FTvoIMO2qkF8t5+++3wscaNG3tPPfWUN2HCBG/ChAneI4884jVq1MgLhULekUcembDInj0W67uOxgMPPBBu37x5c++ZZ57xfvjhB+/bb7/1Bg8e7FWrVs1r2bJl+Hvp169fzH5uuOGGcD8NGzb07r77bu+LL77wpk2b5o0fP94bPny4d+WVV3q1atXygJiFHG2hx1atWnnvv/++99tvv3lz5szx5syZ4+Xm5pZ7L0HEKpBXGZRXeXvr1q3e4YcfHm5z0EEHeUOHDvXGjx/vTZs2zRs7dqz39NNPe3369PHS0tK8ww47LOL8zZs3h9eS5ORk729/+5s3ZswYb8qUKd6bb77pnXTSSR7gHX300XGf5csuu8xLTU31TjvtNO+JJ57wvvjiC2/q1KneuHHjvGHDhnkHHHBA+NzHH3+8zD08//zz4eMdOnTw/ve//3lTp071vvjiC2/gwIFeenq6d9RRR1W4QF552N452977dXDYneCIhYPDboz8/HyvYcOG4U2lvFe7du28KVOmxO1v+PDh4SrTsV7JycneAw88kHBM119/fdzzmzdv7s2aNavClbfLIxae53l5eXneOeecU+69xxM2X3jhBS8zM7Pc89PS0rw5c+aUO55oLFq0yGvevHnCOZk5c2aVzElFqp+/8cYbXlpaWsyxpKSkeC+88IJ32WWX7VRi4Xmed/nllyd87p544olyhbnKEIvS0lLvb3/7W9xr1q1b15s8ebLXrFkzD/CuvvrquP3cc889YcKW6JWVleVt2bKlTB+24nKsV3lV7KOxo4mF53lebm6ud9ZZZ1VozTnxxBPLnD9mzBgvIyMj7jknnHCCN2PGjITEoiLXvvrqq72SkpIy1y8pKQlXcY/16ty5s7d8+fKEz1NliMX2ztn23q+Dw+4ERywcHHZzlJSUeBMmTPDuuece79RTT/Vat27tZWVlecnJyV5OTo63//77e+eff773xhtveAUFBeX2t2DBAu+GG27wDjjgAC8rK8urVq2a16ZNG+8vf/mL98svv1RoTG+88YZ33HHHeTk5OV61atW8/fbbzxs0aJC3Zs0az/O8KiUWFmPHjvUuueQSr1WrVl61atW8tLQ0r1mzZt4ZZ5zhPf/88wk1vytWrPDuuece7+ijj/bq1q3rpaSkeFlZWV779u29s88+23vuuee81atXV3gs0VizZo136623eu3bt/fS09O9GjVqeAcddJB31113eevWrfM8r2rmpCLEwvM8b+bMmd4ll1ziNW7c2EtLS/OaNGninXfeed6kSZM8z/N2CbHwPM977bXXvGOPPdbLzs720tPTvRYtWniXXHKJ9/3333ueV74wVxliYfH+++97f/rTn7zatWt7GRkZXtu2bb3rr7/eW7x4sed5nlejRg0P8P7xj38k7Gf+/Pne3//+d69Lly5e7dq1veTkZC87O9vr0KGDd9FFF3mvvvpqwmfw3Xff9f70pz959evXjyApuyOxsPj222+9q666yttvv/287OxsLyUlxatdu7bXtWtX79prr/U+/vhjr7i4OOa5M2bM8C6++GKvcePGXmpqqlevXj3v+OOP91544QWvpKQk4bO8bt067/XXX/euuOIKr0uXLl6TJk28tLQ0r1q1al779u29yy67zPv2228Tjr24uNgbNmyY17VrVy8rK8vLysryDjzwQO/+++8Pk7+qJBbbM2dVcb8ODrsLQp63g5OqOzg4ODg47IZYsmRJRIKAK6+8chePyMHBwWHPhgvednBwcHDYJxHMDGQzQDk4ODg4bDucxcLBwcHBY
a9DXl4eubm5EVmZgpg2bRrHH388mzZt4rDDDquyGhoODg4O+zJc5W0HBwcHh70Oq1ev5oADDqBv37707NmT/fbbj/T0dJYtW8aYMWP497//TX5+PqFQiCFDhuzq4To4ODjsFXAWCwcHBweHvQ4LFy4sN+d/WloaL774IpdeeulOGpWDg4PD3g1HLBwcHBwc9joUFRUxcuRIxowZw+TJk1m9ejXr1q0jMzOTli1b0qNHD6677jpatGixq4fq4ODgsNfAEQsHBwcHBwcHBwcHh+2Gywrl4ODg4ODg4ODg4LDdcMTCwcHBwcHBwcHBwWG74YiFg4ODg4ODg4ODg8N2wxELBwcHBwcHBwcHB4fthiMWDg4ODg4ODg4ODg7bDVcgrwIoLS1l2bJlZGdnEwqFdvVwHBwcHBwcHBwcHHYKPM9j06ZNNG7cmKSkxDYJRywqgGXLltGsWbNdPQwHBwcHBwcHBweHXYLFixfTtGnThG0csagAsrOzAU1oTk7OLh6Ng4ODg4ODg4ODw85Bbm4uzZo1C8vDieCIRQVg3Z9ycnIcsXBwcHBwcHBwcNjnUJFwABe87eDg4ODg4ODg4OCw3XDEwsHBwcHBwcHBwcFhu+GIhYODg4ODg4ODg4PDdsMRCwcHBwcHBwcHBweH7YYjFg4ODg4ODg4ODg4O2w1HLBwcHBwcHBwcHBwcthuOWDg4ODg4ODg4ODg4bDdcHQsHBwcHhypDt27dmDx5cvj/rl278sMPP+zCETk47NkoLi6muLh4Vw/DYS9AUlISqampFapHsa1wxMLBwcHBYbvgb1JZQCpQHxnES5k8eQ6hUHUgDwDP8yrU53777cfs2bPD/7dv357ff/+9Ckft4LB7Y8uWLaxZs4a8vLxdPRSHvQipqalkZ2dTt25dkpOTq7x/RywcHBwcHLYJSUlJeF4KUAdoAfwJ6Ai0BzKBLcBsYCbwGfAHoVAaoVAxpaWlZfpLRFBmz14VQVCys7PZtGlT+Nzs7Gxyc3N3xG06OOx0FBYWsnjxYlJTU2nUqBHp6ek7VMvssPfD8zxKSkrYvHkzGzZsID8/n2bNmlU5uXDEwsHBwWEno2fPnnz66afh/0855RTGjBmzC0dUeUjIyQE6ATcCvYH0GC27mfe7gQ+Ax/G8mYRCobD1Qn2lUj5BmQ58Cixm06Z80z4VKGXTpsII4hFEy5YtWbBgwXbfs4PDzsKqVatITk6mRYsWO0Sr7LDvonr16tSoUYNFixaxZs0aGjRoUKX9h7yK2qX3YeTm5lKjRg02btxITk7Orh6Og4NDAnTs2JFZs2aF/+/QoQMzZ87chSMSymrj07DaeCgEiqisu9Cugu6lPnApcCeQXYmzNwH/Av4DrDKflUdQCoBliGB4wE/Ay8Aa4ALgJOA9YASwFs3rnju/Dvs2PM9jzpw51KpVi3r16u3q4TjspVi+fDl5eXm0adOmXGtYZeRgZ7FwcHDY45HIhWbWrGXb5ONfVcjIyKCgoJTKuQulACXhPnanAGifVAwCBkYd3QT8gQT5NHS/0aQjG3gEaIisGBlAP+ITlLXASrRdNcEnIWcCjyKC8jQiEE2BPwMdgNZANURK5hLtjgVFjmA47JYoKiqipKSEatWq7eqhOOzFyM7OZsOGDRQVFZGWllZl/Tpi4eDgsMei4i40u0aorJy70Gwz5k+AeUgzL8K0rQHQQbRp04b58+fHPHbEEUfw3XffldtHUlISup9L8UnFb8C7wBRgUYyzmgNdgLOB/QOfN0UE5WLg74gURmMlskrUAhoAQZeQLOBKRMD+DVyGLCF2fj1kpSgBDkVE526sOxZEumM5OOwusPFHzgXKYUfCPl+x4t22B45YODg47JHYHh//nSFURroL3QysR0J4GhK2rSC9GHgQ+B6oDZyBBPBsIB9ZXtZT0QDosmPAXCvJvKeYv1MQcSlh0qRZEcSlU6dOzJgxI9xPp06dmD59ugnU7oSsC38ADwE/mHF3R5aH1sgKs
RWYD8wCxiJXpW7I0hECnkJk4DzTV4eo0a9FpKI+EMsdZAGQC/zF3Mvnpn0Te/dorouAYvOeDpwL9MS6Yzly4bC7YmcHa2/ZsoUlS5ZQWFhIWloaTZs2JTMzc6eOwWHnYUc9Xy7GogJwMRYODrsXqtLHf0csgRpfHeAIZFFZjDTo4RZAMyTo/4608deiWIGgSXolsML01RS59fjkCHJjjt+35OQAjZBLUK75+0REXBqbMa1DQvpc4BtgKSI0mdjAaLk2FZh+nkaWg6eQJSHWuKNRCHwJDAU2oO8rBXjLHFuECIn9HguQ1aamGWc0liDi0RDN3WbkAtXKjC+IoOUijUirx2PAw+yo58DBYVuwdetWFixYQKtWrcjIyNih15o/fz7vvvsuX3/9PXPn/kFBgYfnQSgE6ekh2rZtwQknHM7ZZ59N69atd+hYHHYuKvOcuRgLBweHvRaJffwrgqCP/8NlNNaHHHIIP/30U9yzDz30UH788ceYxw477DCmTp2KBPJkRGJ6EluT/x9gHIoVuMYcjxbO6yOh2LoDZRFL4167dm3WrVsXOC8LOAg4CvjOnHsPkQQgH5GKusiCcgKyIEwBXgUWAgeaz4qQu1EectkaBZyFLDGxXJiikQacChyHBPkXkaUiC5GVVESgLLFYhranWNlK8hCpqBM4Xh0RnDvQ3AYFIEuySs19BInFQHPdF5zlwmGfwrJly7jvvof49NOJ5OXVJiWlOxkZl5KV1ZpQKAPP20pBwXx+/nkWP/74Oc899xannHIUgwcPonHjWGTfwUFwFosKwFksHBx2D/juTxcjl6EtSLN+GLHdZRKhFPgH8ALS5oPvKpSCCEAychtKxcY7SJteTNm0pvbcdHNOyLTLQm4+tyBrAcBI4H7gb0jAXoU06g0QCQiiBJhj3jsGPn8NCfZboq4btDDkAJcDg/EJQAEiDXmmvX0FzeIFiPi8bT4/HlkJmiB3rr8iwpFKYv1UrO1lJSIp7yEi0Nf0vR7ojAjPfHOtmjHOn4m+l3ZEkoQC9Ez0AG41nwXvybpDRVstchFRm4LnFSa4FweHnYMdbbEYNWoU99zzGMuX1yQ7+1pyck4iFEqN297zisjN/ZJNm4bSqNFG7rrrZvr27Vvl43LYuXAWCwcHhz0K3bp1Y/LkyWU+39YMRyIV9vU20pqH8F1dYgnwFnlIeLXZiuog4nAy8F+0FDZDBGUuEvTrm37aInecIiTEr0CC+fvmvToKHD/ZXL8dfnzEHGAG8v+/ALkiPQgMAfqg+ABMHyuB5cBGJOwWILLQChGORebz8YgslCJLQzBgvZq57hvAaGQpOcOMoyVyGVqFBOtMRDyqUZaU5CMrSm1kXfjJ3Pda4HxEiIrMy0MEw8IjNqGw2BwY/2NAVzOGNaa/dej7iLV55SKC0NjcQ/A6aej7Cj5bwePJ5txiIolFDrJc9HdWC4e9Hi+//DL/+tczFBX1pXHjm0hKKj+GIhRKpUaNnmRnH8fKlUO48cb7WLduHVdcccVOGLHDngZHLBwcHCqEI488kkmTJoX/j5VJyA8Gs+4t9ZDA6vu5T548nVAoEwmv5Wc4isz81Bw4BWm22+FnfoolwN+N4he+R0K5FXhL8IXhVabPnubzUebcR5Hm2wrMlsDkmnMeQRr2LsB1ZkxFiBxYwbWDOe4BtwMfAU8CFyKB/abAXS4A3kGkwWZWCmrbG6O4iMlIY98BP2A9LXBvIUROfkSk4nYU07ASxSwkmTlriMiPJWpBVANqIDJzPRL0XzPz08h8Br4Vp9i8Rwv60fcAPnGpg4T571EA+EOm7RZEAnPMWKOx0ly3RoxrYeblPXxLVjQseYrGGYjwrY1xzMFh78CoUaP417+eAfrTqNGVlT4/KSmTRo0Gs2ZNQ+699xnq1KlDnz59qn6gDns0HLFwcHCIi0T1ISZN+i2q0nE6EpibIteZ9kgYtprnP1CGoC9QgPAWoDBhhqPIzE/XIT/9HMoKrF3N+13Ik
jAECdaNgEtQkHdzcw8FKJB4NCID85BWHnzN+UuoyvPZQBt8ob0mEug/Q65AFwEfIw18olSrfYFeKI7hYSSot0MEZgIwCT+z0uX49R+CmZWeNGO4EgWsBzX6VsgOISG9JiIuSabfXHyBviG+0B49j/Z/e781UCzHWERY+iMiU4KIRAo+aQxalGL1jWmXj+YqCVltnkXPBvjuSqWIFFoLU1P0DOab+4k37tZmPEuA/aLmBnPNEtN/kLhkIHI4O8aYHRz2fCxbtox77nmMoqK+20QqgqhT50pWrFjB3Xc/SteuXV3MhUMEYqmEHBwc9nEkJyebeg91UA2A61C2ndHIKjDa/H8dEuZqoEDfe4DXkbXgGhQs3MW8zkI+9TY70GFIE98Kz8suk/rOD9L+KzAGaZWrEVtgtdhixvECEoJTkOb6VERyMpCA/joiQtcBTyDh9kUUtHwXSov6Jco0dB0iQh6KyfgMuQLlAVcgobububfhwJvm/Q7z+deI2NyKiMKDqG5DCSIJHwADkEXjNjNPnZDr0kEouHu1uZdBwH2IIBWbe7YkIIRI0ndm7m1MxWrkXtXAzHcswT/achH8OxuRxCbAMWjbsC5Q1poUtFzEsoLMQ4TqAjNnVyAC9RwiLBeg7+UuRIh6I9J2uXk/wczLK8hqEW/cNt1wYZw29j2axG5Ez3AoHAzv4LA34b77HmL58po0aHBT+Y3LQSgUon79gSxfXoP77nuoCka3bZgzZw5XXHEFrVq1Ij09nerVq9OiRQtOP/10XnnllTLtP/30U3r16kX9+vVJS0ujcePGnH/++UyZMiVm/y1btiQUCrFw4cKYx/v160coFGL48OFxP58xYwbnn38+jRo1Ijk5mbvvvjvcrri4mJdffpkePXpQt25d0tPTadq0KT169ODpp6Oz2wlffvklZ511Fo0aNSItLY369etz5plnVqgO0c7CbmGxePvttxk2bBg///wzhYWFtG3blosuuoiBAweSmho/oCgaw4cP5/LLL0/Y5pNPPqFnz57bO2QHh70W5deHWIlcWFKQcJ2N/O7PQYJbOpEByEGhzkOa7p7IqjEEua4cAfwU9nEvm/mpFAnl1q1qC5ExE03NZ6uRtr4B8ADS/D9jPgshMlQHBU6fYu6hwBxbh9yG2pt7vhmfBP0ZWR1eNWP9zNzjfYg8xVqnOiGNfLCfC5DgfKOZx5fN2J8xY7nEjCUFX3M/GcVM9DPnQWR8Q3AZf8+MqzsiLkVIcK9N7AxLFtGpcKNRhL4vzJhC+IHQBMbrF/UTliILyndmDCciVzBbwHArsmD9F5HHhsgFrRuyOASzaM1EpPYblO1qEH7NCosC8x4r9W00CfkFkcCpaI5snE4m69cX7NJq7Q4OVYn58+fz6acTyc6+r0IxFRVBcnIW2dkD+PTTweEA4J2JGTNmcPTRR5Obm8t+++1Hr169SE5OZsmSJXzzzTcsXbo0Qh684447uO+++wiFQhx11FE0b96cX3/9lREjRvDuu+/ywgsvVHnMyMSJE7n66qtp1KgRxx13HPn5+WRnK/Pdxo0b6dWrF+PHjyc1NZWjjjqKxo0bs2LFCn755Re+/PJLrrvuuoj+brnlFh577DGSkpLo0qULxx57LIsWLeL9999n9OjRvPjii+XKwDsDu5xY3HjjjTz55JOkpKTQvXt3qlevztixY/nHP/7B6NGj+eyzzypd1r5NmzYcc8wxMY81aRK9ETk4OFjErw/xGdI4z0buKKlI4EsFrkYuQ+loSdmEMgfZKsuxBFUPWR8GI4H3eWRV+DhAbGx1Z5uFaSHwIRK0/yBSGC414z7cnGeDc/+Cgo6tlv9c4AYkfIfwSUU6cptKxbcO1MJPkToE1Y4oNdc+CxGEimzSqVH9PIB8+W9HblCbTX+PIjesG4kMNL4CCeJ3RPUZtBIUo5iPr8x1bCzDUtM2SCoswYsmARbRQvQWlP71ZPxMU5hr2D7seEvwDeEjzf3WBO7FJ2D2u
Ukxbd5GsTj3IEKx3vQdzJDVCTgdpZSdgIjYn83/h+ATzF/NeJrGuC97b4uQ1WgqSrV7EorNaIq++2z0He+aau0ODlWNd999l7y82jRpclKV9pud3Z1ly2rzzjvvcOutt5Z/QhViyJAh5Obmct9993H77bdHHMvPz49IHDJmzBjuu+8+MjIy+OCDDzj55JPDx/79739z1VVXcfXVV3P44YfTsWNHqgovvvgigwYN4v777ycpKdJB6IorrmD8+PEccsghvPfee7Rs2TJ8rLi4mI8++qhMX4899hht27bl3Xff5cADDwwf++abb+jVqxdXX301xxxzDO3atauye9gW7FJiMWrUKJ588kmqV6/OuHHjOPTQQwFYs2YN3bt3Z/z48dxxxx08+uijler3mGOOKWOacnBwSIzY9SG+R4LtWiR4XYyEvJrIbeVUFMybj/z4tyLBbityf2kbfRXzskIpwFVIez8G392lI3ILykPE4FEiKzxfglyw0vGDt39HdSE+Ao4099HYjH8MIjKD8IXyUvMKWhvq4gdh27iSTERGpgPfIvecSE1SxZBJJJGqi8jJtcjVpyESmOuieU5CGvq1SOiuHtVfCn5xua1m3CvQ95OGn92pIWUtKpZcVARLTNu2iDQ2xHfDKg70beMXUpAl5llksbmJyFoXdu7/bdr0NnNQAz+OZTWyIhQTmUY4FcWmHAmMQM9ebfy5saRkGH58TBD/RVaruohcnBwYfz4Kom+FXL4gVrV2i9NOO63M5u/gsDvi66+/JyWle8KUstuCpKQ0UlK6M27cD+xkXsHKlXKJPO2008ocq1atGscdd1z4fytDXnPNNRGkAuDKK69k1KhRfPjhhzz55JO88MILVTbG9u3bc99995UhFT///DPvvfceGRkZjB49uozCOyUlJSIovrS0NOxC9eabb0aQCoDjjjuOO+64g7///e88//zzlZaZqxq7NMbigQceAGDQoEFhUgFQt25dnnnmGQCGDh3Kxo0bd8n4HBz2FSQnJ+NbCW5EguSNKK6hFnLjmYBciC4APkEC35lI8NyMApWb4btNbUSuRbFgCYbVeA9EAuViM47rkBvMh2ZMi1EQ8UcoPuEUJDQ2Rdr805HL0bvI0jIX1Yd4B5GTAWY8f+BbM0rwte1BNDCfrQh89hMSOi9B6V2L4txXPASF+KuQW9VjwMHmHh5G1pWzUEXrhWh5HmKOnxGjzw1oXmz61XQz7rbm3I3ET9saHFcoxisIG6/QAN13HpEE0cK6qY1ChOFqZGWxpMLDD5oOthmMSFfIHEtG5KUuysBlC/+tQKmEL0AE8s9oDv8PEbWXzfELkXvV+fjxMaBYmsdQEP07wGlEZv2yblRBa1EqcvEbg2J9bPKC+nz88URCoeqEQqEygoODw+6CLVu2MHfuH2RkdNgh/WdkdGDu3IVs2bJlh/QfD926dQOgf//+fPrpp2zdujVmu+LiYiZMmAAo9iEWrrxSwexfffVVlY6xb9++Zm+NxJgxYwA4/fTTK+RFM23aNJYtW0abNm047LDDYrY54YQTALlf7WrsstVw6dKlYVPVhRdeWOb4McccQ7NmzSgoKODjjz/e2cNzcNgnEAopWLW0NBlpuq27zYVII9wPkYizkRb8N6QlfgsJ2lchi4ANsL0S+fnnIQFtXrwrE0kuspDWehIiKKeiAm0Po3iMtxCZKEECoI0tSDOvdPOeadr/F/nq32vGcxbSbI/EF5yt2060IC3BUWSp0Bx/DhGpOyhLOiqLIJEagojKr+a6N6MYkHtMuznmeFrU+WuQRj8HEawcfLKTYdpsRpr8shtbYkRbMuy1LYlYFWgXHZuxDL9Gx1VRfdq2K1Ggdh/0vAQzWtn3ECIWtdBcj0CEYhEimB8ii9bF+HUw6iE3sMHm+L3oGT0HEeBnzPX+SVkXtlJ8Fz8bm1KELEH2cxt0X9e8nkTE5VA8rzahUFql3XZBe539HYZCobhuvA4O24IlS5ZQUOCRn
t66/MbbgPT01mzd6rFkyZId0n883HrrrfTo0YPvv/+enj17kpOTQ9euXbn55psj3KDWrl0bJh3x4kDatJFlc+nSpTGPbyuC7k1B/PHHHwDsv//+MY9HY/78+QDMmzcvYq0IvizRWr169fYPfDuxy1yhpk2bBkDt2rXjftldunRh8eLFTJs2jQsuuKDCfc+dO5fBgwezatUqqlevTqdOnejduzd169atkrE7OOzpSElJoaQkCQmx6UhYvxH5lw9EGuG/IxcgkKb/HlQfIQtp7o9FQm0wwHYWypL0LtLGn28+i9aWBQVN+/eJSIA7EfgUZWm6GmnyC/GFfFs5OagxD2raPSSQ3oHvdtTC9Dsl6rrxdCvZaHlcj4T46SgIuToiHUvx/fq3BVnIijIYzeVm5P5TDwmrt6GA53xE+IJYH2hr17Rgkbqt+AQsVmpei6BLWnDuotHUHJ+Pvsdi5J5VO3COJWiP4ae6tQhaNUKILNbCd7eLJhbBtg1Qhq0RiNwOxCcFReY+ayDrzAr0nS1Az0czNL+voTiOTigAfaYZbzX0XVY3Y9iEvvd0IuNQSs11CtHzuBw9m0XIOjYYpRx+nK1bZ1aoyJ7vUmUtZjXR76iUCRNmusBxhypDYWEhngehUNVX8AYIhdLxPF1nZyIzM5PPP/+cyZMnM2bMGCZOnMjEiROZMmUKQ4YM4ZprrmHYsGE7dAyxUqQHsS2KhkTXadiwIaecckrCtruDnLvLiMWCBQsAaN68edw2zZo1i2hbUUyYMCFs+rLIyMjg7rvv5h//+EclR+rgsGehTZs2YQ0HQOvWrZk3z7cclM36dDVKbdobxTFEZyB6HWlnrV96ayQINTTHreDTEbns3ISyIA1Dwv155nhtc160MG5JQTF+LYyn8TXaW5Fwl0Kk60p0H9ECWBJyX1mN3GAuRhrvLUiojHeeHU82Eu7+ix/kC5GkI16mpVhZq6K15N3Nvc4z9zURBbw3Mm0fx48psLDxHzURsQiOPUgA7DlpJE7PC7HJRJBoZCJiNgN9xw2QEB9CBMGePw/F5Nxnzon1fdh0uPcG2tgsU7HIzQfIbelviGCG8LNlWdepFPQsLkVZoqyloQQRuL8hV7F3kcWtB35M0EJzfl3Tvg6RNT7s85FijhejtMETEOm+xLTtgyxl/wL+E5NcRNaEqYm+m2C1801m3MehJAR+4HhycinFxcU4OFQWaWlphELgebFdhbYXnldAKKTr7Ap07dqVrl1Vx6i4uJhRo0Zx6aWX8swzz3DOOedw7LHHkp6eTkFBAfPnzy8TnwC+RSDaLcne06ZNm2Je21oeKgsr9/72228Vam9l4Tp16uwR8cO7zBXKflFZWVlx21SvroC83NzcCvXZsGFDbr/9dr7//ntWr15Nbm4ukydP5tJLL6WgoIBBgwaF4zoSoaCggNzc3IiXg8POQOfOnSPMm507d67Qef451Zk/fx3SxDYE6jN//rqwL3jZ2hDnIkHsT0ioj85A9CSqMm390nsiQSuD2K4w1qJwKtIy90IuTW8hF5p5KC7AFlQLuiItNP1ORILXQHwhMpWyAchBTXc8ra51L6qH/O7z8IOR7bkRMxn4uxrS+k9D1g67cSbhk44g5qO5Ohe5hUXXYTjXHLekL830O8Pc462m7V+RReBbJAC/GThnBdJwW0IT1HxbAjCLSEtOIo13LGE+FhHphubPFtyrjQjb6sC575nPg5ln7DEbf/EufgC+fX5ssb1oWLeqM9E8FiMB3Fpn0s27/T5SkeUnG79SeSPT/i8ormUoEt6bmOPNzbWXmWva79gz19tqXjYzGab/G9Czejd+JrQ/EHm9HKgbJhL63VW0JsxBKA3uEPRbexboQklJVpk6Lxb169cv4xZhBREHh6ZNm5KeHqKgYH75jbcBBQXzycgI0bRpvExsOw8pKSmcc845Ya3+Tz/9REpKSti9MJ5Q/vLLLwNw4oknRnxuicavv/5a5pwVK1YwderUbRqnL
Xnw8ccfs2zZsnJaizzVrVuXWbNmMXPmzG265s7EXhVx1rNnT+677z66detG3bp1yc7OpkuXLrz66qvhKPl//etf4WwC8fDggw9So0aN8Mst0g47EkFSMGPGEoKkYMaMJVGkINa5FRVaDjbtWqGq0dlICE1GmujPkEB7ozn2OgqIvRa5fGTi+/FbASw64Df4fxZyE7kGxTZ8Y9rkIXeVDVF3Yy0T81Fxu2r4WYaijasVEZbt31nm/hcgK8NiYgvU0UhDgv0y4ICotpZ0lJrj1yO3ry9Q4b/BKLvV/8z7YPO5DSq+HhGpdogspCPXKHvOcHPOpSgt6vnoe5iPng8riHtRr24o5axNQxtN/iqD4HlnmPHajdRmcFpnxrQBWSu64xPAWMTve3yXN0s2gq8goiuIl6L7tkHqwWtYcrEVCfwp5rNi9F3VQM9htunXjqsmCna32cAWIvJSgB+Dk0pkHE8aShbQHBHFDsjF6nwUBzLaXLN6wDrYFZGEiSjd8MXou+pk3i9G1sCJpt1hiAQNQG6BChyPJCtaM1avLiS4ZkBNlixZH0E0HPZdZGZm0rZtC7ZunbVD+t+6dRZt27YkM7Nq6mNUFM888wy///57mc9XrFgRLnjXokULAG6++WYAnn32Wb788suI9sOHD+eDDz4gNTWVG264IeJYjx49APi///s/NmzYEP589erVXHrppWzevHmbxn7wwQfTp08f8vPz6dOnD4sWLYo4XlxczAcffBD+PzU1lbvuugvP8zjzzDMZP358mT5LSkoYO3YskyZN2qYxVSV2mSuULRKSlxet9fNhv7ScnERZTSqGG264gQcffJA1a9bw2Wefcckll8Rte9ttt3HTTb6PcG5uriMXDlUObfipSNhvgawGHfGLh21BdSNi59Mvv5CdRTckLF2Ishv9F7k6XYaIRJq55m34GYgWImvFWUjba2GLoEUHPScSXv6ChO/XUGEzK/jZdKLWJzQDuafsh7Te1qph08OWJyAnGkN3ZLVYgoR3q1WP7jMYb5CE7/LTMqqtJVbvIuJWE79WQ1qMfjtRtujexUQWFTwVzX8G0qjXRG4xDZHV5GkU9zIIafFjuXCdjSxF3yICaS0+iQK4gwQknq5poenndeT2Y+NdMtH3MxdZoy7Cz7Zl59Km9d2MtPq2CCDo+7dpaq27WzIiK0GXKWstiCYtBP6vhgiOvV8Pv4BjGiIXf0OxQgvM/Nh7bmrarDD/NyH2nNlxV0cWhWXo9zsX38XJM38Xm7Gfg0hNW2L/PoNIR5Yt361KVpfN6Dm426QMjbVmVENEOHLNgNwwuXDxGvsmTjjhcH788XM87+YqTTlbWlpIcfFYjj/+5PIbVzFeeOEFrr32Wlq1akWnTp3Iyclh9erVfPvtt+Tn59O9e3d69+4NwKmnnsrgwYO57777OPnkkzn66KNp3rw5v/32G1OnTiU5OZnnnnuuTA2La6+9lhdffJGpU6ey3377ceSRR5KXl8fkyZNp3rw5ffv2ZdSoUds0/ldeeYXTTjuNSZMm0a5du4gCedOnT2f16tURv9cBAwawaNEiHnnkEY499lg6duxI27ZtqVatGitWrOCnn35iw4YNPPvssxxxxBHbPK9VgV1msbDR8osXL47bxh6LF1lfGSQnJ4eLhpSXvSA9PZ2cnJyIl4NDVUIbfXUkbPRDLhU3Ub4mswuQbc6vhQTJd5AwsgH5pL9p3oOWuRVIYL0QCcNnoADUd8zxVUggsRmI7kEC/y2BPqzwVxEhP+JuEfGphWoX1EOCaRZypdlg2rU01zgOXzCs6CZYnlY2zfSbiqw08ymbLtXC3pv1fQcbWOsjCbn+/B+aszeRMFien3EKym71lmk/HJG7uvjWmoX4grSNPTkVCfWnI433vwPHg6/WqMbDs/gBx/ECDBN9h8FjeShepidy+RmCLAMeIratzBg9pMW3Gn8bcG/JjTX529oSlqRaa4CNnyhA5KgWImpBImuDzYMB+JbAp
Ef1W2r6qmaOp5j+aqL5j77/+khg34BfODHaIhdEJzTH9yPCUgNZLV5HbldZyDLV37Sbi75rTP+r0W90NX6qW4tsZA35u+mnIQpAD5nr3gq8gDJc2TWjM2XXjGeQ9UOKPGe92Ddx9tlnk5W1jtzcL8tvXAls2jSWrKx1nHPOOVXab0Vw//33079/f2rWrMmkSZN4++23mTVrFocffjivvvoqY8aMISXF153fe++9fPLJJ5x66qnhitvLli3j3HPPZeLEiTGrbtesWZMJEyZw6aWXAvDJJ58wb948/vrXvzJx4kRq1KixzeOvVasW48aN49lnn+Xwww/np59+4p133mH27NkcfPDBMQPPH374YSZMmMBFF13E5s2bGTNmDB999BHLli3jhBNO4KWXXuL888/f5jFVFXaZxeKQQw4BlAosXjl4a84K1rjYHqxdq0XdWkscHHY2kpOTTYaH6kig2oCE0v8hgSgTuVbcgrTsFkuQlrU+8lGvhn6+3+JnXLLClXUxKTLt2iFh4yxzLBNpzkFWiXR8X/JOKMD1RyScWPO27c8Kb4X4AdAVQTYSsP6FrBI1zXs2fjG6JPNqRewaExUhNImOt0JzXgsJrjcTX+iGyKxP1jXGEp33kaA3gMi0quWNwcK6idnUslnoe8pFJM/Wp0gzn5Wi+b4dxQ88h8hInxh9D0IB86+jmJlaxI9jsIjnNuWhIPKNKLvWDyg4O4Rf46QECcxJ+NmNCvADq4NB0Jjjtkq3dYkCPVul5tgP6PmPHrMlEcE+7d+2H/udBt327LWzkCuWTe4RLWhb68AflM1kFo12Zvzz0HfZEs1RA2A8cmMbhMjYamRN+AMlEIBIvZ5VdqWaMdQx/w9ERPh1ZMGsiebtDfMCPQ9dkNLgAPQ7Xo8IYSdkHXsFPa+5LuPUPojWrVtzyilH8d57w8jOPo6kpO13WyopyWPTpqGcddZRcTN77kicfvrpnH766ZU6p2fPnuH4hoqiSZMmvPrqqzGPDR8+PGbcRrzPo5GWlsbVV1/N1VdfXeHxHHXUURx11FEVbr8rsMuIRdOmTenatSuTJ0/mjTfeKFOSffz48SxevJj09PSYlRUri6lTpzJ79mzAL6zi4LCzkJmZSX5+MRLEmiOtdWcknFi3pzkokPczJCQ0RML9x8gtpDYSivohwe11pAXtiFK/NkNCRg0khM4P9HcbyoxkLQbrkfC5Gblb/Iifgeh/SGgNmretW1K6eeWb6yRCdEDwCcid522kcc3Dd1tZYfpMx8/QE8taEYtcVEQ48ky/mcAhwGS0/FnCEEuAzUffFfiCZgl+HYa+lCUV5Y3B3oPFecht5VskcLZAZGs5SnObhATFDfia9yvRfD2GfPcbR12nEdKUP4AIbH/8wnPRSKTB9tDz8j4SnJsgF6yv0fe4FpEMSyqS8SuvY/4PCs+WpG1G5CFW7EwIEbol6DmPJiW2vyAJstcrjfN/tGG+E7LUbUC/xyCSEDFYhJ9+1l4vGtVM+0IUa2K/l7vRb/smM5bq6LlejX531cx1M/FjR2y9jFxEKpeg+XsUpUjuhH4/xyCrlHV7simev0Tf04FmHE3M2OuieX8UBas/Zd6XEu1a6bB3Y/DgQXz33fmsXDmERo0Gb1dfnuexatXjNGq0kcGDB1XRCB32FuzS4O1//vOfADz00EMR0fVr167lmmuuAeRXFjQ3jRw5kv3335+TTjopoq8tW7YwbNiwmGnBvvnmG84++2xAxYgcsXDYmQiFQuTnpyKt4lAUxPwACvTsggTWLub/+5E2dRgSHi407e9FRb/+gQSeR5Fw9giqG/EgSht7IvIXr4ncZ+5H2tOhSMA5Dd/NqT1wF9Ig/4rvMz8ZuYwE/dltxeQCJKhsREJ2hWfBnNcdaaNtEbo8JGRtRkKX1UDnUTXLkxWYNiFBLQuRhUVIMLMBvrHO24RIWGNkxbGuOg+ZMd8Q57xE44gW5PORZrsx+l7An5v6+C5Hq4hMyzoQE
buHKAsPxXNchzTV96H5LYpqkwh56Nl5HhGTvubzzcDPSJj+ClktRiNNeRISdGPdq3WbKkJkOFaWLwubtSuoBQ32Gc9FybqtBRMLQFmrVFvzXjbTi1DDjG0Fsa049rr55lhtpBhIQvPkoargmfjF9kLoO26Mnrc8RCDmmvvNQL9bm6nqG0SsVqDv4SNEGE5F5KGdeT8TWbE+QWvKGuQ+9T0imDXNODJQnMdFyCXt3/iulTnORWofQOPGjbnrrptJTR3FmjX/Lv+EOPA8j7Vr/01a2ijuvvsWGjeOVmw47OvYZRYLULnz66+/nqeeeoojjjiCk046iaysLL788ks2bNjA0Ucfzb333htxzsaNG/n999/LlG8vLCxkwIAB3HzzzRxyyCE0b96c4uJiZs+ezYwZMwCl8hwxYsROuz8HBz+966Vow7fay0Sw8QAtkZvUpyj+oSfSlL+C3JpuwXdHsgJ5jrnGSqT1LkJuFWehWID7kKXDM/1UR3n9vzafrUWCRwfT31bzWR6+AO6Zv38z17dBq4kKMFnhrAOKTdiCXw+iyLxPMO0KzTWtNjla6AlqsCviHlWKBHOrIW9tzlmMyFWBGUNQ0N1k7rEWsgp8iVyn5iHf9bvwtdkVRfR92NoFjfCL4v2OgtdL8eMu1qD5Wos00SH8SuV34AciR1/ncvP37ShofyCyQmVEtYse01hEbDciS0Vfc6wUCbmzzLVXIOvTQhSzUISyRvVAc2djVLbiz2dTJEzH2nqsEB+Ma7HjtKQqun0o0MZavGx8jiUY0W571l0wXhpx+ztaH+eYxRx0z93wLV+foBiSg/CrzyejZ2s+im/6Bj17KUSOv4XpqwRZVHqjec4yY96InuM6+NbDZHzSfyKqf/EUUjisI5L82tTL35njL1Ne7Q2HvQt9+/Zl3bp13HvvMyxfvpwGDW6qlFtUSUkeq1Y9TlraKO644xr69Inliumwr2OXEguAJ598kqOPPpphw4YxceJEioqKaNOmDYMGDWLgwIEVLrqSmZnJHXfcwZQpU/jtt9+YOXMm+fn51KpVix49enDuuefSr1+/XVbExWHfg08qBiEBoRg/wxHEFuzAr6zcHFUpbouCMOcgl5lriczUFO0CkoSE1VTTTwoS/LOQhrsREixuQ5aOW5BGtBiwqeqaI/cfKxzlIOHMFvWyAmMJEtDWIZLSkNhZkez/QaF+fyScbzHXeAe/pkAbJETF0obFEn6CvvZB0lGKSJYdZwjfvSkfP+uU1Sqn4BMRSwIvAEYhcvELIhvHkzhmobzxFpvxlCCrSA/k2vQGSu3r4Rd+24TmaJ0ZT30z1u4o/uEd5FpmYZ8DD8VYgAjCLchdrjtyrWmDX3V9HtLgf2WucyR6bm3BqGLkSvcqSmv8GZqnJsi6lYOE5tGIiATjK9LNcTtvXyABN7gWBy0DlnhbtyprMUvkylVq5ikn8HkSfsrgoNueDcwOWuKCfYGe9TX4LlvBcdp2M9Az0hiRuwJkjeuJSNF69P2tQr+7oDtjO/RbsClyrUvTh0iJ0Am52WXjV1HPROTSWuWDgevB8f/ZjPkRpDC4Gykp7L1eg8jmDOQW+IgZ58OOXOwDuOKKK6hduzb33PMYy5ZNIjt7ADk5JyXMFlVaWsimTWPZtGkojRpt5O6773CkwiEudjmxADjvvPM477zzKtS2X79+9OvXr8znaWlp/Otf/6rikTk4bBuU0zsHWSoGIEHc+qEncjsopGxl5b8gYeM15O70l6hz4vVXFwmv35u+bAG1G5A14xWUCvMwJGTORBrzrYiQ5CDBsTpl3ZLSkEBui7VtQgLUfPN/zajxWY2z1UIX4KcIXYdS4M4z581FMSMbzHXqEiksR8MKiUFyYduuRQJeY1RLo2XgXpLxLRXWCuMhgbIEv7L4Afh1BdKQcJyCTxQriyLzsmTMzslJyGpjU7CGzBjrIZ940ByXoDlOQ0Lq5Kj+7RxYYfwKNN//h77bccjSEB0D0Rz58
Z+NnoeQudZy5Hr3prnf2WhOf0G1Up5EZPcjlJ3sD0SG6yEyYbeZEH463C+RWw+UjZdoat7no1iFEnM8SCyin4OghSk4D9XRb6AB/nNhM4I1QL+3WJa2DNNmCz5ZCV67AJEr6wIGIuF/oHS6toDihyiLVk38dMSp+C5Saea+Opv56IXm9X8ow9R1yHKRjH6/uWZ+1pqxpeATYFsw0ENrTh6KnzofEYh+5p4bmf7ORUkA/oSsWSuAFxy52AfQt29funXrxn33PcSnnw5m6dIhpKR0JyOjA+nprQmF0vG8AgoK5rN16yyKi5X96ayzjmLw4EHO/ckhIXYLYuHgsLdBgdqHoHSQtlhZeaQCylZWBj/moD3SRsZCUHNpXS6+R4KO1cbb6zdDxfKaIK3oNPP+VxQobuMhWuNnmIqGLRxm+452wQrWp4iO0QBfK51qxjoKLUfLzLgvQULianONBsSPuQgSCXsdG2S9AWmJU5E2vge+YNmSyPoJqUhwX4PcTVIC934XcidbhzJsWWJhg8wrYr0owScv683/wQ26AxK6N5sx23vLwrcUFZl72owsFwfgu5YFXRpsULB1bfoIkbhNSHs/AH9OayJhtXZg7taZtl8gl5nZ6Hk4Gwm9bRBJOQo9my+g+T4c+e8/iF9gzt4H5rwjkQXuODPm6CB/GwfzK8p6ZYmWfRbts27Pi7YwBVEzcC+WIMxCBLsasZ9tO3+27+DYbPuPUGzEY4E2y/BjQzLQvI0y93CTuS97fjL+82OvZVNC/9mc8ziyJmxGbm222v1S9Mw1QwTGEkD7bKWheb0TKQtS0DMzGD27PfFTHV+KnoOXkFvdBBQsXhbNmjWLSNXetGnThOniHXZvNG7cmGeeeYr58+fz7rvvMm7cD8yd+y55eR6eB6EQZGSEOPjglhx//Mmcc845uyT7k8OeB0csHByqGMnJyUgovhEJA1aIKC8YeSsSImxxLiuEzEeb/SDzma0uDJFC9TLKulxcioQo6+KzAAlWY5FAuATVszgCCSvFSPC0wc6JgpHtcSvQ2FSZ0S5YVuOcgi/UNzPXfxQJpaciUtMNCfZTUGBwIySw5iFB2gpS0Qi6tuSa65eY87ORILgaWWjeQoJrNn560wKkBd5o7msD+i6ykbBay5yzFGni0/AFd1sszloAgi5ZpYEX+L79G/Hdxixam/cl+OlG7fznmPHYwO5NZix2rMNRxqAM9HzMQ64u3yAScwQKxA6hZ+R50+cRiFSswK+svtic/y36Lg5C1rJ2iASuQm5V6eYerjN9DMHPfvQaCja2VrfgdzYICc9DkEsOgTmz6IZIzfX4Lk3xYF3dGsY4lo4IhyUexYhgHm+Ox/tNRmeUCo7N1vJohMjRhqhzM5AlJ5iOONYza8kf6PnbjIhmCH0Xg809PYueh5PMfVpLYnDsNpbDWsM8RGQGIMLwKiKgw9Fvqanp5y5E8nqhtWIg0D9stfCDurPQ77p+eNxLlmx2qWv3ArRu3Zpbb72VW29VEpwlS5ZQWFhIWloaTZs23ekVtR32fDhi4eBQxVCdimZIOzsLaUbrU37Q9gb0k8wmUqB/FxGFM1Cg7Dp8AcTifaThrIE0lcfju0ZYS0EyyiTTB/m4f46CuW/Brxh9AdJOz0IuGOXB+tHbImgpyFJRhF+fwloRkky/zZDAOwwJtGnIgvI1EnL+DwWYd0NWnyxkBVkamJ9gvIcV8G26zmIkmDVFQl4eEqQ7mvZf4hdeA2n7bZEya6WwBG6rOf66GXMKvlDfCL9wXhGxs0tZWNJhU47WxbdKWAQLvFnYMdqq0lno+2+N705VgAJ2X4jqryGKh/gLfiYkUKrYeei5+g65xNk4CEsCWiELzTmBczeiZ6wOIgbB5/lM9H09hJ7R19F3MACRy1L8ea2HyMj/mTHGErytZeQLVBQwmgBY4rbGzIsltLGE24aI0K5EGa3WIUEaIl3ZggTIZuOKFqo89JuZi35zIdM2Df/ZWYS+j774M
S7R4wbfOoS5h2T8hAB2HDaF7aPIYpmML9zHgo3hslZSG4fzESIYDdBvoQ8iMrOAMchN63WknGgOrDWpaOtQtsK3TY89m2CFb5e6ds9HZmYm7du339XDcNjD4YiFg0MAHTt2ZNasWeH/O3TowMyZMyt0bqR2bz0S6IIWhZbIVcT6sEcjj7Ia+RByDToRCS9Wcx3E/yH3k17I1aEavk8+SADMMH1bQSoNCWztUVDu20jYvQO5sdgA24rEEFih2WpLi5HQvBkJWfXw3YxGm/fBZi7OQGSipjmnCSJGvZAryX3mWHMkQK8387Q+xjjs/NQksj7AY0hwewBl2JqFBKvF5nixmbPmRFoitiLy9RQSog5GmbDSkHVjMxLUauAL2dZKYREy47bkpRgJutGkAsq6idn+gp9VN/3YwPaaSNB8AX0PBaZtazPOeeh5jEYbRCjvR+5y95l+Ms08VCdSSPfQ/M1BVd1jkeQm+KTlFhSTshZ918EMWiFERHKRj/8KfFchkLCdiQTZ55D7WpDwgeZ4Bfpe6xMZ0xONVPQ9zUMC+pHmHJsiNhoeIqnJRG6RpSie5HXkmnQQflKA6mYMIfR7qoWsLdGEKdrty/ZrrWM26DwYczIQJVV43PydGtVHdJ92rmxBy2AczlWIYL2Pnvl3kAvew/hJHVqg7+IgZHXtTezv26Ztvxs9E48DM12MhoPDPg5HLBz2eSQy98+ataxcc396ejqFhR7S7jVHLindkHWgGr5ryq9IYB9B2aw7NvNLnUDPNrZiEXJTABGEdfiC1V9RjMS1SDuaSJO/AQkvQRec2kjYqIPIxbeoaNsXSBNpK5tGu7NEw8ZleOZeSpCgtQxfSPsWCbHnIdLVGrmUtEfuP1agyTJz83ck7Nxm7ifd/G+vYV0+7LVtnENQ+HwRuYD8BQn1LyNScjciYyfhFyrcaNoECxUuQULpx4gYdkTC8jH4VZRT8bNmZVC26NlG8391c4/x3Hrmm/cW+OQkKGBi+m+AvnubwjSEUtTGIhDBtsEg+FJz/6OQkHwcvjtNrDoQVqC+ELlPJUIrZOUYg1xwfkLf5cmmb9uvfe6GIPJ8Dfrd2PiTAUg4H4LIiSVteYhYFSHSau8rkTBbA1lopiMrzir8OKbguZa85BIZ1J2Ln6r5FES+C9BvyhLpDPP+G5qvTCJ/M9HjC85vgTk31m8sE/2+b0ek4IA4fYYC76n4ljSb4jkPPSMDUW2bxei3+ARwNH5Sh5fRc/AavnteIqSjQPD4qWuPP/54vvnmm/D/xx13HOPGjatA3w4ODnsaHLFw2GchQpHK9pj71UcOSg95IwpmXYM0wrayroc2d+uC9CVyA/oz0tT2xXd/iRY6bbEwu8FbzeE7SBO/FBGK2yhr6QAJuzWQEGVjD2zmplr4AaHno+XgMRSDcBDSOB9D2aw4iRDUmNpg2iwzFyORcPmYaVeI3J9ORH7vJ+EL5Rcht5WnkGB+N5GBvknErsdgj+chDeooJKBejsjKPGQh+QNp11/EJy1We11ixrw/0uR2D/SfiUjHheg7zUXCniV70WOx339t5JqVCLMQ0QzOd5Ak2fuubca4GgnkzYhNKux1i/EF8QaI7DyOtNb9zP3Z56CYyJoeQYH6OCSEJkKp6fs/+K5kU5HQ2tpcq4P5Ox0RkCtQ8PBfEOk9GdUOaYmE1RHoGT4HuaHZ76cpPskM1oSIngMPkajPzbU+QYL1bab/6N/cRjMHzZDA/xEiN/PQc/RA4Fqr8QvQ2SxcNZErYiJSEfwsWLcjHnnvjn6vY1HmrvJgf4dFiOh5aC3ZD/0ur0K/iz8jIn0PIhSDUQD3NESQKkIsLLLR7+J7YGNChc033/zi4jMcHPZSOGLhsE+iLCGovLlfqI+sCXeijXWF+dwG8RJ4twHOPZGQNgS5oqxDsQ32vCCii4UlIe3jKDPeQyhLKuy1Iu7Y3G82kZmbqptjNRDJ+R5pY+9Fgtzjpn+rzU5ktYCy2WlqIjePV
5Cm/9lAH2PNvTc07+cG7tFDQv3vSOj8EdVqOImyy1bwXm0WpKFIm3w7Im5PopS2f0bCIeg7BQmu1hc+GZGa/yN2AbwDEMm8Cz8uIQdfk24Jok0PmoLmO5fYNQeCwuWXiMhFE0Trx4+5prWOlSDXri1IWLZxMtHffX0zjsXI8vIWIhfWHW0TkXNabK73Kfr+5yCB8YkY8xFELnpu/gOsiiLfG8w9/oCe32hNewukNQ+h+IUvTZt1yILxICLE15m2NnZoK5rzWM+mh08w30cVxM9DbkFrkTD9NJFkJ8Xc70IzT9Zq1QgRjEOJjO/YasYTvOaxaH5rljNfdg5snEXwt2+vYdukIAI+rYJ9gp80wn63hYFj3ZB19VMiizM2Qb/7GaimSldkFUqE29Hvy5KZNPy6IR56/jqafg8gnsImJcWjqKgIh50Dz/NYt24dderUKb+xwdq1a6ldu7ar1O6QEI5YOOxziKyGbQlBeYg297+KNshByLXAwrrjlBKJ6IU4OutLDtp8g+dZ4Rx8wfJ9ZK04Hwlo11I2ZWciJBGZuckGHGfgZ5EZjISdG5HPdSM0V8XEznxkXVRsgK7N4mOtCi+aMf8daaFBAt8wJLi8i1zDgtpRO1/vI2HwM+Qq0wZfEGyDX+BtPn62q3XIXec2NK+DkFvH0YhwRKMDEgIPQ4L3f/ErVUcTzlsQ8fgQxcoE3bKSEFGL1oDXxE95WiPqmJ3DL5CgeiFlv8skJKimBtqDhMwSM+7BiKhGWwSCc/M5Etq3ItevGchVrTW+dWE2vhvYYvRsvYuE5XjYimJnRLohN0ID7WcX+gXFqdyGrIKlZq5iWVzWo2fmJWRB6II06b+hZ/Rkc39p+JXTg/NeQNkK4r0Rkd+KnqtCRConm3v0TNtcM7YcJAg/ip9FKkgqNhKZZniL+Xx/814rxlwFv7+ghStE2TUjiGL0vX5A2dTCFrHWAPvsgD8/1vWqB8q+dgua/974KXiro2egoznWwbQ7MdD3UET2s5Al5E+IZNVFa6qNB/oVfRcPIULzD2IpbIqLXXzGzoLnebz//vt88sYb9LrkEs4444xyzxk9ejQfvvYap154IX369HHkwiEuHLFw2KegxbAeEpqvInEay1jIRsJJdZS6MTq4OQdpOLfiC0tB/+1oQaIfsh48bvptjDZ3i6b4xcJqI81gX/TTrY0EyVh+1kF4Ue8h/MxNa8z/1iLS3fT7NhLIrZVhBRLosvA1rEGE8K0aVvO62dzXi6guxZ8D9/04EswOQNrsQXH6DCHN8c1ovucgQendGG2bm/GfhzSvH+Jr3M8jNqnA9FeC3LKKkWA5BWm4H0Q+9dZFrh4iB48jIcsKYnasJUR+z6C5DaY8ja55kYc054chAS0aSUT6/NvYm2FI4H/K3OM7SEh+J0YfLZAg+QL63t9AZGA2kTE5BWYO8vAtRzdEzYFNaWu1zp+iWJNckpJKKCkpKxj65GIK+i7bIAtLB/wg41gEsRuyMD2P4n3modowQwLnt0C/hxIzjlmUrSDeCLlDvY+ITX30PdyCH+j8AHpWT0LPSl3K/raC9VFqmXfbZomZw8amPxtgH09QttnS7PNQSGzCYC2A7fBdmiqauScJPw7Huo5tMn0egqwwc5AL55fIEtca/XZtgoIZiJRegObxJfP3SmTBuRGRkmXoOa1j2tl1IOgCOhQR8+uRcqT8+AyHqkWYVDzzDJ3y8vhw2DCAhORi9OjRfDh0KJ02b+aTZ54BcOTCIS4csXDY6+EvftWQ4H8ucBraCJeYY7YGQ0XMwivQxuohYfcE5FIFvnbbWhiiMwQV4xdls+gPTERC3z+Q8N4AkRibpWcW0mrXQoLeX5Bga7XYiRDLRSRkrrHBjMcKN2lIOP/B/H8DmpMnUWaa/khbnEGkQB2E1RYPRcJHOkpdWmzu+z9IwDscCYC34wexx8IW/KDoIqR1b4KsHa2Qn35zM445SIC0Gvc6KLbj6Dh9e0gLrgD3oECj5
2YtEqBT8OMwCtAz8ACKP7CB4zYrlvX5DyKY8jRYFK8UxZysRRmQol2kQMJaEZHabkvMLCFrh+8SZ7NxFZqx2SxPQdyKBD0bsyA0b96cP/74owJzEE1CyveTj6yLUIq+13gk6ARkEWqJCFAu+n38GQn/H6A4iRG2d/OejATjHoHz8xFpt+5QZ+FbmUKm70eQpeoMFGsQngF8i9xm/MrnjcxcbAhcvxDNp11LVuMHzRNoB5HVxEvQ9xTLfcquF6n4xLKQiiOEnu9maC0pNfeQhebJ3t+xqCL7n0y7DWh93B9Z0e7Cj+s6Aq1DfzGfZ6N1NBc95zYoPvg8pKFaNdYF9GFE/Pqb49noO2gIPOzIxQ5CkFScm5pKj06d+GTRIkYlIBeWVPRNSuLUTp34YskS3nbkwiEBHLFw2GtRNjg7GRGLu5GgFczck4sEUSuMWi1uDmVdLDYjwfYWJGzfj3LuW6SY/hoRKSgW4gvjKfguRRnIz/kOJGyloUJ2qWbc3ZBwlY82clDw8SWUTyoiZiQwFjuOmvjCkkUHZBHIQ0LGRcj3/x4zxieQ4NaR8t2RbjTzVM3c/xAk6DfHJypnxhlvASJTTyP/7yz0/XyEhJBPKOsiVGSu1RqRvlPj9B10YZoJFJcRZIL/H3TQQfzyyy9RfbyGngPrCpeKTy6iff7TkMC13LSrR2TWqqCbGIFzrYbcCvJLkYXgfeTeE4uQVUffYTyUoliImUBJQgEueOzII49k0qRJ4f+POOIIvvvuuwTXid1famoqxcXL8Qu1dUP33gY970mIyLxK0BqieX0VWT0GIjfGAkQ0C9HvMgsJvTZ17hgUrJ+L5quPGYklgu8j0jwTuVsNQc9UkDzlExk0bl0JN5i+7Pow2txTNXwysZrYleOLo/7PMtew7eyzbGMkbCrh4PUqgiL8xAigZ8mmPN6MX3G+jbnmYmQ1swqSWea+JqPvYSWyaF6DYiZWIre1deaeLamA2Jm6soisp1EHWRMtBiLS/gKpqaku5qIKUYZUNFUyiVObN4c45CKCVDRvDqDzdiNy0bJlS/744w8WLFhAy5Ytd9k4djT2pPt0xMJhr0TZ4OxOSBC/mchgROvaUwdt7GuRexBog4+2aGw1n9tUkgOQpvgHfL/hxmiD3ogE92L8glXBtKjBxbgHEnDGmzHabENzkLb9aSQsH49IhUfsWhgVgd3wS5CmcBUSnqzrVmtzfBHSWIIEvpeR9vM/qFjcCCIF6Gh3pDaIRHhI+Pun6bMEBYk3NPc4ktgxAV8iwawjEtY2muOn46fBXY6sPVtMm8OQ0GJds+y8R8eDWI1xIbJu5CWcsZ9//rnsLIZCyG98GX6sTprpM9rnHyTwFpt72ogyLY1EgeMXxbiqZ/reiuZ1MQoq/xwRkb4JxxwbwaxNuZXSCleWRMSDFRZ9a8hcKmoN8d2pYrmp1UfC/memz/Hmf2vNaW7OnUNkOuHaKDX0bPQ9XkakO2Iq+r3XRL8R+7zbwnhb0ZyONMctUa+HH7yfh185PpjcwKImEtA3mT5sG/DXi6BLUyzEskx+idazs/CLCdbFz2IFkRnRLHlZjuZ3FlobuyMr79OInN2KH5ezAd9NzFrJohHtHmjraTyJTywx47gDmEBx8ZQ49+lQWcQjFRaxyEUsUmGxu5ELh90Ljlg47HWIHZz9f0iAOCnQ0m6EQe19TST4bTB/ZxBp0SjFj08AafLrIjeKJuZVB2WVWYXvg281g0EtXjDmIQ25Nv0QGEsO2uCtVaMrvutEMC5iW2DdspLM2LYGjlltZSyXiwOQC9AWZFVZiOakFnJLqh24x/XmflYhQbgICWgHI23nROR+8SZlU3M2Mu0uQgLV5+Y6H+Bnj8K0OztqjNYSUUx82ADzjxDZqTx8154XUGDxQORrnoRPaKJjcLKRFv5pNHeX4buDRPSOBK+1pp+vkPD6C3oGfkdCaEUSD1jEztq0q7AtdQ4iCUYiF60ifBe69fjFKq0loDoiz
TYwO4SIwSOIeFyNfo9Z+L+5oAUBc/1fULzKRrTWvIa+G5vooKbpYwWyNiXhp1+2606B6T8J/d6b4WdXKsBfJ2Yhgh8rDiMW8pC15kC0lqzGX+NS8IlKM/waKmko49yj5px7kdIjDRHgJsj9rpqZ2xIUc1Rg5mM+firrREjCL/z3fyiOyyLHHOvvXKKqAOWRCosguZg9ezazP/ssJqmwcOTCIR6ic1s6OOz2OPLIIwmFQuHXkUceGT7mk4pBSEiwgtcP+NWrQUJz0LUgAwmBqcjiUB8JCyHz/35oA05CQkyyea+ONt7f0cb9I7Io2Gwsq0yfaZRNNWphN84O5twt+NmVmph7KDDXX4cvWGxl22GJkc3cFEyHauclOv2uFcys0NwG3XtXJODnIrIx37z/gdyV1qI5PAulnX0TEYshSGs8DrlFPGnex5nPhyBt8+NIiM5Dblibyrm3ZHNPtr5AWuAV/NymIq2c5j4InRcM9j4SuYy9g565aciN5A1ksTkGuYZtRs/jWFRl/EHkljMdZdH5Es3Ty0ig62+ukYcfb3AKCrK331c8bDXteprzdj2piMa4cePwPC/8Kq94mt92Mwcf3Ar9zlYAqzj44FZ4Xp65x63oWe2Hgtz/jdye5qPg/mPxLVl90Ty1Q7Ez56I15AP0nSxARGIk+o4vRUJxK/Rd9UVxQ+PwrVbgrynV8V0pVyJiuQAR22X4FraN+OtFCL/Q5Vik3a/Id2ctU2vNvW9CFpTa+AqOWciKk4lPLN5HrqI9EEnqbvr6Ba1tA/CttUHlRC1kdbTKkNUVGGN1098PyBIaxBlmbA7bi3Xr1vHJG2/QKS8vLqmwOLV5c/qGQsx+++2EpMKiR9OmdMrL45M33mDdunVVOexyMXz4cEKhUDgmrFWrVhFywddffw3Ae++9x1VXXUWnTp2oVasWGRkZtGrViiuuuILff/89Zt/9+vUjFAoxfPhwZsyYwfnnn0+jRo1ITk7m7rvvDrdbu3Yt119/Pc2bNyc9PZ0WLVpw4403smHDhog+YuHLL7/krLPOolGjRqSlpVG/fn3OPPPMMpbhit7n7gRnsXDYI5Co2NKkSb8Fii3loA1/YODsPCKrV1sNobUigDbILfiZWmriZ3bJQhu9fa+GH5OQhtysRiHLhXWnslrOXLQRR+eCj+V/bF2QbNYXe/wNM5bGpr9s/ExRnU2bygiKwYBQ0JwtQ1aa2kRqMqPHG7QEWHeRRubdWoCsi8WH5l6eQGlx46E2Esij4ccCZGWlkZ+fT2npDJQ95pFy7tEiidj6E7/vpKRYWa4qjvK16JaMZSPy+Bh++tJ5KJ7FpjzNQ4JnARKKS4kVHJ3YJSh+1qZgcce9BdOmTYt7TFalNJTxaxCRFh773Vj3uBB6lp9CvwH7vbxHZIrYIvQ9V0OZuQ5Cc74FkcS3ELk4yZxnFQylSFiuhW8BKTV9bUFEfSN+HE5dfCvMV8jyYq1zwe8wFPWZhzI3jUIZtZog8lOKv+XbBAt2jNPN8f+h+InL8clDCBHluihxQ/S1bJ/JaI2yqaxTiAxGj37uQvhZ6N5F7lWY8zLQMz0bh+1DnTp16HXJJXw4dCifLFpULlk4tXlzjmnYkOy08mN5Plm0iBnZ2fS65JJK1cOoCrRt25bLLruMd955h7y8PM4++2yqV/eTVDRs2BCA8847j/T0dDp06ED37t0pLi5mxowZvPLKK4wYMYLPPvuMo446KuY1Jk6cyNVXX02jRo047rjjyM/PJztba8jy5cs59thjmTdvHrVr16ZXr16Ulpbyn//8hzFjxnDAAQfEHfstt9zCY489RlJSEl26dOHYY49l0aJFvP/++4wePZoXX3yRyy+/vFL3uTvBEQuH3RrJycmUliZTserYQ82xQfgEAeTC5CHBvRBfqM5HWvV84ldNLjXnt8UXmKOzAFlCsNmMaynyZ85EQkQwgDOYajRaILAWAqvptBr6N82xbLQJbzB9zcIPRo0eezwEs
xbZsaTjZ4upQaQm06LYnGutGkEfdItkfNevjfhFsxKRiniIjAXYvDkovP8HCTAD455dmb5jpUfdFgQF9s6dOzNjxgzzXxKa11rILWUx0tJaAtAYPV9zkNZdgcLt27ePq1GruEtQxbM27a1ITYWioniENDr+xr5aolgn0BqzBD9+4FkURJ6OtPvHmPZNzfFG6HfbHp/E56Dv2QprNjUz+ClhN+HHZNk1oy5aV4Yi8t2K2ETCwq4Zo5Cb3YnmHEui7DW/RNbPs8x1P0HE8xKkgIkm25MRCQm69tnrJkX9b11FV+IrgmLBxpl0N/1bhYWNLzkASOH4448v14LlkBg2IHvU0KFQAXJRUVIxyvPode21FaqDUdU45phjOOaYY/j666/Jy8vj0UcfjRnU/N///pdevXqRlZUV/szzPJ599lmuvfZa/vrXvzJ9+vSYblwvvvgigwYN4v777ycpKVJBde211zJv3jxOOOEE3n//fXJycgDYsGEDZ5xxBqNGjYo57hdffJHHHnuMtm3b8u6773LggQeGj33zzTf06tWLq6++mmOOOYZ27dpV+D53Jzhi4bDbonLVsT9DbjYD0WZWgG+RsLECafgC8kLznoqEdeszbIWyYLYom8vdZuApxa+AXIy/cRbgB3YnIatKTXN8FZEBnMFFygo2BYH/P8WvHv1X5BaUZvpLR5vu52ZeMohtAQmiBH/DTiHypx/Cr2L8B9JkdsdPi2sDnT1z7fKWDQ9lypqJhJGqiwXw4xqig6a3v++qxPTp0yP+15jXU9UEINi2TZs2zJ8/P/x/69atmTdv3rbdwF6EwsLCChBSSzCg7O+oOvq9WUL6pvn8ZPS7noSsc6X4a8dmRKz/aj7LN5/VpKwFLbrOSS0zhjXotzPcHHsuarwRd0nZgoCd8N3/bNIJ8GugHI6skp8hUnsIWk+sq5RdUzYhK0p0pjGraInlV9/AXGcFkZbPWOiALCKF6LdsLUhtgXS++Wa8i7WoAlSWXCTCriYVlcH5559f5rNQKMQ111zD66+/znfffcevv/5Khw5lM+m1b9+e++67rwyp+OOPPxg1ahRJSUk8++yzYVIBULNmTZ599lkOPPDAMs9saWlp2JXqzTffjCAVoNiyO+64g7///e88//zzPProo9t627sUjlg47JaofHXsh5HGsDfaTK3LTjD7yia0QW9Egl1zpEmOtTFa7UYpEu5XIaE7hF/IKhUJ3ZtNW6v5X2OuWcN8XgsJDMuRNSPF3E81IouTTUfa56uQAHwIEmSykKC+0lw/E/ndf46EglPwXX6iNbBW2LEWmFiaKGuJyEHpORcgS0gwsN3Gn5SHSGuANKOnELuKdTQSV3C2iB80vf197ygkSl174IEHxsw6VVk4EhEflSOksdaDICG18QNv4JNE0O/IWkS3ImG5Haq9sQy5o61Av7NoRUYGWpeW4leuzkZuWa+a9s8iJUpr0z6YQe0rIgsChvBTXtu1wFotHkdr2lNI+H/M9H8jkZXh7Tqy2PzfOnDMEmKrtAlmlgJfsbLUtLGKmFiw/S5EAfU2KUUOmt+2wApHLqoAVUEu9iRSYTF37lzGjBnD3Llz2bRpEyUlssitXLkSgN9//z0msejbty/JycllPv/222/xPI/DDjuM/fffv8zxTp06xVzXp02bxrJly2jTpg2HHXZYzLGecMIJgNyw9lQ4YuGw2yEyALui7i6zgYvxN/kgubCb64+otkJ0ZdhYsBtYCGkZs5FgvwY/u4oVuBeY9rVREPevKKaiBb5rUCoiMgVIc51n3kEb9+fAx0gYSEFk4neUFakO0nj+gIKei5HrxVFIuD4eP0NLNKw1wtbNsEJGEMEg9uHmPl9GaR9rUHGUtQZUr16dvLzKxwJkZaWF3Z9iwfM84yZX+b7jVYfeWagKEuFQeewIQnrYYYcxderU8P+HHnooP/74IxCdjvh8tAYkIwIQy/UStCako9//4yiY+nrgJxR4/jYSuMFXIrREw
dbnILcnu0bZehvBZ93GXvwDpXt+FP0u2hKZMS84JpskIhk/dW5BoI29DzvumviumynmnoL1LaJh599aSPPRuplvPn8IZZBzVbmrAttDLvY0UlFSUsKAAQN4/vnnEz43ubm5MT+P53K0ZMmShMftsei13lqU582bV24GrdWrK5IAYfeEIxYOuxWkHYgVgJ0IK9Em1Cnq8xR8i8VmpNnri0hLZRBCm2oT874Kbd51kBXjJbSRn44f3GljGFogLePZ+LUabLDVEiQQf4+sGqeZezgUbaxbkUbyZ6TpHIG0fzcjYeI2VBX3aeT6AJEVvWO5W0XDujp4yN3qdzRXL6LNfPuEr82bZc1JSUmhpKT8WICUlJQKF8WyWicXZ+BQUXieR1paGkVFlSekqalQWBj5/FgSEe9aPpEZjxQfp6EMc0XombSxYLY+xmIk+P8P/daDRf2eQmvNUYio1ELrgI2FKkVW0Q1IsWFdqjz81LPvo9S7p6M6KqORpbcH8S2SuciaMg+tpdn4RUQtCvFdR9chC21D0zZxjRifpNjMfEXoO5hj/j4GFdF0VbmrCmeccQazZ89m1NtvVzhQe1NhIaPWr6f9uefuEaQC4Mknn+S5556jYcOGDBkyhKOOOooGDRqQkSGr2IUXXsj//ve/uM9TtWqxYgl9JCIHsY6Vlmp/btiwIaecckrCvuvWrZvw+O4MRywcdisoULsTclWoKL5HG1K7GMdSkaViKdL419rOETZAm90vKJPJJCQQXIjcEFogQlGKTPuzUPXoEfhuCo2R8DAEaffuQylbVyHyEUxN2wkJPr3Nfb6DSNdNiKzchNykGgFXEt8KE28j3mTuZxSqxZEbEIiqLutQcbFfU+Lss8/mvffeC/9/1lln8e6778YZX/mo6urQDns3CgvlvrMzCKn/W/oRkfYXkVBvXZqCRSFnoLViGfot7YcvaKciy0UdFLsxB/02W+IrB2xhvkb461whsoYONX3ehiycjyJScQWKTSvrBiKMQm6mBWYu2hIZrG0tL9WQdbNBYCzz8VPsBpNpRMPGBbXAVwYVI9fQ6mi9+QWlBp4JvOPIxXZi9OjRqlNRq1aFSAUooLtvrVqM+vxzRrdvv0eQixEjRgDw/PPP07t37zLH58yZs039NmmieMuFCxfGbRPrWLNmijeqU6dO3DS0ewMcsXDYbaANuA7y9a1MQO4W/NiDWJvN1ch3twD5/PcsbyTmPVZfIeA7tDHXQ7UJ2qJNMQc/53wGygDUG1kYvkQBk39G6SknIU3kTaat3YRj+SLbOhjdETkZiYT9DYhMrEP+18tNfxUtoGXJzyvIX3x1eLPekVmHtodElAdHIhwqiuDz2r17d7766qvw/yeeeCJjx46t0uv4AfwLkOBfi0hhuzkqCvkYsmDcg4jAY8hNaX9EGq5DLlG3oPXySERS2iOr6iKUXtdWr1+D0lL/H/q9X4KskrejoPSXiYyfsHgZrSt9UNamPyhLDqK1sjbhRnVkSV6Pn1o3PUZ78Av/2bi2VLSmf2rGeUygrYd1+7QaYUcwKodEFbXLQ6wK3bsaaYYYBZVXFra2RosWLcocmzlzJj/99NM2XfPYY48lFArx448/Mnv2bNq3bx9xfNasWTFdXrt27UrdunWZNWsWM2fOpGPHjhW+ZqL73N2QyMncwWEXoAUSxisDG8ewJcaxr9EG9w/gaOQOEKtdLAQzxVi8jFJW9kLxCJ2R1SGNSDek4GaXisjMW8BxKC9+JxTDkGnGV4LvIhV9fhLalG2w91XA39CmPwppHW9HgdznowDsWO5EQXeoQpTdpj9yj9gYN1DaFiI79NA2BAuRHXpoGzxvc7iNg8OeirFjx0YU6KsqUhGE/zux7kIAF6A1ZRxSGFyOXDWbIzeo11Bc2AeovsSFyOo5FRH+6cgKcidaB/oA5yGFx+dI6fE3ZCXtj9ac5mgt6o0ffG0zy1mMQuvL1eaco1DWKZvQIRiDFuu3n4RIUD389NOYv
21w+xb8BA9dAn0VojVsEVo370ZxW2+b+XgcrYGHAHUIhdLCri0OibE9pMLCFtH7cNgwRo8eXcUjrDyamqJ/M2fOLHPM1pIYNmxY2A0JVIPi0ksv3WYhvWXLlpxxxhmUlpbSv39/Nm3yi7Zu3LiR/v37x9wTU1NTueuuu/A8jzPPPJPx48eXaVNSUsLYsWMjLO/l3efuBmexcNiNkIXqVCTy5w/CFmQ7CAnSc9AGFcSjyIe4F9oc/4xckAYTH8GAxODiMAp/s70CxUjko43aLlrROd2DG28q0hbWQkL9KER2NqBNOI34VpIstAnXRUSkDxLwH0NuVH2Re8VDSBB4HOWw70Cky8U8FFz+ETAXzd0mfAEjPhL5kzs4OFQMvnvUTPR7fQPf1TAHuW02QnEZq5FLYgmqRn0iUjBkAofhF96cjNLR/opISzoS5r83r2L0O38OpclNxk8fDRL07dqzDK2RfZBF1EOB4W8T2+Jr17hY1og6pu81Zgz5RK413yIXMJvRro+5p6fN+9/NvAT77ozW+esRwXqcgoKZzj2qHFQFqbDYnSwXZ599Nl999RUXX3wxf/rTn6hVS26At956K//85z8ZM2YML774Il999RWHHnooubm5jBs3jtatW3PmmWcycuTIbbrus88+yy+//MLYsWNp1aoVxx9/PJ7nMW7cOOrUqUPv3r354IMPwpYGiwEDBrBo0SIeeeQRjj32WDp27Ejbtm2pVq0aK1as4KeffmLDhg08++yzHHHEERW6z/32228bZ2/HwBELh90CRx55JBK8K2oatNWzQ4g4VEMbFEQK87OQVjANae2iYxISBTUH/47ebNegjbsEaQ5t9iS7AZbib94WK8xnN6IN9iHz6kBkhdpYY6qJBIYQIiZrzH1NMn08jdwgniaymvN7UX3Z9LNrUOG6/ZEm08HBYWchvquhTT9dC60vDdDv/BwUTxHLigoStF8yf0cX9GuMCEcfcy2r/LBrZwi5YtrkFw+h9eamQP9tkOVkGLK6Bt0tQ8SvaWGz5eWZa9dDypE0tAaONGM/GllE3jF9/IGsFDYVdrDvJETANgHnIqLzL1zWqPioLKnYVFhYbuzF7kIurMXg9ddf5+OPP2brVmUxu/jiiznhhBOYMmUKgwcPZvLkyXzwwQc0a9aM6667jsGDB3Pddddt83UbN27MDz/8wD333MP777/Phx9+SIMGDbjgggv417/+xbnnngvEDsJ++OGH6du3L8888wzjx49nzJgxpKWl0ahRI0444QR69erFWWedVeH73N2IRchzv8JykZubS40aNdi4cWNEIZR9CTu6+JafYnY00rwnwla0kdkCeCAt3AaURtIuiGsQUXkCuQjZDTXoO1yRmAQPuAH5J7+BNrQN5rwN+BmgGqENz7oLBM3zNkCzCbI+bESpZFuYsQQFhmhXA4tFSGhoYfpbae73MbRBx1pc8lCmmS1IczkSbeg3oGDIM4EpeF75FgsHB4cdB79aezLSyp+KlAMfEelqGU0uglt4LNJRgoKfT0YWyvfx17wQEs4PQy5W89Faea+5frDvpeZYT8pafO2aFT02m/K7AMWAWeuphxQ8nyLrbWPT9j20Xq9E7p1dUByIJVwW65GyZz98/ehjKNB8xxW+3FnYunUrCxYsoFWrVtvt5rV27Vr+2b8/nZYu5bpO0ZkTy+KTRYsYtX49fWvVqhAJeXrGDGY0acIDzz5LnTp1tmusews2bNhA69at2bhxIytXrtxtMzxV5jmrjBzsYiwc4iIUCplXdebPX4cE/4ZAfebPX0coVD3cpmpgq1Yngs0wYgOarXb/VqSlC/p8TiEyW5TdbCoak2AxH6Ve7YesDrbwVD1z/XREdpYh96J1po3N956PNlVboK4IBTdej7LFLKCscBDLFSvTXGc90mi2RhleqiE3iQeR0DAdaUCnIz/rkWZ+njNz8SZKe/kIcseoWHpXBweHHYfp06fjeR6pqUlozVmF1pIvTYtYa
0J5pMJDa88itGasRpbXYPtuqMBeEbJ01savaRHsswlSxIzCt44Er23XYvsqRaQmBVl0bU2LEvx6GjbT1VZEJjqgZBKXo0xWo5AiJ/pegy5cFgNRxrwcF3MRQO3atTn1wguZkZXFF6b+QjzYOhXtzz2XUaWlfLJoUcL2XyxZwoysLE698EJq165dlcPeI/DDDz+U+Wz16tVcdtllrF+/nl69eu22pGJHwrlCOZSBiEIqWvBboLgHm2o0E2m/barRz4A/CIUkMG+fpqiUxIHVJfgblbVU2OudbMb7hBlvdSKzRUVvun2peEzC84gUtMWvQ2FN+aDNLRPN1QZEKDbhF9ADbdq18a0bIbR5P4E281so66ccvVGvNeduNNevZ8bUF5GHqZR1fQJpAw9BFpJDzFiewFbH3tO1ew4OexMKCwvNGjwarbnW/chmTbJCe3l6QQ+Rk7XIeloNWWlHIsXIVabd2Sgd9pcoJfeJxK9pcTYiO88hJUvQ4htNfOx7ijmWjazILyEFyN9Q7NsmM8aCwL3djNbZYcgd7Goi18dYJCsJreUTKCiYEn9a9jGEQiH69OkDwNvPPANLltDDBAIHEV38bnT79gmL6H2xZAlvFxVx6jXX0KdPnypUMO45OPzww2natCkHHHAAderUYenSpUybNo3NmzfTvHlzhg4duquHuEvgiIVDBLQ45CB/2xuJXxzNuivdjTKWPA5sewDdEUccwaRJvyHCEu0KtRFZAjahDXJ//EfXbkTFaLM7G9WFeAhtxDZblPUDDi5+jVHRqfnEjkmwG9lalI1lf3w3K3tdzLVboHmyFWY3I1JSDRGARYjsBH9y6WgTnxx1v9HzZ2M8tiKLwxakeSwy1zsQfQfDTXvrX22JTTGyNtVDlhT5IwfTyzo4OOw+8AO8i5G2/zGU+SkYfxVcz6KFuhJkqViL1nMbHH2Y+T9IDNqglLVPI6vBJXYUUX3aa19m+ngKxXgNQEqS1Ki2RfjkpxAFo1tryU1o7bNrlYfW1hy0ds5FLlfLkevqyWac4RmKc985yHLRn6SkpIhMQPsyyiMXsSpqJ6rQ7UiFMHjwYL788kt+/vln1q9fT1paGm3atKFXr17cdNNN+6xrmCMWDmH4cQ6Xok2sIrUk0qmKALrvvvuOUKg6soKAqk3/F/gJbYDRaIhyvl+ABP5klJ2pF6oo2xhp6IPZouJlL2mNXIXAD3zcYvqsj9yMulG2xoSN9Uil7FxloyDFtfgbbjRB85CF5D1zvVhuYCF8rWMyEjTqoJ/uSmQdqW/GsQT5HLdDBGIF0gI2NX2/iwSUyOrYDg4Oux98cpGPrKY1kRCfRKQlM7iulSJFzEr026+Dfv8bkWJmIVrPWiFlkCUGN6MA8Vxi17QArT3F5lpno4DrB1G8xRBUZ+cAc36yGfcSFDg+FpumWpn6DkZrVzZy0Uo1Y96A1s1CtOadbcZ4D1KG2PsMVuuOhtxCPW9tnPvYNxGPXMQiFRaxyIUjFT7uvfde7r333l09jN0Ojlg4AEFSMQhpfCqLbOSz3xB4OC656NatG5Mn+xr6rl27BvwU84APUUaQGWiDOQnfPaka2qzm4xd/GoM09jegzekeRCYeRlr+TCKzRcXLXmKRiTZd63I1H21msTbbfHOsZZy+GiHT/2rTVyzXhdamjyX4sSDBzFJW65iJNHpLTV+WuOQiv+VVyL3pcPM5aPPdjGIqPkNWk03AVkcqHBz2AHieR1paGkVFK5Aldi4qkFcbv/ZEKVJybEXrQSFaI5qhNWceqta9FiVyKAF6oCxPlhjURuvVcqTkiI6VKDV/2zixJERYhpr+RyDL6zuB9snm1RJZHA5D6WMPQAqQJLQ+fo2vWGlpPmuE1vHpyA1sOHLTOtzMzFa03scSYTJQ+t7Z5U/wPoZocvHrjBnMyM6OSSosguRi7owZiqlwpMIhARyxcAi4P13KtpGKIAYizdQLYXLhLz5ZaDOoj63ePHnyHGOpyDNtFpvjD
6LNKFZth4NQXMGtKDj5aeSD2w/57b4A3IU0XAVIqL6LyGBviO0+YDVyqWhTDBaQCqIUbeIp+L7PQdigSRvcXYA2QxtjYWGtGNZtwG7kGxFZ2Io0eo3wY0w2ofmyYysw549AwY4p5v+QaZ9p+i7EkQoHhz0LhYX6nWsdfQVp8C9DQnZ0rJldu5KQImGSOedXc/wb4K9obWmDYhjmIkLwJVovlqD1pg6+C5UlCfGUIzfiK0jmIbfPBvjWUpCCJ2TGcidSDm0x47XrsnWLSkbrXgZ+7N35SMnUBWW0OyDBrHUEUujZsydjxoxJ0G7fQ5BcfPLGG/S65JJyU8Xa4x++9hqnXnihIxUOCeGIhQNa2DuhxX574QfQwXcmqLsiQeD/RUL0ucBfULrF6DoQULbg3GlIo/Uo8sVNQwTpAVSB9jxk/h+FUqsGC9jZvmJp5OyiaWMqgtlHPCT4FxLfbcC6L9VBm/SviADYvu0mba0eJSgoMp9IrWNrZJ2wPsugDddmpCo081cfBUT+GwkJ3ZF72mrzmXN/cnDYk+EraX5CltRmyPJwgHllofXjFyS0f47IRQHwDBLyLzDnd0auR41RAohByBJyLFJabEZrXn38GI14KMCvU1GELCG1YrSbhEjMLWbsF6L9oB1ay1eY4wuA75DlohWqsxPCz7j3JVrPD0Uxfq1iXKs9kM6nn36aYNz7Liy5OPbYYyscB3DGGWdw1FFHUbt2bUcqHBLCEYt9HFog6iCNU0ViKiqCHCTMVzQI/DXkP3wlyp2+xZwfS2iPJRinA7ehdIjPo02tL9p4Lgf+DwUanog0adGwgr7NXhJEdAEpSwJWEhlbESQpwaBJGyBXzRyz6WaLzeezzf/5SAiw7gep5r5K8EmQtaIU4Vskkk0fOWgDXo3mMANtuosQUdnejF0ODg67GpGF9dYh60ASWitkBfZTcmcgN6cbAz00Qhbe15ELZSp+9e4spABaCvwZCfrLkJIkG61h1g2qFCk1tph+rHKmMVp/g1mbPLTGP4ji4W5BcRA2scQGRCJsEHk2Wt++QK5WtyNl0zGIQN2CSNNQpDi6Hj/g3CIDl00/MUKhUKWDi/fVYGSHysERCwe0mfSuwv5sZqS/Un4Q+AIk9J+N4iSC2UwWEj9+IQibevEqJPA/ifx5xyIt/j0o5uJRlC3Kxll4+K5PJeYza/q3SEfatVkoMBz8oOh2gXbxgiYtspCA3wDfrcoDfkNxKU3MOdatqhBt6ssQWYi1seeba74L/I5IRhpyfSjGupc5QuHgsDeiHkpJm43WgI0og9TZxHcTegk4HVkwBiLSYLPLJSNFz1gUzN0MWS024BOIaFgrqiUk1u3JrjklaH0fjsjE9cjCUITWyQ2m35pmDJYMpCNr9PEo4cRQ0/9laK08HSmKHjWvNUS68drEGns23NrtsCOxo54vRyz2eWQhF6VY1oRtQXvzPohITVk83IOCjW8x/ycjgdxmPVqANq1Y2qdgnYck8xqIzOgXIeH8WmQJWYW0Zo0RgbGwWjP7igVLUm5Cm+A6tBFCpPtSkemvLWXjLmqatpuQdQHT/iugK8poFUQq2miLEEHII/bGPgERML/+x4EHHsjPP/8c514cHBz2dOy///789tsKZK28mLKVsOPhMBSL9h/8zHmrketTfZQZysZb9MSv22PjtQrxY702m3Pr4buMBtfpYuTi+ipS+vRAFpMC058lK/XMK5bba3UUH9cQkaEcRChsLaOr8NPnpqH1HjMvBZxyyikVnJfdC8nJisMrKiqiWrVqu3g0DnsrCgqUXS0lpWqpgCMW+zDatGmDFueOVdTjUEQG/ooE/Oh6ENGYhYq6PUhZQbwB2pjWIN/aBmgDCfYTrCVhs6OUII3d7fgB3SBLxQr8bFF34LtF2cDE6D5tv2eizXYE8k0uQdrBjfiBhhlIw1c9zr1mmGOrzHsSMvWvQYJBNKwLVBY+EbEbu53Prcg6sdlpthwc9iH8+uuvUem5K
4OH0Dpt18K/I8WIzTh3EFrLj0EWiGB2qJB55ZpXsmkTtAKD1qoFKDj8TOSeaYO07Vq7Gilc6hLbxTWIv5ox/w+5pLZGa+FqFGM3F635dyDCVBso2GMDt1NTU0lPT2fjxo1kZ2e7mAaHKkdJSQnr1q0jKyuryolFyHMSSbnIzc2lRo0abNy4kZycnPJP2EPgp5gdTdmidNuCaiiuYQwyzwcfrVgL42BgommfFnXMbmTz0AaShJ9mNegSVIK0+cVIg1Zsjl2FcqX/HWnJLAYhDVobZIHohZ95ygrswbHmo/m5GW1YjyP3LJtpKZNIfp5oAyhEsRo55nUO2lRfj3HvNlg8nraqFPgHyoDlgrIdHPY1aP0+FK2h22JxHoRclOxaeBJaQ+eb/09Hawz469oW5KZaimIp1iBraxKynDZG7lgFwDVImTMUrX1NkYIlBWX/K0AEoaKxEJtRco9aZuztzGcrkSX4RuRamoUfaxLbJbRjx47MmjUr/H+HDh2YOXNbSNqOQ25uLkuXLqV69erUqFGD1NRURzActgue51FSUkJ+fj4bN26ktLSUZs2aVcgqVhk52Fks9nlY4Xh7MRgt6DdS8SDwKWgzi0Uq7NjqI01aEyTkR7sEechdKBMJ67XMeccBHyFrzLEob3oIBXKfizJH9UfZo05GaQzboo1vKyI0s1Cq2j/MdVNQNpIr8c3/FRXoPXN+fRRD8iTalIfGaFdk3uMJC6WI4PwHRyocHPZl/AF8gNa0yuIhc95VyIWoKXKL7YAULiOQ4uM8fAG9GK3zts7EarQepSOLxFa0Fs8GfkRJNQpMP+loDbT1dZpQuQDr6ihz1W0oKYV1f01D5GYgqvUxBO0Dv+Gv4YsIhZLNWMumPZ81a1lE2vPdYU21wtuaNWtYunTpLh6Nw96E5ORkMjMzqV+/Pmlp0fLX9sMRi30QnTt3ZsYMWzTOaqg6bWevTyDhvaJB4JuQgN2hnHY5SCO1GWnDQJtDEX4mkkVo09mEXI1KUSD0AhQ8Xow2u44oS9Q35v9maCN5DWnUrKneZjyx7yXIYtEKEYKaKAtJsxjjjeX2FSw2VQP5Aw9HsRUNotrZWI/oehcWucC9iFSs2i02QAcHh50PpZ9NQ2tvT7Ytq99hwDQU6/Uo8AZy8UxDipz7kIB+KVrvauMrPLaitdG6N1VD61MN008mWt9ro3Xc1tzYgG99rix6oGDur1Agd1NzvSS097yI0treidbXIuRqeiNyG2uAFEkHETvtuRRJmtddn0kvJyeHnJwcioqKKCkpKf8EB4dykJSUtMOtX45Y7CNIVKROC7R1ixpMbJ//aCwAPkXCvN0gKhIEbl2cxiPLwxx8V6wGMdrb4n2bA5/Z9IrgF4pbhTbD2miDOxJtOnea4zOQy1V/tJn8C+VRt5vdGqRhsxVgD0ZzNRJ41tzr1WhuXkak6G5EYOw4gzElwc/saxNwPyIFKchi0xPfJSuESIXN7hTEVjNPj+NqUjg4OAhFaG37F/DIdvRzIsrA9A+k+NhgPt+EFC+TkbXgfPx1zVqOS9CanIMsGKPNOechMlKNSFEjD782T2WRhqzc4/EDuK3VIzNwrMi0HYGUQXWR2+iBaC+pS+R+Y12B70YWIK2ztsjrrkZqaiqpqanlN3Rw2A3giMVeDhGKVCpWpO4zZE6+GWnUT43q7VWkwVqL76pjheFUlOKwkMhgaPCDqt9AgdqrzflpaNG37j/VzLj+jrRKFhkoo5JNK2ux1IwlA5Gimvib1VrT/37IGrMGbYrTUUzDMDPGC805tZE2LFgsz0NVXl8zr6dMHwPQRj7FzJWt0REkFwT62Ircsh5HpvnuwHtos74S+SLfj++GsD++S5b9bj7F1aRwcHAIwi+aZ7M8DSznjHgIuldGJoNITk6mtHSq6ftZFHvRHq2v6WhtzkWF+d7DT8fdBRGIEvz9wMY+1KHsellRdECEoRCRm+qBfjqZYwuQIull4CyUddC6/K5BiqgUf
MWQRTpyD+uJ1vj/7DbkwsFhT4EjFnsxtOGUV6SuFG0AISK1NRfgC8AfIhcikC/rOUSSk8+Qtqw9PomwwdbFaIH/qznWHC30HZFrUQ4SoOcgzdtnyOxeB20K3QJjLsAPZl6INjNb2To66LrAvKch7dtqtPEeiLKU2Pznq5EVIzxrgbGvNeNvaeamEUp5eDnQBxGl/shXOUjY4pECD8V0dEHkZqa5Tj6qhjvb/B9d7MrVpHBwcIgNn1w8hLI8lVc7KBqJ3SutC46u8TMS5regdb4aWusaIOXOMnxr6374e4GFrXtRXnxa0N01hJ+sA/zCqbnIqhzspzUiMf9B+9a1aO8Jtqlr+l6JSEksK3s22tMaAg87cuHgUAnsFsTi7bffZtiwYfz8888UFhbStm1bLrroIgYOHLhN5r8ff/yRhx56iG+++YaNGzfSqFEjevXqxR133EH9+vV3wB3sfvAzPl1K4o3GxhSEiKWtkSa/FAnNNxKbnMxAi342frXoYiQUX4BiGjpEnW81+anoMexi+roL3+XnDGRReMgcs/ELS9Cm0tDcoz0WjG+Yb/5ugDRoNdGGAiJDd+LnRq+Pb7mwKEKarSz8gnY25eEbiFikIYH/J1SgLh4p2GL6yEYbeHzC0KZNG+bPnx8eRevWrZk3bx4ODg4O8eCTixdQbZuBxF6rg6ice6V/jWVoDT0LOAUFU9+OhPpL8TMDWvIQJBY2LW2soG1bMC8PXzEURBp+qm7wLd02Jg10v6UoycZ5aM2GstaRBsglahlScMXDQLR/vODIhYNDBbHLa97feOONnHfeeUyYMIFu3brRs2dPFi1axD/+8Q+6d+9Ofn5+pfp75513OOKII3jnnXdo0aIFffr0ISkpiaFDh3LggQcyd+7cHXQnuw98UjEIaV0Saa+spt8K5qDF+2FUSC4VZVgagywVwdSstr3Ndb4FX+OfilyLJqGsI2MQabEbnb1uadT/aagOxRhUg2IEigGxbfKQJaEOPqmwxwiMaRZy/bK51mPFb/wFbY5PIotCECvQphWsnp2EXJ/qIDemdPwKr3ko6HGVOXcVBx/cCs/bjOeV0rZtk4hjbdvWM8e8iM1q3rx54c88z3OkwsHBoULQOpKLXDT7oziz25B19AfkqvSD+f824CjTbgoVjdnyr7EFpXtti6zKLyHLa8hcFyS4RyezsCJHsFZQEUo/O9/0nYmsyy0RWWlp/s8yx+ejtdbKBoXmPYRPTuqgtToektH+sSXQTywkodoYHfHj8RwcHBLC24UYOXKkB3jVq1f3fvzxx/Dnq1ev9jp37uwB3s0331zh/pYuXeplZmZ6gPf888+HPy8uLvYuvvhiD/C6du3qlZaWVmqcGzdu9ABv48aNlTpvVwDwIMeDWzzwKvgq9aDYgxLzd6kHazyY7sHfPejiwfzAsejXUg/qevBf00eJB309qO/BkEC76OsWeJAfdSy67yEeNPTgSjPGmR78av6ONZYS0+fJHtxv2m9IMPZNHpziQb/AZ8s9+MmDlYE+g2P80IMDzD3v0p+Qg4ODQxkQ1vxkeVDDrMUNzXsN8znbtH7pvPoefOLBjx6M8eAwD6714BAPPvegngeveZAXtVbbNXy9+X+dWc9nm3U6uAfFW9//a+7lcA+eNGu4XZtHmHX5nRjrdnB/8czx3zxYEmNvin6N8KCOW+8d9llURg7epRaLBx54AIBBgwZx6KGHhj+vW7cuzzzzDABDhw5l48aNFerviSeeYMuWLfTo0YO//vWv4c+Tk5N59tlnqVGjBpMnT+azzz6rwrvY3ZCKYirurMQ50VaLQuTyUxtpa+oA9wTahZCWZymKjViPXH1mmPPHAF8js/iNget4UddNMZ+VxDkOsppcgmI/vkHarQYkNraNRVaNk4if1jCYJes6lBHqV3NPK1EKw3hucz2QW1XlrGkODg4OOwNe2Oq5mf33b0TQWrr//o1iWksrhyRkzT0UuUNdgNb8TfhWh1lEWpI9f
Kt0PnI1XY7i7Fqb9/IyRYVQ7Y5uwGnAv1FiDYupyJJ8Rjn9ePiZrLaU0xbTX/MKtHNwcNhlxGLp0qVMnjwZgAsvvLDM8WOOOYZmzZpRUFDAxx9/XKE+R44cGbe/6tWr07u3aiy899572zrs3Rp+sPaNVC54zy74llwsQwJ5A/yiRFOR4L0VmaIX4KcNbIxiKD5Dgv/l5v/yyI0tcFRMWVIRCrwPRmb3axBxqhEYbzS2oIxPh6MiSfHSGgavZ4nCS2izq4HM78F2wT7SUWrGEEcffXSC+3NwcHDYtfj1118j3Ct//fXXKui1lEiB/GbkVroRuVYdgPaDQsq6Q2WhTFKrUaHRRlRcFClEiqMjgX8Cf0PkYhRyg/oCKdYq4rbkIYWYrVWUCBmIQGVVcJwODvsudhmxmDZtGgC1a9emVavYwVNdunSJaJsImzZtCsdP2PO2p789Fy2oeJE6iBSwk9DivAW/zoWHL3g/g7RDn6CiSbVQAHRNVIthCcqDXorITfUY17NZo2xK11T8ILx448pGQXTr8APFLUJR5zyOfGxvMn3aQPF4mjk7hu6m7yaUJRWxoOwjhYWFCdo4ODg47F047rjjkDA+O+rIv5Al4VOUlnsJsl4H4+hsko8ipMCpS+UwFu0D55p+bYa+x4C3UKxc3zjnxlrPgxkHAx5kMdERSKFjx46VHLODw76FXUYsFixYAEDz5vHNi82aNYtomwgLFy4M/x2vz8r0t2cii4oVqbMILqBWQF+PrBU55v9NaLHujLJE3YVqUVyPNEOtUXG3zchl6gVkMu6NbwGxGaK2mvfCwP+F6DG0maTioRcqtvRsgnt5CWmuBuK7MaVHtQm+gvUqOiAXqAzKItriYSt7p4Wtbg4ODg77AsaNG4eIwcwYR+9H67qtDfQUco2y8BAxyECKqUSIXnfzgKHAEWjfKUVB2APRfnWXaXdk1HmJyEIww1R557QH0pk1a1Y543Zw2Lexy4jFpk2bAMjKim9arF5dGu/c3Ny4baL7S9RnRfsrKCggNzc34rW7o3Pnzkjzvr3alDy0SOcj16cFaBM5CFkn3kIaqVHAE8i3dhXKF16AFvo/mbEUmJfNZZ6KX1QpDb9qql3Ei/Bzl1uh3yLd9DuPsot+HtrQnkfpBc9G5MhqyKIR9Pe1bdqYYwuj2kDZDc5mmtrlSdUcHBwcdgHykKtTdFrYg4CDgTcRCZiN0oVbV6MCpIRqiNbVaEt1PASt0beZ/jy0zmehdOH5yBpSjcTWZotYWQljXdciA7fmOziUD/criYEHH3yQGjVqhF/W0rE7Y8aMGUhYb1/BM2JZK0rQQr8JBWUnIStBe1RnojoyXzdCi3cHFKD9GdpEctBC3wHftJyKTySsMG6F+WR8smErs1rrRtCqUWDG1RFtHqtM2wLklnU+Mrnfhp+3PKiJCgadQyShsOOxloqtJCYVJeb621o11sHBwWFvwB8oqUY07kbxE98CV6D0tk+aYxvwLeKp+JbqeII9RFqjb0buqsX4a/catDe0QYQnXlKNUIy/LTGKToMefX3wU4s7ODgkwi4rkJedreDivLy8uG02b94MQE5OTtw20f3ZPmvUqLHN/d12223cdNNN4f9zc3P3CHKhhTZzO87fhBbPIvyK1lbgX41chW5GpuwgQoiANEVZlWz109TA8ViwVgpboM9aGaLjJqzbUkvT7/1mbNbf9nDgOSJrTthrF+JX645Fpiy2mvcMIklF9FhWok1tDVBA165d49ybg4ODw94Jz/MIhdKQ1bonkclC2qGg6qeBAcAxqC7SCqSIqotPCqyrrFVCRVwFEYXHEam4GjgT7RnWWrEckZUmwOmIwMxHrrtBRZLtLxpbiVRsWUSf4yHrSwEdOnSI0Y+Dg4PFLiMWLVu2BGDx4sVx29hjtm0itGjRIvz3okWLjGvQtvWXnp5OenpF4xR2J0Rn6qgI7AK6GpGCZCS02xiFZciUPQ5liToOOBCRhwy0MM9DblNPo
w2jOpGbRKwF3VpHQqZtcjntPWTyDgHDkVXkDFS0L5aVJgk/rWENfIISz0hnq123CHwWTSrWoBiUhuh+i/nhhx/i9Ofg4OCw9yIlxaO4eAYK2n4k6uhfkNJnKHJNzQNeRCnDb0YB1qn4yiSrYEpBe4HN/jQUEYd/oqxTxea1FQWHl6D9qhayWiQBP6GAbkicvrYUubXGSjISCzOBYmbOjBVb4uDgYLHLiMUhhxwCwNq1a1mwYEHMzFBTpkwBiKhxEQ85OTm0bduWuXPnMmXKlJjEojL97Wno1KkTM2YsQVqVbuW0jhbclyOhGaRNqo8W5JHAEJT16U6kiepEWRLQCegKvIKsHuVlSrKbQ9CqEUTQzSiodbJE5AzgdzPWZuZ6yURuIqXIUpGLUhoG0+kGtVH2s1lI61WdspaKEmSpWG+umYXiTOJb2xwcHBz2ZhQVFZkU5/9BKccHRrX4Bwrgfh6tm92A71CQ9XOozlAHpKRKR0qxeSjj4NeImHRDAeANUW2ifLTulqK1uiFSIGH6yURr8+3EV27ZtT0X7UO1E9yl3R8KcGu+g0PFsMtiLJo2bRp2I3njjTfKHB8/fjyLFy8mPT2d0047rUJ9nnnmmXH727x5M6NHjwbgrLPO2tZh77aYPn068TN1JMJqtIDbnN6NzOcvIZejP6FAvKPRQh7PZPwQclUqRvEZ8WB9alNInGs8lvl6DrrHx5Cp/UXgQ9OfjcmwryJEAIpRsGDwUfei/i4EvgQOI5JUlCJt2Tzz3hBZbUajTFkODg4O+y5UYG8VWv9vRYqlIP4C/A+51i5ALlM3owDv8Sge41IUJ3c5cC8wwRx/CBVIzQfmAovRWl4TxVM0xycVHtqbMtB+9jmJXaBsrFwmvqtsPIRwa76DQ8WxS4O3//nPfwLw0EMPMXXq1PDna9eu5ZprrgFgwIABEfESI0eOZP/99+ekk04q09+NN95IZmYmX3zxBS+++GL485KSEq655ho2bNhA165d+dOf/rSjbmkXI16mjngoQMQiEy2+tqL1SKRRuhoVp6uGNox4GbzmI03UQNN2Rpx2NqYimUhjWbzAvejPZqCNIx24Epm7n0K+u9asbl8ZaAPKRBuIjeOIrgTroaJKa1Bu9HxkmViOiMxS03cbtDnmIp/f3O2oWuvg4OCwd8AnFy+gInJvE7kHtQP+C9yD1uX6wCUobfm/kVX8MbTnfGrOvw1ZM0rxA7VtcdSGlE0jXoQsHunIgj4U7YfRa7S1QNhYucYVuEN/zXdwcCgfu5RY9O3bl+uvv57NmzdzxBFHcOqpp3LOOefQtm1bpk+fztFHH829994bcc7GjRv5/fffmTdvXpn+GjduzPDhw0lOTuavf/0rRxxxBH/+859p3749r732Gg0aNOCNN94w5tu9FfEydcSCrbBdhL9oL0OLaB8kvINIRTHx846/i8zJJ6F4B1txNRrBmIpYSBTkXWD6bYk2BQ8RmZpoUypB2iv7so92YzP2lVHXsdfajGJDrIvXAjQHm5CFpjXQCm1YpUijNpNQKFHNDQcHB4d9B57nkZKyBVXd7o9qSdyGMkL9APyCr7haixQ+SWj/qWteVgk0H8VPrEd7RVNgP2RNX4+UYRBZI6kEWTRsco81wKNRo7SkwsbKNaD8mk/+mq8YDld528GhPOzydLNPPvkkb731FkceeSQTJ07k448/pmnTpjz00EOMHTuWatXKM1NG4txzz+X777/nrLPOYv78+YwcOZKSkhKuvfZafv75Z9q2bbuD7mTXQ5qjXJSpI9okHY1886qPAuFy0OPwEH4lbdDCugoJ2fEW4e+BE9Em8He0KYyOHh1+cN62ELsP0cbxd7SRLDdjutZcf36Mc2wMRR20kayJce3H0UZ3JyItrYD9EUFqgm8mLzVt/wPkUlrq0g46ODg4WBQVFeF5hWg9nYafFeoMZMm4ECl4fkFKrP2Qy9NBSGhviR/fB36NilKkALJpalegvSAfX1mVjmIzW
qL1+3rgPeQui2lTgvaNVYjI1CnnjoJrfgly4Urh9NNPr8y0ODjscwh5zp+jXOTm5lKjRg02btxYodS3uxKyxuSgeg7RmTqCWIxMx61Q4HIzFEdwPtLQHIcIwlIkyB+JCEee+bwQWQZqA6ehYLk/IWG+G7JufIKfccNm/UinbA2IeHnDLTaiVIIbkOZoCdq8Gpox9QJOQBqyYP0KGySeatqvMeNqYK75IvAMcAsyzcdDrpmT/wCrnAuUg4ODQwVw/PHH880330R9eigwkcTWgo2IQBTgu7Haddf+n4HW8ppof+kFdAduNG2fRWv2mWg/3Iz2gwaUTyqCa/460/8PiCS5PcBh30Nl5OBdlhXKYcdA+cXjZeoIVrPOx6+wbRfpF9CC/iLS4JfgB1vnosclCQXgWReq9cg6MtL00Qxl/RiAFub/C1zb5i4vD8FNxAMeQPEOo8xnTc21V5j3E5EJ3l7D+tyW4MdcNDDvK5HG6i3gIxQwGI9UbEWWl8cRoXFxFQ4ODg4Vxbhx4yL+195k3XXPTXBmDfMCreWb8dOpb8CPqViE9oEfzd9tkaKsCK35LYFhSMn1F6Q4C9bciEb0mm/TooOrvO3gUDE4YrEXwicXD6F4gTvwfUOtSbgILZTW1el5tNi3QKljL0I+r28APyPLxjGIODRC1ooC0/8c0+Z2VKzuZrRpvI7IzQ3EryERi2gEScWTwGumvyMDbVrhWy5qos1kPbJIBAsuBR/xLGSGfxylq61t7v115Ppk63LMRpvKp2izyiUUKqa01JEKBwcHh21F4sJ68ZCKH99XB+1BoP1nFdqDnkZW+AFonynFt3YUAlOBQchCfQpyvUq85sNVpr2Fq7zt4FAROGKxl8InFy+g9H0DkRk3HT+wOh0Rh+eQducOFLRdE3gfBcHVQlaHkxCZKEIao7WIoBxl/m6CzNvDgH4oRmMdqri6FMVG1CZ26r+IkZv3TcB9SOjvhqwWweOYa9ZC8RXrgeOB3mjT2A8Ro3xEfGag4O8/kFWmyLw/jX4G6URuSMXYnOXOSuHg4OBQVShC63GswnqVQTraA55CiqJ8tCedjBRc5wAHBNr3QdaI2cRf87eg4n3vxbieKm9XNP29g8O+Ckcs9nqE0CLeH1kw/oSE7hrIEvABIhyXI8KRAbyK3KH6IIKQafoqQotvDUQSViJSYS0EPVFsxhBESvojwjIckZubzLVs7vEgrPBegFyUhqDc5RcgopIZ4xzM503M38sQESlAWi7rUlVk2oWALaSlhSgo8MlCt27dmDx5cvj/rl27uoraDg4ODjsAid11K4PIhBrqtzrQGSnJovF+4O+Pzbkb0X42EMUKJoIqb3/00UfbOF4Hh30DLni7AtiTgrcttHDXR+bfgSiP+H3IdFyCXxDvWqTlt4XvZiEt0tXIFGwfD+s+ZWMWLNbgZ2gKVk9/CblXDUaE5Uok3DdD5KYTym9ejbJWhSUoMPsl877etI9X8OhLlHHkX2bcK1GmqC3mmt3M/T+MC7xzcHBw2PXw96hLUUxfRdyiLGIn1FCfFQkOryy2Iuv8NLd/OOyTcMHb+zj8zFCX4muDLjIvULXTx4GLkYvSIkQaViOzsq1hEU0qbN7xIOoiC8Fa/ExRmPNXoBoTr6AUgL3Rgv8/5GYUbVXIMu0eRW5NHkohmGH6tOMJxmWUmjYFyHcWZCXpbf52GZ0cHBwcdjfEdtftTWJCkDihRigUwvMqEhxeWajydlKSC952cCgPzmJRAexpFgsFx3UFxhCpBbJfdQ4yF3+IXJpyUZzCIyh+4i1EEGwWqWiBPoQsHMn4weBz8CtUW+QBf0aWjGVAF2RBAVklpuG7KXVBJCWI9Yj0NEdxH9GB3iHT5i7kvrUxcMxldHJwcHDY3SFykYr2peZUPLi6KOaaHn//21bkIjffKaZOh4PDvgdnsdiHoUW6DsrlnU0kMQBl40hHmZoyzbFs/MJF9yHCYFO3Wg1N0A2pFD8NbRLaFOqhI
O2t+BaGLORqdQdKCTsWZYxKRZml6pixWDesIEqQ21aqGaeNm0iOarMYGIeIxw/E2oCiYyocHBwcHHYPRLoxrSVxcHX5CTUyMpLZurUqgsMhWHk7IyPWPvX/7J13mFTV+cc/s7C7lN2liDTpsJSFZRW7YjRqRFGRYIzR2E1iTSwxJhqTaPwlptp7L9hFsWPXGGMvCyxlYQEBQem7LGXr/f3xPYd7Z3a2CbiF9/M888zOzLnn3pmduff9nrcZhpGI+fVaJf2RSznatwIkDv4WeT1AxjnA28hj4Dtop7t7Ly42o2Tq2cBCt12qm6MMrTalIkM/ysGE1aDWoHwICL0dFdQkQHkbZagOuRcfPnncsxx4F3k/1qHE8PNRpafPgdUEQTllZWXJPybDMAyjWRAEgbuVMnp0f7Sw9DWwgtGj+xMEpVvG1MWmTZuQl+FB5LH+tsQnh2tewzDqwzwWrY6OKDk61T2OJdxvdq+3J1wFWoIaDB3qtveN7IqAKSgRejE1RUp/lBg9wf2dRdjh2oc1pSGx8jFKfrsVVY7qgMRCVCjg9rHCzbMTYf8NX+rWj1+DBIVO+r6J36GHHsprr71W/8dkGIZhNEvy8/O3avuavZy2TXK4YRj1Y8KiFTFw4ED0Lx3pnokRn5cwBxnz/vVUJBi+QWLAV176CvWueB95G76PEsEHEca8LkAVpF4HngD2AX7htl9J2Pk0BdUSn4KqMp2OSsleQegw82FXVcgLsRqJlD4J77BtZMwaVOlpDlBpJ37DMAxjC9sjOdwwjPoxYdGKWLRoESrfN5TkUW5PoZX/oe7xJhRCVO7GD0K1vq9FOQtXo8Z4qdQkF1WP+jUKb7oFlXo9DjgerfhsQLkXvgxtFepl8ReUY3F65PliJHDKkKciUVRUuzlXuHmfRoLGTvyGYRhGTYIgoH379mze/Anqq3QNDU0Ob9euDZs22bXFMBqLCYtWQlpaGmFORG3N5NYnvP4lOrF6r8YLyLOQ2BgvGf6Em0p8Y7z7kFC5CLmgv0JxsiXopP19YBIqa7sAOBUJi3J3HD2Qt2IzYdLeZsJwJ1DVqnuBlSYqDMMwjFrxuREpKSkEQf3J4SkpKVRVVdUym2EY9WHJ262EigrQKn+AGsMlIxOdRDfiy/XJkG+HPBePIq/DFdQtKpLRAbjcbf8w8ih0Rw3xUpE4uBN10Z6CvA53ASejBOwO7rYaJYcXuftlQCnytMxHfTf+gxryGYZhGEb9VFdXb0kOHz9+f6LJ4ePH778lOdxEhWFsHeaxaAWEJWYvQklqhSipOpEfoYZ1hUAXZPB3QsZ8CQphOnMrjiRAwuEbFKO6Nwp5Wo1yNV5EXpMytFr0EupBcRsK0zoYNcjrg4REOUrQnoNK1a5BuRx3oNCrm7biWA3DMIwdkRdffLGpD8EwWi0mLFoN/VHy9B9RrCjIK7CUsCN2XxR2VACMRsZ+DLgBGAacsZXH4JOwL0SVpK5FIU/zgN5IVPRAogZUcWNnJCqeRJWjplCz+lQ/JDqOA4a450cCbRk7diz//e9/t/K4DcMwDMMwjK3FhEWrwJeYTXd/PwTMRWVkE430CpTrMAmVnF2Aqj9djERHQM0O1w2lGlWdykSJcle743gbJYFnoYpOlW7Mq6hi1GDgMjfHBpT7Ue7eTz/CkrNRhgLpvPfee9/yWA3DMAzDMIxtiQmLFs7BBx+MQpq6AychYzwTlY49lZolYp9BSdofI6P+KeS5OBoJkfUoPAriRUl9eFHSxt0fBNyMOp+uQd6G3u5YVyJxsxSFZkXp6I7LC5zaRE47LEXIMAzDMAyj+WDCooXz1ltvIU/AvSg34UYkMjKBbGToe3yJ2D7AA8jY/whVaspECdEr3H10uygxkguOSsIu3dUo9OoglLA9AZWcDVDjvGKUH7ETqiZVG3V5T3zVKMMwDMMwDKM5YEu+rYJ2wETkfTgShRZtQhWVkpGNQpSuRTW7c5AR3xN5Hr6pZ3+JnoQKt73vd
+GFR28kAC6IjA2AycAi5E2poKZQaUgoViFQxv7779+AsYZhGIZhGMb2xoRFC0bVoHYGzkU5Cj4Xoav7ezUKN9qIDPwqVFHpU/f3Eyj0aZDbLhUlV69D4UrJCBLuK91cvoeGf22tO46d3Ov++btQONYpKJdiDqoSVUa8ByIW2SYZBUClJW4bhmEYhmE0E0xYtFAkKrKQgX4cSnYGJT8viYxchXpCvIBCoX6N8i+ykIhYj0KTfI5EJxSutBIlWicLN/LGfgUSFm0JcyuqUG3wVcgD0gGJmvXAn4FbUfWp/3P7eQqJkoCwSVF9bEY5GhsaMNYwDMMwDMP4LrAcixZLKhIIf0KhS+tRkvQm9G/NQt6MGHAV8AjKreiODPq5SFBkuu0WEuZm7OzmWIGMd/98CqF4qCQMf/KiYr3bZqPbv8/H+BD4LRIbpwE/cNsfArxHKEwqCEOj6vpqPo9CuAzDMAzDMIzmggmLFkjYEO9CZPAvQN2pOwC7IKPeO6NOBV5z9/NR3kU1cA1K9E5Bxnw68BX6SmSiUrS9kRhYigz/DJSUnUaYqL0JeRpKCEOi2qBQrHfcnDcCe6Kmdl2Q+JiPPBpfIUGS6eb1XpAYyR1qJaj5XglB0JiqVYZhGIZhGMb2xIRFi6U/qrY0Ha3wd0WGelpkzG+RqLgUGe6PAicjL0cH1L/iDeS9mIQEwjrkpVjr5vDGexkKt/IGfzTBOtXNl4HCoCpRKNVMJICeQg34/HyZblxnt59FKEwL9JUMkMBIS9hPNeqNUUCbNlYRyjAMwzAMozlhwqJF4hviFSFjuxcy0qsJS7R+gkrKnoJEyNXAz4Ffuu0DJDBeR/kXlyCvRQ+3j2pCMZHmXou55324Ugph0nZAmJMx0G3zGXAsoajAzdEGeVZ6ubn+DdwfeT3VzVPp9uuP5zrUrbuEykrzVhiGYRiGYTQnLHm7hTFs2DBkeA9Ghv9OSAz4BOgKN/JnwBB3fy1qgHcsMtS9+BgAXETYOC9KCipjm+5u3kuR4h63jzwPqkC1zh1LJ+BNFEZ1IjXLx/rHHZGn43kkhKK0RWKiGoU//Rb4B7DCQqAMwzAMwzCaISYsWhiFhYXIG5BFmAcBofegCngVhRpdhBrRdQbOj4wDiYvAjRkOXI8ERn0lXxOfq0aeihUof6ILyve4CdgdeSuSCYEYShjvhPpq/Czh9TZIOD0JHI4a7ZmoMAzDMAzDaK5YKFSLpR2q1pTYITtAHoo+qPHdNSgMyve4SJab8DAKrboBCZDuhFWdfPhTdD8xJGBKkaCoRJ4Kv49/Iw/G7fW8h9nIa3I88CvUPTwXmIfyM15BvS5KaNOm2sKfDMMwDMMwmjEmLFokPpeic+S5ze75GErGPgGFGHVFZV29MChD4UdRI7036itxlfv7aFTCNgt5QdoiIVONci42ofCkSjdXfzd/OXA38DRKGO8f2UdiOFQZCpc6BImaXVCjv64onKud2/8mcnKyKSgoaOiHYxiGYRiGYTQBJixaGEOHDqWw8BvkEfBhTZsIez8Uo6pOo5CB/30kDnD3m0kemjSR0MuwCVWPKkUCoAoJB7+/NFQBKtPdQPkV1wNTUQO8ZLkVUd5EFaF+jEK6DkOhWL8HdgX2Bz4CjmbWrFl1zGMYhmEYhmE0B0xYtDBSUlKQob/IPeOFgvcsfOr+7ovCiE6ObN0ReRp6kNzoPxMlg1/r5jkb2As10KtC4VUdkcCoQp6FclSy9kaUrH0p9YuKUtTTYl9gkJsjx20zBIkKkNfC0oAMwzAMwzBaAiYsWhhz5sxBxv27xHsq2iLDfKO7L3GvDSI08rsiz0IJSppORmfkkXgP+ACJiQPdPLsQVoIqQU3u3kGejhwkLrLdPN4rkigwfNnYYuB3hGVrB7u5ZwLj3Fgf3mUYhmEYhmE0d0xYtEjaI2P+FWSEp0Ze8/kT693jdgnbdUAJ15nEewM+Q5WZliNvxzkoiXsBytl4k7Ar9iYkYNoCPwIuQ8JjQ
JJjDRL+vgd4FriCsKJVzB1bLHLcAIVAGTk5Ock/BsMwDMMwDKPZYMKiRZKCQoZuBw5G4sF7BnZHAuAr93hzwra9UWO9RUgklKPSsM+5OW8BjiK+gzdISCxCoqINMAu4FeVxVAI3R8bGqJnHsQF5Kp5FIVbHJIzfhLwTmZHnC4BKS9w2DMMwDMNoAZiwaGEMHz6cOXO+RgnOz6MSsVdERuyMQqW+Rgb7AlTCFSQopgD/RfkXbYDF7v5k5HnIomYJW5CnI4cwt2IEMB6Vs52MxMW9bqxvwAdh9adbUPjTFUhUREOkqt2xlaOKVCBB9AoSJIZhGIZhGEZzx4RFC2P27NnEYhkoUfos5LXoRXyDuRzgbRTSNAvYA/gb8D7Ks/g+cBryUnwN/Ab4JTLwq5BI8A33Uoj3PvjqUGUoJ+IqlAz+L+AIJAzaIHGzFHgLJX/vi3IqdknyrgJ3nABD3f3zSPQYhmEYhmEYLQETFi2SDcB/gMvRyv9tKDfiYmTQX4L6WIwAHgdeRB2xr0Z9I1JRTsU04BTgQjev91R4cVHmxkY9GMm6cJ/l9n8X8LHbpsy93gP4A6oUVRubULfwzu5xCQqbKrFO24ZhGIZhGC0Eq+XZYlmKqjadgXo/vIo6WL+MyrX2QuJjBXAQ8BhwOGGi989QTsUfkszdBnkj2hAmbAeRWyVhkz5QPsblqHdGd+QJuQXlUmQgwbMbKmGbSAC8hLwTv3PzXo3yKyoa/nEYhmEYhmEYTYoJixZLOXAHSqr+IRIOA5FQOAoJi02outOpxFeHeht5GC5Chn9tPSd8b4xK5MXwosKHSqW5eVORR+Ri1PRuE/KY/BXlc9yCcjSOROIhyjqUJ1IN/Bx5Kh7EvBWGYRiGYRgtCxMWLZY05I241j3eBVV3ehw1tZuOSsEejcKSvols+y+Uf3EUNSs4VROWk92EviJtkJioQAKjLRIUiUneR6K+F/9wjwPk+TiWMOzqQZTf4ff1F+SduBb4rdt2hYkKwzAMwzCMFoblWLRAunXrxqpVFcABqHyrT97ejHIs5gHDgF+758pQ34tUoBtKlD4BGf2BG7MG5W4kCz9KRX0mMt38tX1t0oHDkPfE4ytEZSLR0Av4J6pA1Q0JjT7AnUhgmKfCMAzDMAyjJWLCogWycuVKVxmqPcpjuBmVZl0GLERhUn8mvtlcBcrLWIoExCgkOJYRNrvLQp4I31272o3ZjBKqS1BIU29q9rnwjAJKUdWqbu65qFC4AIVh3eme3+D2Mw+oMFFhGIZhGIbRQjFh0WLZgBK2x6HO2EuB/sBwFLZ0IhII1UgYbEJ9JN5B//ZeqHdEWxRGlUXyXIv27r4HEhYr3HY9UOnaRLKRh+NTd2yJVKOqVe8Cn7hjXW2CwjAMwzAMo4XTpDkW69ev5/LLL2fYsGG0b9+ebt26ceSRR/Lmm29+q/kOOuggYrFYrbeePXtu43fQ1OSjxOgxqOTs20An1E/Cex86IAHQG4mODMI8is7AYLdNbQncnpgbN9ht9zUSGYm0d2M3II+E741RgQROOQqnutDNh4kKwzAMwzCMVkCTeSxWrFjBAQccQGFhIb169eLoo4/mm2++4eWXX+bll1/mhhtu4Je//OW3mnvcuHFJRUSnTp229rCbGTuhhOg/IsGwEXkuTnWvRw32AOnIF9zfHZHXorGkuO1SkbBoS7znYpObPxUJiSgBSvhOBSYAf0e5H4ZhGIZhGEZLp8mExS9+8QsKCws55JBDeO655+jQoQMAL730EhMmTODCCy/kwAMPZPTo0Y2e+3e/+x0HHXTQNj7i5kMsFkP9In6LSsZ6liLjfTA1O2aDwpPeRp6MNVt5FN2QF+IbJGp8zsU89/yukbEpyItRHRmXjkKlCrfyOAzDMAzDMIzmQJOEQs2aNYtnn32WNm3acM8992wRFQDjx4/ntNNOo7q6mmuuuaYpDq9ZI1GRBZxMvKgAhRlB2LMiRnyI08+Aoagj96xtcDQ9kDZdF
nluJvKG7OKOo727T3PHUhk5ppFAW7Kzs7fBsRiGYRiGYRhNSZMIi2eeeQaA/fffn/79+9d4/cQTTwTg+eefp6LCui/H0xYZ5FdQUzh4b0BiCBLAWygv4iJgb+ANtr6zdQrynGwkLGv7Ksrl8F4KCEvO+kpT/riHAunMnz9/K4/DMAzDMAzDaGqaJBTq888/B2CPPfZI+rp/fsOGDcybN4+cnJxGzf/MM88wdepUNm3aRI8ePdhvv/047LDDSElp2f0A5a3YCZVsjeaL+CZ3fdzfC1DZ1yj/cq8fBeQAU4HXgPFbeVRZKBxqDfABCsf6e5JxPsfDd/COIU9Gy/6fGIZhGIZhGKJJhMXChQsB6NevX9LXs7KyyMrKoqSkhIULFzZaWNx44401nhs6dCiTJ09mzz33rHf7srIyysrKtjwuKSlp1P63L/1Qh+vEKk4xVAGqPwpzmpDw+mxUgjYNeRR2R/0vDnLbfVt8aNY3qHt2D2A/JCASO3MnejA2Iw+GYRiGYRiG0dJpkuXi9evVuK1jx461jsnIyAAaZ9QfcMAB3HXXXcydO5cNGzawdOlSnnnmGUaOHElhYSGHHnoos2fPrneea665hk6dOm259e3bt8HHsH3piDpbt6tjzF4o7Cka5rQKhStFvRh/cs//axscVzpwN0rcvgcJigrC5PFEEeSfL0R5IXW9H8MwDMMwDKMl0GiPxaWXXspzzz3X6B3dfffdjB07ttHbNYarr7467nGHDh3YZZddOOKIIzjggAP4+OOPueyyy5g6dWqd81x22WVcfPHFWx6XlJQ0ubgYOHAg+nflEG+oJ1Z+OhZ4AuVQHO6e+wSVeI0mSQ9AIVX/ROVjf/4tjywAHgGeRN6P3d3z1UhcpNexbYHbPp28vDzy8/O/5TEYhmEYhmEYTU2jhcWyZcuYO3duo3dUWlq65e/MzExAORT1jc/Kymr0vhJJT0/n97//PRMnTmTatGlUVFSQmppa5/j09LoM4u+eRYsWoUTpwYQ5FckYBOwL3Ap8D4U5bSQMlfIEwE9RH4lbgOWoI7YfU9c+PBuAfyMhsxk4PvJaWyQsqgkTuaMejM0o0TsDqGD69On17MswDMMwDMNozjQ6FGry5MkEQdDo2+GHH75ljgEDBgCwePHipPsoKSnZEgLlx24tI0aMAJQ/sWrVqm0y53dPCjL8kxn80QpRvwXWoZyHWGSbjUm2+xXwG9Q471jgJeLDmJJR7sb9yG13AhIIUeHShrC8rMdXhAJ4HvgSOBtL4DYMwzAMw2j5NIlFN2bMGAA++eSTpK/75zt27MjQoUO3yT5Xrw47PHuPScujGomDqNGfWHI2AHoDFwPPotyHPZBYmFfLvD9FoUy9gMtQCNVfUeWo6cBcdz/VPX+EG9fLbTfIzT8KJW17fHlZf1zee1ECXOeO+zAsgdswDMMwDKPl0yRVoSZOnMgVV1zBe++9x+LFi2tUh3rkkUcAOProo+sMWWoMjz32GCDPhU8Mb0kMGDCARYvWAkXAntT/r/shKgF7Gwpz6oCa19VGfyRC5gKPAu+hEKdEdgHGogpTw91zdyKPRQ9Cb0db4svL+vs2wF9QfsWDKIG77Ft1WDcMwzAMwzCaD03isRg5ciTHHHMMVVVVnHnmmWzatGnLay+//DL3338/KSkpXHbZZTW2PeWUUxg+fDg333xz3PNvvfUWb7/9NkEQH8JTXl7O3/72N2666SYAfv3rX2+Hd7T9Wbp0KQormoVW+JOFKiVWYToT+D3KZdjo7suTbBfddjiqGPU0SgB/AAmO+1Dfi2eBqwhFhc+VGIESxNu646yKHEc1YcftG5CgCFAOSAFQaYnbhmEYhmEYLZwm8VgA3HnnncyaNYvXX3+dwYMHc8ABB7BixQreeecdgiDghhtuSLqKvXjxYubOnVsjTyI/P5+LLrqIHj16sOuuu7LTTjuxcuVKpk+fzjfffAPAJZdcwplnnvmdvL9tTWVlDNiEjPvL0L/O/
/uChFu06/UPUQnac4H/oXCmH0bG+KTq6oT7FKArasjnKSf0SKS6bV9ETfH+7ca0da9XEHYCr0ThT/9EoqI3Eh4zgFdQErhhGIZhGIbRkmkyYdG9e3c++eQTrrnmGqZMmcKzzz5Lx44dGTduHJdccgmHHHJIo+Y78MADOeecc/j000+ZPn06a9asISUlhd69ezNu3DjOOuss9ttvv+30brYvYcftA4F3gGnAMUgE1Oa9iKGwozbIkH8GeRluBA6lZlM8Lyb8Nom9J0BCoYLQI7EZJYj3Qt4Hv99UQi/FZiQebkRN+g5GoVA/Bv4DJE/gNwzDMAzDMFoWsSAxdsioQUlJCZ06daK4uHiblL9tLBIWY5DHIQv1ipgKZCIRkOh98J6HqPehLfAx6sh9GvB3wqTpcrdtQ0vsVgNlwB9RmNQPgR+gPhntkWdlLsrpeBWJh7Yo32MwCn+6wu1vPkFQW3iWYRiGYRiG0ZQ0xg5uMo+F0Rh8x+0TkDdgFgor+hsSBD5VJiD0NLRx91XIc1COPBbHAJORl+EC4kObauuUnUgMuAN4GOiCvCgvEoZH+VCocmC92+cm4Eq3/WagFPXQSHPCiRr5MYZhGIZhGEbLwYRFM+fAAw9EBvsUoBg4ByViP4xCnC4g7A+RrKldG+RdWIQExkVunn8Cy1APi05unN82KjAS51wP/B8SJwe54wiAlajD9wbktRiD8i7uQT01DkFdwwcRiopiJJJeBb4kFlOolQkMwzAMwzCMloeFQjWApgyF0mp+GtAZ+B1wITL2T0CegpNQWFEGNT0NMZRYvRqFHXV382xEvSpmImP/IhQilRbZNtF7UYaa4V2H+mGcgDwmifvznpJ1wCPAzSgEqzNhiNZ85IXpE5n7OTd3AVBi4sIwDMMwDKMZYKFQLRwfGiTjuy3QDjgFiQqQgf4IEgT3o54TFwFHEYqDGLAQVWPaCYU+VaLE71vcHGeiHhTno5yLw1CTu2iuRCEy9l8FlgDdUNfsvZBIKEXekHJ3nH3cMawGhiFvyDfuPkAej0riq02lA8chsfNn4EFisZiJC8MwDMMwjBaECYtmhARFKjK6+yND/yZk7P8hyRbXIc/BGcB5yINwGDDSzVGOkr0XoZCjN1HTvL2RJ6EUeT0uQELlURTKFM2V8IJhMAp7OhRVd/oT8kQsS3JcPd0xfN89LiP0VqxAFanaJ9kuE4Vo9QT+YeLCMAzDMAyjBWHCopkgUZGFRMSFKDTpMWTUX4iM7miokze490IhTU8iEfIIEgwpbr5Obrt+KM/hOCRaKoAvkbHfGXksUpHBPxuFS3UA9kDCwIdTnQ58ijwX0bwJ7+Eoctu/AbwMfI1Cn3KRqKh0+6+Li9x2d5q4MAzDMAzDaCFYjkUD2N45FhIV3VG40x+RiACFJGUB/0VGfbJG6f7fl49CkIYDee7vm1HVpr4orMqPXQA8DnwIvI9Egu9r4UXI3sCxSDRUu7keAnZGoVM/oGZORvSrVIlK4v4beUyOB85C3ohoGFRtlKDQqE+sHK1hGIZhGEYTYTkWLYhQVPwOrdRHWQ38iNCAj5aT3TKDu69GIuQdVKHpFmCfyLgAhS39DYmJLqhZ3XhgKMqN2IxExyzgdeAJYF8kJp4CJgGXIJESnTd6748nFTgAGIEEyWSUa/F4rZ9FPFno8zjHvBaGYRiGYRgtgGRL4MZ3RBj+dAo1RcVCZKyPJHkZ2Sib3Jj2wL+Qh+Io4kXIVOAnyHtwNeo78VvUY2IXlEMxCoVg/Q5VgLoaiYybgLHIm+I9G6tQqNPTKCF8JTVDtdYj78RfkSB5183dUI5G3hPDMAzDaF4UFhbSvn3NfMGlS5fSs2fPpNu0b9+ewsLCGs/37NmTpUuXbvNjNIzvGhMWTUoqMub/mOS1aSj8aSjxDfCSCYy1hJ2zZxEfphQD7gX+ghK7H0MhRqnu9c4obGl9kmM7GPgHyqv4CAmRo1B41EjgZyjx+2coh2KQe/0tFMpUg
bwbFcDZwInAXcj7EX0/0VuUdsA44j0khmEYhtG0FBYWMmzYMDpv3hyp5ChR0bdvX2LffBP3PGgxscvmzQwbNixOXMRiMVK++Ya+ffvWKy7qEi21YaLF+C4xYdFEhN6KCwlzKqKUIlHgPQQp1DS+V6PE7a/d2NWoqtMo93oAPAPchgz7KyLzedJRD4wVKJwqyjJ3bGcjI/9+t68TgOuRF+QVd3+9e34FEhD7I+9IJhI5acBlqATt6e72ZZL3nSgyRgJtGTFiRJKxhmEYhrH1FBUVkZGR0aCxXlSMQNmDOeia7kVFLroa+udx9yNR3EAubBEXic/XJS7qEi210RjRYhjbAhMWTUp/FHqUjAxkXG90j6NdsNehZO0l6F/YyY35Ankast3jZcC1KNzpzDqOoydQhXIgPJvdvt9Fp85MVN72AeBKdNrcA50693CP/4ISzW9GnpAzgcvdMbYBugK/RuKmAFWoeriO4wqQxyadOXPm1DHOMAzDML4dRUVFDBkyhMwNG+o12L2oGIyu3gcCv0TZhF5UXISuvL8iFBcj3bhr3eteXIxI8nwyEVCXaKmNxogWw9hWmLBoMjqi0KT0Wl4/HJV5jcZipqDci4Vuu36oClQ/ZIQXE+/l+Bsy8C+u51jSgB5IsKxyz61B+RM3ovCmp9DprB3yWtRGKkrynoZyRx5EfTY8R6IckK5u3n+6fcQityjtsK+pYRhG62LOnDmkp9d2/UtOevq2X2TyoiLRYE92bFFR8RO0ZFeFyq8cifzr30P+eFAdxAPcnBPdY9zrP0SxBWOBn0eeTyYuoqIimWhJRm1ipqHi4vPPP2+wVyS6z88//7xR2xitD7PYmoCRI0ciA3xkHaMGIiO7IPLcbBQi1Q15Jbq4MW0JvQLey7EAVX86l5rhTxAfUhW4uXZGSdjLkah4EjXe80nbKegUWooa59U1ZwbKz/gNCpe63D2fjgRVIWqydy7KAYl6LqICYzM1Q7QMwzCMlsqcOXMYMWIEXcvLG2S8+pChncrLGTFiRIPERW1J0lGiosIb7OejciZZCceWKCpORDUTR6GMwj2Ac9DV91G3zaPoSvkrYFfgNff8a8jIvxBlOD5KeJVLFBcffPBBnKiIipbaxEVUVJxVy7zvvfceXbt2Tfq5fP7554wZM4aeCXN37dqVL79MFsKscb2AMWPGmLjYwTFh0QTMmjULeQmG1jNyJ+BV5LmY7+57oRX/xH9dO2AASpSeB0xBXoFDImOSJUv7W7Ub3xOYi0KejiVcS/FkIiGzNuH5aLnZWOS5C4CTgPuAz9zzo5A4Wenmn4RyNBYlzBlDAqQM6NLo1RPDMAyjeeFFRX0hPfPmzaNDhw5bDPqeqP5hLtQqLjp06MC8efOIxWJ0TpIkHSVRVJyOMgQDwvqIQ9yxJRMVOW7scpQtOQL54A9HwuFqdz8JGfcjUUzBE+5+JAoWPh61k00mLkYB++67LyOJFxWeZOIimajw+HlHAmPHjiV97doan70XFaPce/Vzx2Ix2q1dy4ABA2qIC7/P490xm7jYsTFh0WSkkNyTEOUKYDHyHPjSrT3ca4lhQz2Rl6ADSuj+CPg+8owkJn3H3P5TiBcBPqH8XuS9+HWSY4ohcbGBmonWyQz/mHsfg1H1KJC3JRX4xG17kXtvVyXZvgCdbs8EskxcGIZhtFC8qBiOQoZqC+mZN28eQ4cOJXPTJoYNG8YoZLT2RsZ/MnERi8XI3LSJoUOHkoM6QI2EpOLCi4pBaOnrdOADFJT7jjumS90+h7g5uhJmE+a4efJR8HBfFJTcEbWWPRyVJpmExAZIeIxES4Aj3WNQ3cPRyK+/PHKMp6ISKP1QMHGiqFiGroyJ4qI2URGdd0/3GR5D/GcfFRXnoyDqc9yYDMJcjai48Ps8240/HxMXOzomLJqMasLE7No41d3f6O57JxnjBUYW8iT0R/kNiwjXVKLjamuw5+8L0OnyPGov89oOeRH8+kqyeaNkolPOcuA/q
N9GDK3blCORcQ4SGv9F61KgMKhX3Ot/QKfjVAzDMIyWRVRUlKJ6haOpaRh7UZGDDPZRkTG/QEtQieIiFovRBxm//dAy1EVINIwiXlxsSdRG/v2pKGj4JaAIGdv7oniCk5G4GISu1h+h1rPVKDB5jTvGUmToZxGKiz+gvIsoI1Ax+GiNwxeAD90+1rjnAlQC5W03NjFL8iEUZPwgobj4JRIidYmKanf861GswERC4RCLxeJExZHoSn0MEg0DUPyAD6caMGBAnKiY6MYfiYmLHR0TFk1ATk4OMqjrjv8UR7lxT6IciroYgE6Fi5FHYaB7vi7DPyo8Ktx+ugGH1rEfn9RWWce8iRyJunv/AwmHAFWzSkdhYT9A+RtTUX7IQvf3YtQ/w3fiNq+FYRhGSyJRVHRBBns1EhcnI89ANmwRFX3Q1eFC4Ai0pDSBUFwscNuMGDGCPshnfzDh6np75CP3IUVeXGRnZ5OGluCOQ7UVz3TznY7asm5ApUy8uOjtju0IJEJOR0JiELAPEhcL3XMd3a0NChhO7M4Ubaf3AsouHIEM+OXADCQqbkdByUuBOyLbPIRCrMYAbyJxsc59Ng+5zycZXlS8497HMBRYfQ7y7nR0n5MXFZ3Q/ykLiYaz3Wc1BXltMtxn7UVFFrIcOmHiYkenbVMfwI5IQUEBsVgG8YnZtfE8Oo2+gk5jP6tjbEfkhL0Cnb7TiA91SkYQua8GPkV5GXV5BrwejXos6uoMDmHS9mMoB6QC2D0yVwckZj5GqXOLUHnb9cgpDTrlX4P6dRiGYRjNnWSiwucL3IGy6zJQkO/XsEVUlCKj9YfoarEB+conoCWndSjzsDcSFUcBP0ZXl6no6vdDwhCi65C4AImE81BMwAgUE7AYXYH8FanY3T+Erm6/BMYDj6OOTm1Ru1eQwIEwE3IAuuKVuOd9mZUoUVFxittmARIKjxIa7gXATW6bDkhU/Mh9Di+hK2oZyojE7TMrYX+JoqIv+rxHAS8j62IwEhnjkTjwgdqZbq5j3OM7UGH7AUjEHOP2lxUZD6G35mYkLgAWLlzIgAEDMFo35rFoMjYQJmbXRTk6XZ6D1jCudtvWRgydksrRWkdtYxJPc1Vu3mWEEaS1iQUvKBr79fFJ2x+5Y+yWsI8cdHpviy4NK9Bpai/3unXiNgzDaEnk5eXRDS0HRUUFKHfA10f0jeZWES8q0ghXzqtQt6SNKN7fhxSNR1fJwcig/RHqwPQMMrRPR2FRO7l9/Aqtupe7/V6AlrC8FyUTGddXAm+hsKGfu2M4Hi3vfea281fDbm67hejKm4muWJuQMIqSTFQEbs53URDwroQhYgXA/7ltvKgAXQ2PB95DAsFnP5YQXlnrEhWvArNQB6vvu88ui1BU+GxMLxy8x2gA+t/58R2Jv5L7z2888iL1cJ/PwIEDWbRoEbURi8X44IMPan3daBmYsGhSvgSeq+P1WcihOhKdyi5HawvHo9NIspKvf0PrMe3Q6WhFwuu1JVhXIaMe5BmpK9zI7zfq1WhIeJJP2n6R+ChTzyB0evo3uiRchd7zl8hbAb4T98CBA5NsbxiGYTQnvvjiC1Yh4/JCQlFRgc7qbdzzV6HwmW7oCuNFRZobn4VExQyUQ3Auqjc4Ai1FDXRztUEG7bGE4mId8Dkyqs9CIVC+UHuArqjnI4+DFxd/R1l/P3P7CZDB3BMZ/D9HGYEXoNyIde4Yert5ZiGB0CHyHgJ3PMlExRTkqZiECrHPQEtw3yMUKbujECbQFbsaxSicALyOluMSxcUS99oQaoqKze5/kO7GvETocfCiwpPh/gfjkYfnJ+6z9sKqmprxD0+7/ZS7zzCX2sWFL1e77777mrho4VgoVBMRBAGxWBpyBB9O+HOO8iTxZWknobSwv6HUsOvQOkMOMspXoHWJP6LyrkuRc7kCRVOmkNwL4Ss7VbrHPrm6muSCYRM6FaUkvF5fSJSPLl0J3J3k9VR0OnwYrVed4o5hOXI6P4/vx
F3XqodhGIbR9MyePZucnJwtPRtOJjSIj0Yr5RegVfgUZMRXAbehq92rkbl+iVb0zwIOQle3k9FS00PAnUgceMa7+yeR0b4MrbKfgAyfNHQVK3O34934m9FVNg2tzv+U8EoYI8x0PBcZ5g+7bX6BsgS7oyW6QpQE3sWND9xzk9GVMCoq1qAWtLugEC3P7yLHei6wG0oaD5B3xnevOsKNf8zdT0QeGFAye2d09eyEAou9qMhBn7fPCbkHWQvnRo7ZWwK4+zZuziw3x2L3fGd3n4Ku4g+hkLE1KIPyKpRPcx0SF9GwKJ8EfhgSOPvuuy/vv/8+++yzD0bLwzwWTUoFKg3751peL6FmWdpdUMTl4ygi9FPkJD0dRYymIs/ACPdaJnJCzyN5KhmR53xS9ibi1x4Sx67n24UjbUKnsZ3QOoynGp1+5iIn+P5ofQP0/v+AToFnYJ24DcMwmj+FhYXk5OTQC12pzkBn7nLiRcVp6KpVja4O3huxDBmaIFHxkXvtBCQquqBcjAnISP8PMvCj7VQPReFCa5FwOAl5RDoSXu3S3a3ajRmPrnAnEnoqfHH2KItQAvVh6Errl+XeQKFaAwiLwwfu/aQhwVSKBI9f2c9CXpRlSCCBsg37uucHIA/GYPeaz1KMlnM5AshDomGDm3ODO84i5EGZjLwhXlQMQh6QI9xci5B18SfCZUL/eRa7+bq6WyqqZjUdeW7WuXFeVNzn5vatcV9w7/1C4j0XtXUIN89Fy8U8Fk2IvBYxlK7VG/2komRRe1nawYRJzRvQqfZ8ZLCPQnUuXkXrGwehcKIlyIORhdZM2vkjcXP4SMki9NNOIT5BG3TKrSR5Olp9zEWntPuQyFiHBIWPQF2E1o6moXCpDqg6+CXoszkHOZKtE7dhGEZzJdrUbjm6Et2BxMVE5Fv3ogJk2Feis38q4Yr5bWiJLAuJijPQFaofWilf7P72OQcPuvvz3XwVyNg+w82xAl25EkVCOhI1L6Mr6W/csfkV+MR6jIvcvuciL8TRSOS8gZbxhqIcCQhFRTkyyH/i3uMj7vUL0ZXUJ18/ijwPGwgTzB9Foukit/1wanbBeg4lVY9H4VoQZnCe5I5rlntP3VCyu39vV6LGfT1RONczbrs/u+P3oqITWqoMkGeh2I3/ComLPdy296HlzUuQeBno5i8hbNl7PRIXOdTsEA7ybOy7776ACt6omqbREjBh0cSE4uJv6PTzR8KwqOPQz6+QMIG5xgzoFLMLWpcZjU4VI1BE5s1IWOSgn/U3btwqataNaI9CpmajtQXvCI16L1agaMs06g57SqQSeWfaonWYOei0loaa8bVHp6m9UIL6PDf+VXRa7I5Oh7cBZVZZwjAMoxniRUUu8hi8QljZaB66mk1CBrO/wvisvTQkBjYRiotHUciTFxWgUrEBWi6LEYqLAmTcH4mMZG/8j3TbzXb3Pk/B45ezXkKG+Vi0hJZJ3aLiNWA/t++GiIpO7nkvIqLioso9vwCt+B/o3jPoCuiTsF9G+SdRnkOhVEcgjw7u+MvQcuHZ7tjuQCLqESS0zkBX5klu3PkoCf5SQnFxMbWLipFI5BQij8pN6IodFRVdUMB2BQrHSkHLn5+693QsyleJMsG9di36vEeOHGniogVhMSXNgCDwBvudyHH7JDol+AjIZGVpfV6EZ4l7PCjy3J+QgPiXe5yFUrhGoVNCf3Qqz0Snm54ouvQNwlO9jywNkCipIlwPaSgVyPn7GhI+lUgM9UOnJd/a6L+ohsSeyBH9V1Tv4hZ3jCvR6bGShQsXNvIYDMMwjIawYMECMjOT5f3VTmZmJtOmTdsiKi4ivrN2ATLeOyPD1GfZlaMrRCpaXvLeCy8uHkBCoYSQAIUH9SM09KcgY/VQtAS1EBn/vdBVbW/CZbO5kbnKkH/8RVQ6dSK64iWrvej3NRcZ0Iej2ID5yPc+mPpFBUisHIuucu+h5cNq1CMjGxn4m1Dysz/GSe75uchor3Kv1SYqf
MCy3+/vkQjyn/XtKHNxErIeLnDvpS3qNjUU/Z+modiGDGqKCl+CJRt5Jd5xn9HF6P/TBVkga9Hy5o/ce3zAzXGGm8c3BgRZAW+g5cdzkIXQDYmLWbNmYTR/TFg0EyQuSlAdinNQ2tdl6Kfsy9IG1BQUHi8E2kWeG4BOF08TRm56L0VbJDQ6o9N4GjqVnYBOBa9F5okhg34tOkU29GtThUKfKt17WEoY5ZpNGE4VoFY/a9CpJ0o6OgVPQ6ehrtRdbtcwDMP4tixYsIDBgweTUVra4GaksViMjNJSjjjiCEYSX1I22lm7ABm2KciXfiehoesrJ/mcBy8uBhGW9agi9HLECMXFg8iXfRDyhJSjgN4KJCg8ieIiUVSMc/vxeQ9Rb8Ui4kXFOJRsvgoFK3sD2GcyFpJcVPjwpERxcTVauhuBEsEnIg/Kk+59pCHPzbko/+ISVAWqIaLC8xgKcl6HlvouIxQVp7kx7ZFXpDNh/sv/3JzJRAXu/e6GrtC90VU+DS2X+spYG1H25LHufe6H/nc93ecGEhUr3fMb0HfmBygrMxsTFy0FExbNiCAICIJyZMR/jhyLFdRdltafYv1peXPC6yehn/stqC7DBuKb5nVGp/C26NQxjDCEaiNhVaZVKGQpE50uN7tj8zU+fJpXlXt+M6HYKUMF6rqiNZ5uhJcI3DHdgsTUAJILp0wULnYJsLN13zYMw9jGeFGRiwzVHEh6ri0qKiIjIwPc6znIMM9F/uZTE8afhYTGCEJxsR5dZSYTXr086e65SsIsvCzi6xD6q8SbyLhei0RDDF2pfEHy9xLm9uKiALiXUFQchUTFZsKwH88mJCq+IF5UbHTPd0dXzg5ujseQh+B94o37Re65Re6xFxf7oCt+T+QpCNzx+AZ2JZHP6EQUqjUN5TJERcUiahcVnsfc60tRAPWvkGDxPIXC1ya59zgSXcmfJz78CcL/gV8ePBOJlHSU2L7JHcdO7t6XZjnPvddKwpK1XlSMQN+hkeh/+yzKYbkMfb9MXDR/TFg0QyQwAoKglJycwejnfD36WUIoJqIn/H7u8YIkM16A0tFeQKexF5GxH0OngAz3uBKdlv6EfuJ/cfOVoFPxzsgj4vMrKtEppwydjsvc48rI3GmoW/Y8lI7VnTAp3Bf6u869x9/6T4D4qtj+uRharzkFyDJxYRiGsY2IiorEMKboubaoqIghQ4aQuWHDFlFxEjJEL0JXkFuIL7ExFRmSRyDDcS4SChnI03AX8VQhg7MSGe+ZbizEX/XuRXkD+yPj+iV0tSpGS1Qj0ZXsw4T5s9BS2aMoIHgCurqWu30lJkb7gOEn0JXoZ+759ihkZzUqi9IJCYcn3T6ecDeQ0b+QsIneIvf8myiU6sfIoPa8gioujSM++Ph+FHLUBnkJvKi4A/0fnqF2UQFwK1pG7ITCxnz+RhUSFT6H4wj0WQ9Hn2Mm+i74/JTo1TkaMH0mWspsg+o5eoGW5fZZ5N5/XyTyStH3w4uKbDfXHFReeDOyHk4mrBhl4qJ5Y8nbzZyCggLatGlDdbUvS/vPWkZ2RD/VWWj9JZGT0DrHVUj7d0P1GUagU+tKdFr9D1oj2oiqWndEosQ3w/NthUDrSG2JP9Unrildj1LRfoicmtEaG5Uo0vYp5InYJTJPspAvLy5+j5yzn9TyWRiGYRgNJVFURMOYQP7mWCzG/PnzGTJkiMqFIoP4SGTg7oxKr+5EWJ3pPORrr0D5Bz7EKcvt4ywkSG5x439O6APfgFa8o52gIbwKeFGxBzK+/bJZV3Qla4MM4TYo9OlDQkM23x3nySis6HHggMi+vChqQ7hcloPKqdyHcgiudccxxI0tQl263yaszfg2SpReizwRfd34+ci4/i8SCYeiK6S/Mr6CxMlhKJncByvfj8LHDkBX86eQONqI4hsKUJhTW8L/XZRbkZA5zX1+r7njOgMFTE8jXlT4K/lwJNTSifdSRPGP56Blzh+7z+MKVBAfJOZ8Z61qw
jK6Ve7z8qLiBZQzcyr6DtyIgqSfcvNchyV0N2dMWLQAqqqq6ilL69kLnTZ+TU3nMkhA/A2dSt9Gp7XH3WtlaK2nAiVTn4Yqh9+C1n/+QJi+BWHPUu/MbINOvwESD5tR8vVk5Oj9O6GDLIZO79chR+epyHG6Ank0/JiouIhWp+qELz8bi8VcfophGIbRWGoTFR5voN4AW0TFgegMPx5lyq1BZ/cFxJd+XYjO/qPQSneWe20sYWWkp5Hh+hAKedkJiYoN6IrTgbCPhL8irEZXlnaEomIiWroK3PO+fK1fYffiArRMNgwZ1T2QERtDxrCnGl0NKwgrV53t5r+fmuLiHbRMtj+6ohW7Y56DDONTUXUk3Pj3kEgYi4x531fDi4px7jOqcPue7OYfi5KrPbeifI7ehHksN7nXzkoY9yYSKwNQ8vYv0RX+C8Ju54miwl95o9mbJeh/mSgu5qKlzTS0VDjYHfMVyJNRRNjYbzb63APkERlKvKgYT9i0EOLFxQy0FJqXl0dFRQUNZcaMGYwePZpYLEZ1dcPK1qekpBAEAdOnTyc3N7fB+9qRsVCoFkJYOepvyIOwPsmoH6FT/BtJXqtC1aa/RqeffyGn55tIANyJTptZyOjvgn7Cp6BLxOHoEuDzJgI35xrk1J2DLiOF6LQ7zs3ZG61b+UtKudvvT1C06hXoFN0drWdE60Mknrb8iSAFXYL6aVQstuU2YsQIDMMwjPqpT1R4foZi/n18/WZkyJ+FRENbdPaeTyguJiBD8htkNG5GK/a5yLD/AJ3RJyGj9SRqFxV+Jd8bvF3d+GLUg2Eioahog4z0VOLFxQh3bEsIV8fLkbE/CSUm+0xGH/fvS6ZkuPdY7MaehK6cF7vxL6Olut4obOc/yHifjoqmz0JX0Tvc+NeQENnNfT6+VEqiqPAF2R9DIWOjiRcVw5DHpp37O5okf1Nkf4miorub6/fu/b2MBFYyURG9B4WbXeLuo8xx+81Cn2kpEipj3fzvus9xhDvWYeh/5BPBU6hdVJzm5sxHFsMrOM/HtxAVXYFOW8r8100sFqNLENAZGD16NDNmzGjw/nZkYoEt99ZLSUkJnTp1ori4mKysrPo32I7ox5AFW2pvTCBc6wAZ8V8iT0RHdHpcR1ifYSd0eo/yJTrFDkKn1kvd2FOB36H1gXPQT7kPOoX3R5Gf7dHpYRm6rLxF6HkYjKIkv0Kn5e8Tiod93dw+/ClASeLr3HZRj4tfpypHp7gl6PT7CEpBSyfM2/DFC1U5yr7ehmG0FAoLC8nLy2PTpk0N3qZ9+/bk5+czdOjQRu8vMzOTzNJSfoJW35NRhfzK1ejM/TEy5I+MjJmFDOhKFGTbBRn/TyFxcQUqE+IrPs1DRugNKEzoXCRqArQSnY780sma0+HGLUJejrdRKNElhL5zj78atEWiohBdcXLRFcZnCKagK9fzSEAdTni1qXLblyFPSbU7vieRF2GE22d/t+1/kGga7uYrAGbPns2IESMYiQKVl6OqSMehZcCXkNiZSbyogDDkaxBajstBomguWvX3iei3u/e1F1rdn4WshJNR4HAOugJ7UfEqCqHKRrkd2aifxukkFxQgw/8ZFAa3EAmHI6kpKlLQlfwpVFp2H/Q/XogsiMGEpV+8RVXs9j8QxUdE930X8qy0QZbJLBp3bfeiYjDytvlQtCV1zBOLxRjkxi9w4xfDDuu5aIwdbMKiATQnYQG4nIs26CfZD52KvDPRJ0Efjn7KJegU2RatVWQkmbEc/XSy0JrLV6jg3cfIaXsOWuv43M39JTq1diZe1PRGVbyPR+sR1YRF6m5Fp8E2KEr16CTHUY0uQ6nuWKNUop/1v5FDuysSJ11R5GwmOk0WolPcq+44Fd5lX3PDMJoz0W7VX1O34TRz5kzy8vKorq6mFzJU586dW6u4WLp0KX379mXGjBmMGjUq7rVYLMZI4rsfg87+uyLjuBoZrcMIw2AS8eKiFF1l1qK+Ej9AVyhfygNkN
B6JloXORTH+MXR1WEt8QzY/3lONzuzeSP0M5RnsDVye5Lg2o1CfVcgI7++Owx9Porh4Dl1ZfurGbERXsVJC78V6t+2TyD/fGV2Z9nRjTkGBxouRqBg+XHWUYrEYQ1A/itMIk6yfRiVVDideVKxC4VkVyMPxIVq28x3NfY8OkABJJi5GoKXE5Uhk/JpQVAxHwuZ6JAR6U7u48KLiB0gcvYwE0Z6EVZ8OJbzqP+6OaV8k+nzDwvnuuIcSbz3gPvtEj8W2FBUnov/rPDdnAcnFhRcVx6PPbDHyAM1gxxUXjbKDA6NeiouLAyAoLi5u6kOJgy1L+R0D6BRA9wB6ur93DuCPAcwJoDiAoJ7b6gAKAljhHlcF8JsAMgPoEcBuARwSQHYAvw/g4wCeCWBqAO8FsM5tUxVAdeTmH68P4LIAegeQFcBvE8b529oAZgawMeH5xwLYL4AjA3gpgLIASgP4wm2T+H42B/BEAPu6/dlX3TCM5sncuXMDIMiF4CIIcty5PRkzZswIgKAHBF3d+Fw3fu7cuTXGL1myJACCnm7MjBkzaowBgpEQ3O5OoKdBMAKC4yB4AIL/1X8BCQIICiCYCsE/IdgfgisgWAfBBgiqI7fjIMhz+yuDoAKCcgg2Q7AegpUQLIGgJLJNAEGVm+t1CD6BYCMElRA8CsEECP6acDzl7rX9Ifi7m3eduxW7/VW7+2K3v8sgGArBze75byCY525+3uUQFEKwEIILIBgFwRz32r3uswSC2bNnx33GORD8BYIVkbmq3Hv4xv0dPf5iCG6AYDcIznavvwfBUxB8kOTz/437v33iPlv/Perh7kdB8CcIprj/1ZMQ/My9hx9CkAHBrhA87/blP/vnITjDfZYb3edaDsF97v/4G/ceqtxneDsEe0Fwofs/rnPzVEEwH4JXIJiVcOx+X89CcAoEj0Fwp5t/DHX/Jmpj+vTpARAMhuAPkf9R4N7T4RD0TZgXCAa570H0GKdBcCQE/dz46dOnN+pYWjqNsYMtx6IFE0TK0u6zzwi0lvE18lqsROsXd1PToZmMrqiux0q0pvBnVCPi9yiisQ3S6z9FuRxtUb+L8cjRmUnNErhRMlAux5+Qs/xOtE6VSJabO5prcReKLD0crYMcgdaL/Nc3WRJWOnI0v4K8L92tPK1hGM0O76mor8wryFORm5vLSLSSPQCFk/gynMOGDaOwsHDLeO+pGInKko4CcnNzmTlzZty8QRBsics/EK3kHoU8A0+6+4YU98xBZ+O5KNzla1TVxxcmB60CFxKGP/lwp4Awp8FXZyp2+4awAHkFWh1fi/ITAjfnCSgE6Ro3vgKtsj+EfOUPuvfShjDMyld88h6Mx5CHJnDb3UPY8dtXfwKtvmeiFfY30Er/EOSLv44w/CnqqRiJ/q9nu/2ud/sJ0FXTF2L3lLjb8Sgo+UO3bX90xY02/gOFZb2DPAh5yPvkS9V+A/z3v/9lJip/+xX6f76KvkcXuM+ov5t3Ksr7CJAn5RlCT0UqukKnIGtgf+S5uMq9p0cIPRXnu/F+fTtA39khyFs1O/K8ZwLy2tyHqkltS0/FsMjrR6HA8ZHIo+PzNKOeimjG5jjk1ctFcSKWc1EH21nktAqaq8eiPgC3Wr+vW73fXM+C06YAHgxgr0Aej/PcNh8H8lr8JoDFzkuwPKjpnUh282P8PqoC+HMAQwN5WH4Xea00kIflzQBecY+fDGBMAHcmmbsuj0Xi7V9uf/aVNwyjeRD1VNybcNKKrjgHQeipGIlW09e6FeN9IPiV2z7qufCeipEQ3OHmvButWkPDPBcPuZXno9xqdUE9J9oX0Ar479z9C24VuhStvr8LwUAILib0VFSj1e4ytBpe7MaXufe5vzueSrTyvQR5HmZB8AbyGlS6ea6GYBwEb0HwtFthHgHBoWg1PifyWQTIO+I9F/ei1foxaCU7z43/dy3v9d7IZzmceM9Roqci+plWQ7Ca0CPjPRZRb0Wxe32Ne7wGguuR5
+J0CJa6bf14/386xX2m/viix9OhQ4egu3tf2RAcDcGr7vM7EILR7hhLkLfgTAiuI95T4f9f3ptRiTwX57r3eCqhp+JLd5yL3fsJEt5rkdv/LOK9UtUQzIbgcve59qF+TwUQ5Ofnb3lcl6ci8eY9Fz0gGEBNT0XibUf1XDTGDrYciwbQ3HIsGoNWvPyaQWI+Rju0FuDzEl5BEYTFaH2oi9tmOVp/egqtQXVDazR1eQCi5WITozVLkdejyM29rzvGlW58NVqHiyEvTC7K0RiUsI81KEJyJGGfjdqoRvkhdwIllnNhGEaTkuipSFaR6Q7CWHnQme4ctPra2T13vxu3F8qJuA75lv34XyGfreceFFM/E+JyLvyq+tkoKTcLrcpPRp6M7m6/o5FnIpEX0Up3N5QbMJEwwTtAMf3laIV8JYrlP5XwbO89F96L8DCqhFSEEnrPRavZ7ZGHoQ1aiV/kXv8ceSX6oyvb+24/fdFVbU4tn0kpytG4GV3lxqPV983ufSTLP7mP8HP+6quv2GWXXbbkxiTzVCRuHyCPy0bkPelIeKUsRZ6KjugKjPs81rnjfAD9n69G/6Nn0f9nOPrftk04Pn888+fPJzs7e0tn7++h//Ul7ljOR9W/yt3tJWQRHIYyItMJPUuJOS/lqELWS+7xFPdZdnb/g0qUV+FLBvs8nXz3+n6Exew3ogpWVe4934T+f6sg6XU7FottyTPKz89n9OjRpKSksHMQMA79XvatsVX4fwjccd+NvtsnEO+pSKQa/SYfRd+9lY0oXduSseTtbUxLFhae0KXeEZ16opWUfNft+EpK4TY7odOuLyw4hPhGeEn36O6rI49nI0fpJyh9aj1hhfJUFIo1Gl3WeqGUuzno9LOWsJJUbzfnV0hcjKJhlZNLUDjVJwRBeX2DDcMwtgt1iYpKlDjtjaFbUZWcnsi4jooKz7/Qss/e6Gx4LmH50V9Qk0RxkZubS44bfxZazqlw+2xD/eKiBCUFV6Az+UTiq0aBDLiZ7nYTCpG6AIWppBA21QqQULoJVWHqgs70fZGReKo7pvZu/CJ0VXkTlV6NIaOvFC1FlaKrSBAELFu2jF122SVOXNyDjPDFKDj3TUJhFgRBDXGQKCp699b1KD09nfz8/HpFRfTziIqLzUgwlSAjOyoq/Oe6AYknLy72RAbxKOoWFR4vLvqiq/omZAmcj5oTggz6MndfTBjk7Ev4Ji4TVrl5UpAQvN8d/73o+7KJUKSCkr6nIjGxi7t5Q76asITLLBSg/QpKbq9yz0dNVv8ZH4ZKxMwgFBd1hTX5z9/vL8V9tqmE36tkVCEBfRu1J363Vix5exvTUkOhamPQoEFeqAdAMGjQoFrHasyYAL4J4PNASd6JIU7JQqCCyLiiAE4NIDeA7wdwRQA3B/BsAB8G8FkAUwL4P/d6bgAnBvBGoKTwskAJ20cGMDaAp928BQHMDaDSPW5IfuETAexUr2vVMAxje9GuXbugFwqfiZ6gKlAS8gAIjnWPfwHBHhDcgsKfEk9q/4ZgPAQnujCULi4s5TYU4lPbyTAaFjUChdqsdvtIDLUJqD8s6l4UtvSHWvZXAMHjbr+PQvA9FJaTGAJ2BwrhyXY3UEhMP/e+biEMmwkguMcd/84Q/NR9dv7a1p2aYTRfffVVgJvroshnAGGCe2J4S3S8Dy/66quvav3/tm3bNun/N/Hmw6Kuh+AAd/8NCi8KUDiYDwvz26xBoW8DUKhPZ/eZ+P9BsnCsKPPmzQuAID3yPal02/uQtGgi/WZ3DAvd3z4Myh9/lXt9HUqsv90dwz4QfIzCoHyo04sQ/ByFc+2FQt02ER9W5Y9lNgR/dP/biyC4lfjQQP8/8SFm0ffuw6IgeSJ2dH8VCfut7VaJEsuPIEz43lHCoILAkreNeigqKiIIgi23oqKiOkZ3ROsBa9F6SucG7iVw9w+jNYNvUFrdZORs/KGbdzRKNZuIigVOc+NWo34aT6J1BJ+4fRjwF1Qsr
gJ5NnzYVUPckWFjPcMwjO+a+fPnk5KSwnKUPOubmFWipNXFqAnbfFS+8wOUUH06OhtHuRY1HtsPrXwPQGFBp7mbbxCXjDNRaNEQ1PDtp2iFupj4FWbPSYQ9AL5EPmffbWMRCn35GQoHejZh201u/GZ3XGXIO5CCelnc78bdiXzj7ZF/+0zkGZmDVu4L0Jn/Tjf+PuR5mY1Cal5zxxIE6pS8MharsaLcu3dvvvrqKwqQZ2Mm8NlnnxEEAd/EYklLiQZBsGV8oqciGfn5+SxHq+j31TpKV64n0XfgA/e+nnbvdTOhpyLa1WkqShj/Gq3gr0NerYup3VMRZcgQpaF3QQnZP3X7qyQMSWvr9unDzh5G//973DEFkZvvR4G7P8PNuwwlk69yr3lPRXfkRRqDPHMvou+D9xz4PiSLgI9QXMP7yFsVLWqQ6A06nbCIQV5eHtOnTycIAhYgy8En8XtLwe/PJ6L72I1keE/F7W6Obm4/o0eP5oEHHqhlqx2Y7ShwWg2tzWPRUHJycgLoHMBDAeQHsMSJ94Z4LKoDuDaA0QFcGSjRek0QlrStDqAiUFnZZGVq17vtcgP4W8Jrd7p5b0w4nsrIMdV1+10AHZv64zUMYwfDrxb3jNhmfhV+Akoevs+tqN6BEmq7uFXbf7gVY7+a/W8IJkLwhHs8FYJjkIfjSbfi61e8k3kunkfJtje6VeVilGi7grAMa3T8PchzMsHt03ssNkLwDgTT3WO/Kj01YfsCt92lEOyHStKWuTnzUEJ3LlrJjm53UeTzmjVr1pbPLOo9mDVrVvDZZ58FjTFpvOfis88+a/A2ffr0qdNTEaWgoCCA5In5/hZN0G/fvn2Ae3wbNT0ViavyQFBYWBgEQRD3narNUxHFFwIYBcFd7n8fTc6O3rwHqXfku+o9F75Mb7E71jI332gIDoFgBkrQvsd9J36PSg3f7raf4r6DTxEmnQcQvOyef9h9FqMg2Ne9drb7jC6v5TONfkaTJ08OYrFYAKHnooCaHopq5KFbhzwz0UT6qKdiAErcDtx77OX2c//99zf4O9RSaYwdbMKiAeyowkInr+4B/C9QGNSqyO/Xi4HahMWDzvi/wz0uC2B2AF9Ftql0wsKHMiWb53YnLu6OPLc5UI+OfYOaYqchIVEPBdApGD58eFN/xIZh7CB4UZHYqwII2jtDxYuKSne72Rlzg5xx5cXF/xEvKqrd80+hfgSXu79rExdeVNwTMQy/csZVGWEIjBcXtYkKv+8FSFwsoW5xETWkfRiLFxf9qCkqEitdeaKG9KxZs5rwv1o3dYmLxKpfHi8ubq/jsygoKKixr7S0tAaJCk+iuPD/+2SiYm9UgWssobjYQLyoCAjD63aG4Bn3vO9pcQrxosJ/b+6E4GRkvFcTLyq8gX+7m3cUqlL1bwgmu+9sbeLCf7ZRET8QVSxL/P766mBPojCtRwgrWCUTFYeh32tU3LZ2cdEiQqFeeuklrrzySo4++mh69+69pYbw0qVLt2re8vJy/v73v5OXl0fHjh3p0qULBx10EE899dQ2OvIdDZ+kHUO1NhIJqJnEvQjVTZiE0uNiyHHbBujhtiHJdsn4OQoQ8PVBvMP2QuTM9VXL/TH6c0hdqOfnnDlz6hlnGIax9fik2cReFb5P9iAU0nECYUBnCeob8EsUepGCEnePRz0Njke19QLCUJajUajRXNSRugBYgMJpfFjUCyjR+3tue1BoSqa7pblbhZv3XnT27Y3CYQYQn7gdc8/1dftaiiorTUThIz4s6kVUtuNAt73vm3Ef8B7wT/e+PPcRhvYkdhUPgoDVqanMmjWLESPqquHTtOTk5FBQUMAM9F7uc89Hq30FCaFaQRAwC302Pkwu+lkUFBSQk1OzLldZWVmt4U/JGDVqFDNmzGAmCkd7gjDMCdQ96mYUFvc/lMh/Fbrq+iRtCEO17kVhaRtRMYGn3bwfoI5X64FD0BU9BYV9nYTC5I5AlaSuQ71EDgN+QliSx
ffkKEbd1vuhoOyetby3k90xjXLz+E+rFIXLTXf7DVDSdjUKW3sVBWa/hsKn1rn3MQdZLy+44/ga9cC4ljD86rTTTrOwKM/2Vjm10alTpy0qMnpbsmTJt55zw4YNwX777RcAQefOnYNJkyYFhx12WNC2bdsACH79619/q3nNY/FGoH4RGyOLAtFwqMTbqQEcHij8KQjUH6MgUCJ2dFxDPBY+LGpcAD8NYEMQ9uN4KVBi+Xz3uNK9XhqZN9mCRn6gDuXmsDMMY/sS9VREV63LIBjmVmHvQHX9F6Nk1jVo9d/X/7+HmonZvt+DT66Nhs08i5K/Ez0XU9zKsfdUVLhV31VJTpRlbjU56qm4nbA/ReL4ZJ6LG5Dn4hG33RRq91zUtjqfrJt4SyPquYh6q+rCfzbRVfFknoqtJeq5uAet1HtPxf6R/0kVWun3notc938rJfRUpES+5w+jZPp7CEOM/HfU///bEoZ+PYo8Fw8Rhvv522SUDH48KgDwAjW7lPtbBUqEHxX5Xt0MwRAIdnG/Cd87w3sqprjv6YsoJGwaoefieUKPRXfkqYj2QlmJPD6+CMCnn366zf9HzYEW4bGYNGkSf/3rX5k2bRorVqzYJnNefvnl/O9//yM3N5d58+YxZcoUXnnlFT744AMyMjL497//zQsvvLBN9rUjoFWRcmCheyaaHO29DYnegdlorex81DsVpPvbUjMdsKGei45o3S4frcWlu+cPRgXp7kPejLnIW/KlO+a5KNVwOUqF82xOeC+GYRjbnkRPhS8rWwVchs5E49Dqfsw9Xo5WVrMIOxaf4bYdiTwSQ9w474VoQ3yC7wS0SjwTnXlnoxXmF4CDkKciy22X4eYoTTj2R1Ap016oJOwa5HEYgpJwX0wY7z0XPZDn4hG3/51RsvERyIcNWoH2ibjec5FsdT7RU9FSiXouHqVhXaSDID5hvDZPxdaSm5vLMPQduBH4DaGn4r+RcSmoV4b3XHQiLIV8PeqGVYK8bze6+f6N4g02EJbP9d6aJei7NM/trxj4B3Acsgx8QvjD7vUh6PvTEX2XkhmvlcgqWIssEJ/YvYfbNht53oYT76mYhn6DI1BC+TDkEXwdeVp8+eZ05DHxJZyfRQXwO6LSybnA7rvvzmeffZbk6HYcmkxY3HvvvVx22WWMGzeOnXfeeavnW7t2LbfddhsAt912G926ddvy2u67785vf/tbAP7yl79s9b52FAoKCpBzdB76qW9OGJFMXDyCHPc/iDy3AYmKxK9bNWEIU30cii5Rj0W2BTn0P0Uipje6tA1y973RT349utR9iYRSIdA4t7FhGEZjqEtUXIvORnsAb6OwiwwkDiqRGIhWil+L+kI8hColpaHeDlWEDcqinXmmoLPiOBROVYbq8fUDTonMHUNn5gxkFHpxscrtqxIZZF8hUTEetRkdT3JxMdvdPkXhMlWoHuB4QlHh8eEtPQnFRbSyUWsRFR4vLla1bVuvqPAEQcCqtm23m6jw1ZUuIhShT6Hvw3+TjUdGeQoShh3Rd60MhRy96Lbvj8TDy+i71g59v+4mFBW7oF4a5ej7fRMy1H2r2yr0HfSi4mi+naj4EC03HosqS32BKnElExVLkABajITEj92YYlTxbBSyJC5zx/qCe+4NFJpl4kK0mnKzL730EuXl5fTr14/999+/xusnnngiAB988AHLli37rg+vBbMBRRz6NjhRooLAnyg/QZGU0dNDGcnbzlQT9vKsjzQ37xeE63QB+lmvQqevzm4/6e6+M1pvG4JOY+XotJAPVDJ79uwG7NcwDKPx5OXl0QstiZyKlje8qPgQxZf/AJ1hH0JGWWd0ptuEDKTA3ZeiFeJO6AxWicq7ZqAzaDVa9ilHht6jyMg6F5WjTXf7+wp4PuE4k4mLLMIY99tQ/Hw7VCQcd58oLmYh4/S/brz3VycTFSDPxOsoXn3+/Plxq/OtTVR4cnJyqKioaNQ2FRUV21VU+JKtaSiX4jzU7HBus
m3c8yvR9/kclM/wZ1Ty2DezewSt7PtSwm2BZ5BIWEgoKrx4+T83740otyMNie2bUe7O1oqKgchbcRwS2tNQ2eJEUTEICYMBxIuLN9Fv8Tz0u3sIeWkmuM/vx+69d3LH0ROJix2VtvUPaRl8/vnnAOyxxx5JXx80aBBdu3ZlzZo1fPHFF3XWoDYS+RIJhgMIqz97fMI06FSznPjUPn8SjTrqQZfYgLBqdX3E3LyPI6d8ZyRehrh5lqLTQDJS0E8+A50+XsN3GTcMw9gebNiwgVgsxivIOBqPjP7PkJH/DVrp/AdKcL3bbTcJGUhr0fJJBTp7VSFRUYyMm76ESzu+U/IUtBp7KKGoWIUMpx8ig26q22Zi5Fi9uMDttz3ybFQTJhmnIDHgPS9eZExFZ98uaNnnVSQwKlESbm2iwnsm5s+fz+DBgwmCgPbt2zM3P79ViormRG1dwdOAS5Dgm+meG4YEZxbyRhWgq+8EZBmciAzvtsBeyFivQiFMB6H/cz666s5GfSyiosJ/FyphS9J6IUro70X9ogIUmlWEQvxqExWd3fPj0ff6OeJFxQAU4wBhp6tF7v3nueP/K+qt8Q3qct8dWTCHufHXo+/019Qf6taaaTXCYuFC5QH061d787M+ffqwZs2aLWON+gmCgFgsDa07jEb5El39q+7eX94WuPtBkRl8yFIK8Z6JSsKKUwHxVZ1qw8+7CJ3CIMy3KK8xuiZt0GmxBOhKLEnzJMMwjG3FlClTOPbYY7kHeSE+APYhFBUnA0e52xVIXMSQ0b7WbbMTtYuKGOGSjfdUDEeekKio8EGfR7r7qe5+YuRY/Vm8ElX2ibnjqkBeC1/ZCOLFxWtIfIxBBuQM1IR18ODB3IeWc6LGazJR4dm0KdErbmxrYrEYvZAxfFaS11OQMPXi4h1kTO+Jvoud0PcrQEZ7FVqlf9Nt3wuJyxXIi+abF37tXk9H33uI/y74pn6xWIxitIz5oJunJ7WLihXAW6gh3xsoh+PHSPR0R2FZnd1Yb2lMAPZHS5SLkaCIoba8We4Y+7ptHkMN+k5HHp3l7u/ebtuhSFT3Qb+BQred/w3siLSaUKj169cD0LFjYm/SkIyMDABKSkrqnKusrIySkpK4245NBVpLmIx+xlVJxsQIjfto2JP/ikWTpX1Ru1TihURdRn5AKCKirmTfKzPRI5KMUrQesi9Kh8wiFmtIfodhGEbjePrppzn22GPpjXy5LyGj7DEkAryoAJ09/4JKsd6Fwkm6oJW/TdQuKjzPI09FNjAWGYIriBcVniMJS8FOjTxf4vbRCXkvNqOV4vMJE62TlU19B10dXicUFYMGDdqSfFxbYnaiqDC+G9599916u4J7cfEZEo2F7v5zwuzJGPp+xpBFcBD6vz6HBO1dSFQ8//zzfOMW8YIgYAk182miomIkMtwfdPP3pm5DtTv6LUULATyOficrkPFfTM3lyxIUGtgLWRabkYVQTGhVzESLARvdZ/ACEjyD3Jg+6PfiwwB3B36PwqmGDBlCUVFRHUfeemm0x+LSSy/lueeea/SO7r77bsaOHdvo7ZqCa665hquuuqqpD6PZIK9FDF26uqFK1H2SjPSGf3TVyedalCPB4ftQpEbGJDPuk3kv/M892k9jgRubeDyJc1ajGhWr0KmnB3K2fpJk34ZhGN8eLypykSH2HyQuZiLDZV9kfEB4pqpCserHoDCSo5DRMo+6RcUadGbeBa3Ufo58ysMI+2QkEvVcrEfhLCXorNwVnXkzCM/SfmX7RkJxMQPFlfsKR5mZmRTl5zNoUOix9teOm1AMvRcfJiqajrFjx/Luu+9ywAEH1PBARbkL5UXMRh6Dr9H3rh/h98GLi0okBJ5G3/GVyCu3aNEi+vfvT3V1uLDovxOr3ZyJoiIxPKshRL+fBSg343wUwuRN+2g41GYU/pSJrIkyZL10Rp5CkKB4AVVha4t+KwegEjLF7nPYC/W4mIl+Kz48cIb7DPLy8igtTay31
vpptLBYtmwZc+cmS+upm+394WZmKkJ0w4baY+f9MWRlZdU6BuCyyy7j4osv3vK4pKSEvn371rFF6ycUF7ci4/wSFJUYxV9QFqCfNGitIR2JjQ7o8tmWcA0iUQDU5UHwoVZDIs/NQs7OjtTu8ahGp8kpaO0t2z1/EXCOhUQZhrHNiIoKXw3KN5krR/7W/6EE0JPR2cubXX9EwaY/R2EnfhV1Z+JzKjxVyDj6EUqY/Ttaof0JYQnb2jgShXj8HzKmTkMG0xokLlITxp+FxMEaQsMpGkvuowYSSTQkTVQ0PfWJC18SdjYSB7vtthvBmjVbRCLEiwufzDwTeUQmTJjAos8/p3///kn3HwQB6enpzM7P32pR4UkmLs5Gv6NEceHLu3yJfo9dCZc8uyCBNA2J/Alu20EoCNuLit2RqChA1owXFXcgwf01EOyAogK+RSjU5MmTt7i0GnM7/PDDt8fxb2HAgAEALF68uNYxvqu3H1sb6enpZGVlxd0MfwFZhdYmfoJCo6IlaDORY3FWdCuU27AW/XRTqZmwHaPmOlwyZrn5O7nHZSi6cq9at9Bl+WpUA+JEVDTOczRhmpZhGMbWkUxUgAIvz0EBm22Qd2AKMsi+dGP+iMKKfo6qSIEMoH7ImP8qsh9f59+X0jgaha7MRQnUDwIPRMYm414UQlWFzuRPoTPrBre/RKJVnN5++23Wd+zYqLKp6zt2NFHRjPDior6u4P3792fNGn0jagtvu55QVIwdO5Y1a9bUKio8vlP4thAVnsT+KLcT/r4WIO/fOuStWImsg2Xod1mNRMN/UL5INlrCXIeKEywiFBV7I0EyH/2WfQf7ujqq70i0muTtMWPGAPDJJ8lDWxYsWLDlx7Hbbrt9Z8fV2gg9F58CF6JTypEobWkw0vWvostjFXK0V7i/ywnDmBqb21COUrPy0E89C/3816CfdfRHHEOi43V0ClyFTjdRUYE7lnGE6VaGYRjfjtmzZ3PssccyChlJieElZ6Cz5jtoWaYEGfMBMnI+JxQV0bIY3jzz/to+yAjyoqISGT2DgZ+6OVeiFVvQWc/Hl3vuRaVAO6E4+nMIDcUfIQMKwjId0dyIJUuW0KdPn0ZHIeyIISHNnUTPRWJ4WzJqC2/zoqIxNFZUrEA5FXWR6Ll4AVVeW4w8FxsIyyr3QhbLp8gj2BPliIxGfS9movyKEnfvRUUMeTyykaXxCLJ0TFSIViMsxo8fT1paGosXL+a9996r0cvikUceAWCfffaxUrNbif/RSGCsQ7o9HXksqtFP9mm0hpaBRMBKdFrwldfr3UvC49fdHEehtbuF6HTRE/2k56Kf+AL0037DjR+NxEU2yRkJtGX33Xfn008/bcBxGYZhxDN79mxycnLIRbHd49FZKTMy5glk1PwSZXi1RWexm5DB8ktU4jXxzJdMXPR0915ULEIhH+e51+5AK7HJxEVUVLyFzsZ3om7CycTFs9QUFUbrISouEsPbaiMxvO3bioq6qlMlcgdh+GB9489y43OQ2N4P/VaWEDaW7IXyn15GlsMqJFq+h7wWM1AeVD4SFQMJRYX/LeWgKlGvot/3IkxUQAusCnXIIYcwfPhwnnnmmbjnu3TpwjnnnAPAueeey+rVq7e89tlnn/H3v/8dgN///vff3cG2cvQDSkhArQAA2gdJREFUqkY/s25IUKxFl9QH0aXL1zbpjn7OK6gZ9lTfD9FXc8pDTfIGojWCOWgN4mSUtngy8Cd0WtgDOfgfpnZRAUpvTN+hu2QahrF1RJvh/QJlfJWgMyHI6HgJ9XQ4C1VrApW37IEERbJ+D9Eg0f5oiWYBMmYqkTBZhERFtnv9DLeP3sifezNhj4xkosJzJzrD3kEYFvU4KnlhoqJ148VFY/ovBEHwrUUFQH5+fr3VqTw+xOg94sOwauMQFONwDsqz2IB+L0OQODiAUFRsAHZD1dPuQknaWShMMB/FNBxBTVERQ80u/4B+Lw0peL+j0GQei6uvvpoXX
3yxxvMTJkwgLU2lQ8eMGcOtt94a93pRURFffvklxcXFNbb961//ykcffcT7779PdnY2Bx98MBs2bOCNN96goqKCiy++mKOOOqrGdsa3J+xz0QW1pNmIErxvQB6FfxCKiJ7okpiK0hGjlZ+izv/EErTXovWEm5GQeQitEfwd/dxnI43ci/jaDw2hHS1QXxuG8R0xb9488vLy2LhxY61jysvLicVivIqMnoPQGa4EGSpvIOHgE0GHoZ4TC1A3YB8a1ZGaZS382fBBtEziKzp5H3BXQtHhtzkD+XBfRPHjN6NlmNdRSMhfSe43jnou5qIeFT7x1URF62bs2LGNXm3fmtX50aNHk5+fT15eXp3VqRLzFpIlkEc5BFWkOht58GKobEwf5K3IQlf9F1CG6Ei0vOgF9O4oF6MnYVf5ieh3mCgq3kYxEf9x82aCFYOhCYVFUVERH374YY3nfQdtgHbt2tV4vS46dOjA22+/zbXXXsvDDz/MSy+9RFpaGvvuuy/nn38+xx133FYft5GMChSN+Gfgn6gPZj66FPZGaYwg8VGJLokVaK2uDbX3sggIqzmdj37qV6Mwq5+ipO1NKL/j2yZhbya+x4ZhGIaYN28eQ4cOpSf1Gwze6LkCeRCOQZ6JN1Fo0YTI2LuRETMG5VwMRqUlEpc4/N4ecNt0QaujoDCNbsigKUXGkg+9ehgZOwegJZ5DkddkDVr6ucXtK5lh5sXF0278ZqCw0PLQjG1PfeIiWTJ0XeIimajA3fu4ha+QtRIjXlS8hwT+0cib9xSKs3gRWSvHRua6FjWgPAn9hvujGIr2aNEgeq746KOP2Hvvvbc8bt++Pfmuu3ybNm3Iz89n1KhRcZ9LLBbjww8/ZK+96ipM03yJBTu6tGoAJSUldOrUieLiYqsQVQvKt+gO/I5QSOyH1rtOQTVP/GVvLfr5t3HbZFIzmXsD+rlPQZfoXOTIX+Xm+x7Sxf2Jj2RuLJOB8xkzZrDlWBiGsQUvKnKRYV5fUivoPOg7X7+Plj1OAy5AhnwbJBBeQUbMamSkdHJjjqdmoKgXFQeiJnq/R2LkNOTtWIeM/zQkNJ5DHopd3f164B4kMJYQGmZ1Jc3eh7wa85GoyM6uK5zUMLaO6dOnk5eXF1dNrb4KS4nf4dpERZQAhT+tQcuU3ycUFccjUZHhxj6PxEVXVFnqUBRadR3wX+AE1I18CfKIvIbExSbkfVwJfPjhh+y9995b+oCA4iqWJ/w9Y8aMLeLC554sd9s3F3HRGDvYhEUDMGHRMEJxERUSE9FlcAQ6ZUxAid7l6KezAQkE36omhhyMtyCn/TB3v46wY3Y/dFrwZWe3hsuAmwgCq1hiGDsqRUVFcc2soqKiNkMnIyOD/Px8Bg8ezJw5cxgxYsQWQ8fHb9+DVjuvQbHaDxCKijXIGDkOdcp+HziXeHGRKCr8xfpy5JH4OQqxWovEhY9X35NQVNzvjn0O8QZabeIiWgHKRIXxXREVF40R8iOR1+AN5Bm8ntprTs5CQuBVJCYGIg9eoqjw+I72ZcgrOAqFSUVFRRf0+2uPfs+TUXC2x7+fp1AQ+DHIC7IReUHeRL+1GTNmkJuby0iU0O6rbTUXcWHCYhtjwqLhSFxkISejFxJvoEtzFfIwHEbohGyL1u0K3e0dVDV6M1rjq0biYzjwMTV/+lvDZuRV+XyHj4k0jB2VoqIihgwZsmVVsbCwsIao8ETFhV9VfOGFFzjqqKPIIayjD8qfeBkJgwrgUtTJeG/CYtknIIOmDAWSfoBWRY9HouIOFPB5E6GoqEDLMv+HVk5/hsTFZLQc8w0yro6idlHhSRQXJiqMpsSLC/9bbMh1ORaL0R3lG40CLiZ5rsYsFAblf5cvomXLc1HZl0TLwudUPIiaW6aiEMeTCEXFINTxfgkqpNAOCYLJKEcpxx1PR/Rb3A1ZPoVoMeEkZIXcgH5zOej8kfhbbA7iwoTFNsaEReOQuEhFl89+aK1uJPpJ34e8DyDnv
RcPZSj/IuycPmDAABYuXOjm2wn9vLdlnsyT6DK+2oSFYeyAeFHhVxWnodXGZKLCczaKr94HGQczkUFwHqoe0wFljwXIoH8LnfU2otCLd93rv0CJ2FWEjfMuR12593Pb5aBVzVy0JONFRarb5gq0FDMWGTRF6Ow6BtgfraDWJio8XlxEV0lNVBhNhRcXjbkmx2IxJk+ezEknnZT0txsVFa8gq+RJ9J0/HsVTRLM0vaj4ClVdm4V+G8eh39YylBe1S2T8Yje2I1oIeAeJhN7odzgJeUgK0HJqe+AxdwzXoxCqc5DQ8R6X5iQuTFhsY0xYfDskCEA/tbYoBMoLiY1ITFQAMHz4cGbPnp1kFj9XGlonmMbW5VR4SoDDgU8IAisUZxg7GlFRcREK4LwSeRUOIuwBEeVxtNq5L8pn+ACdkfZHtfBKCMMi1iLjv6sbcx0yPtKRAPmdmyNwz6WiM+N5SHzsiVY4H3fjT0SCxIsKf3b9JfJ+7IHq4T3h9t0JreI2dNU36rExUWG0RGbMmMHo0aPjxEWiqDgAGfkHIWHwUyQsBiJxkSgq+iKrZSaq9LaR0FOREhkPirW4B4mWA93Ni4rD3Jj57pgy3HZ/QvkYv0UezAD9dpubuGiMHWx1No3tRhAE7lZKnz6Z6DL3NbCCPn06EwTlW8bUJSpEtPLU1lKNqksV4IWNYRg7Domi4nTCUIozUG7C4wnbeFExCeU2tEN9H05AvtYpyEfbxW3fFomKLm5MCjIW1iKvxMPIoGiHzkIVKEl0Ikqc3g24HZ31HkNFtpcRLyoWoZCnq5HX4i10hi2j4aICN664fXsTFUaLJjc3l+nTpzMDGeN3ojClmYSi4iIkFi5CpWCmIC/lQiT8k4mKhWixYDkSFN1RCFM58XUs30Xhib687Rvo93ww+k1WIxGTQ5jgfbh73B0tMMRQc0o/7+nuWHOBvffem48++mirP6ftTavpvG00b5YsWbJV2/tKJjVL2DaWanTKeRAosRAow9jBSCYqQDkTOyGPQEfU0A4UKhEVFUehROsKVMIy091eceMPQQKjjPACeyRK/jweJY0uQp6FFLRi2gsZLwtRyNOH6Ay1DoVcfIn6XoDiwQe7OeYir8dSFH4xA4WS5ObmNvpzqatPh2G0FLy4GD16NDchb+J/kUfPWw0pqIA9yNP4FGE52WJUWMGLikWE+VS7oIWAUnSOqHTbtEX9ap5Agd8jUJ+MSUhUVLoxVei8kI2Ew3x0TuiGvJrfQ6Kk3I3zDRdOR7/tlRBXura5YsLCaDGE4uJvaO0uWsK2IZSgtb0HgRXN/sdpGMa2pTZR4TkUxVKD8hxeQsb7AuJFxUqUNZbtXve8jIyCSciYKAV+jFZNz0JhS4+gMKsKZIiAhMdKJGzeRYmehyID5Q5UZWo5obiY4MZ3A74gTP78tqLCMFoTUXGxCnnxUlBYkf/NR8XFDUhcHIe8B32R58AL/YHIU+h/6z2QdyEd/Y5fQuFPg9zYIcC/CL2Rae5WTujlGOrmfhOYjoo4ZCJPCISiAnQOeIWGJ7Q3NRYKZbQo9KNagZyc4wiLwdXFZjfucLediQrD2BHJy8ujB0q8TpaYDTLos1A89X7IuDgUGf8fEC8qQJVlhiNjpAsKR1qPVjR/RryoAOVKnI8MjbZIXDyPjIxPUGWn3YG/o14VZxHmZnhx8QwmKgyjLry4+CYW44svvtgSHnVfZEwK+h1uRkUOnkJhUG2Qp6In8lIEKNahEnkzNqDfeBkKd3ocCYoL0G96JvrNVhDmRFW7eVOR56IalYx+Hi0cHOyOI0C5Up76+nk0R8xjYbQ4Qs/FJ6iOgq8S70vYtsP1i0V5FK+gn3kJUNFifpyGYWxb8vPzGTJkCK8AZ6JEyypkRAyOjIt6LnJRZ52lSECMRMmVVWhVcw066/wHGRnZyPMwEZ11znU3zyrk0ShHORSgsIl1qL7+ENjSVRgkLmYiQbHe3aYgb8gbmKgwjNrIzc2luroag
C+++IJdd901rsP3Hei3tsw9V46ERwZaMBjgnvvSjRmNxIIP7C5FoqIjynEC+KF7bhYSLjlIRHiroy06b7yIfvfHIOtlsxsX7c7VEkUFmLAwWij+RyaB4ftgJFaeii9h25J+mIZhbFuiYVB7oZCmM5BQeAetGF4YGX8oqhL1BUqk/hB5MkChD+Pc368QxmAfhLrt7I6Exznu5nnO3Q5G4gIkLlYjn+ooFAYV5V63j2XAlClTmDRpErFYjLUoNMJEhWHUT15eXpy4mEF8E75YLEY3lJORjX7TPpH7K7Tw0Csy31IUEnU4EghFKJSpBGWBliAvSAqhdxMkKl5323wPVY5qS7gcWozExV20TFEBFgpltHCilafGjBlMtPLUmDGDCYLSLWMMw9gxScytuJswkfpBVD72XVRP3nMHMhaOQeFLJ6LSkbciMfKQG9MbeQ52Bv6NPCEZqHKUFxUBEhTPu23fdNsch8KtNqOk7EcIvRIgUXEd8lh4UQE6730Ti5moMIxG4MXFDOQBjIoK39L3Nyi3CeSR+Ap5IHsjURCgsrR9UDPKsagAwydoEaIYeTqOQqGRs1C3btz201Ci9zFuzAa0DOpzNlLQYkNLFRVgHgujFfHpp5829SEYhtHMSJawXU0YfvARKgt7AmFydHtUSeZwtILZBuVA7Ic8He1Q6cp30EX0DJR/8SJwKmqEB8rH2BkJiudRguYRKLTqSRT69A5a3RxI6BFZj0rM3khNUeHxIR6GYTScqOciKip893kI2/RmIhGxBAmM/oT5Fv2R8FiCzhOgHI0sdG4AnQemImHhG1xORaGT3yfMxfJ5FzEUEvkILVdUgHksDMMwjFZKbaLiY+TXPB0lWL+CwpFOQAb9FEJR0RkFVBajakwnIW9De+TFmOvGTEDhDS8iY6QzMlAmEy8qUlGy5t5IvKxEK5e3Iw9FlpujLlFhGMa3x3f2rk1UVBF6EIagwgwLUa5FKjKcv0QiYIB77mjgR8gbORkJkP+6+2x0vshCuVezgaeRZzMDnV8q3X47o/NKDtEmwy0L81gYhmEYrY76REUOuuAPQRf0Z1By9k7I6B+CLvJrURL2UGQE7Ik8GL5D94/d30eh8IZn3f6PRGEPryFjwosKUOx1JjCeMDcDJC5mI0+GiQrD2H40RFR4hiARsdA9DpCnYgAKifJMcPdPIeExEoVVDY+MORRlf76AQqWOdPusQh6NFFTsoQ1aXIjFYi3Oc2HCwjAMw2h15OXl0QtdyGsTFQEyGA5EF/JpyIMxAomJhShMaQAyAjagVccjUP7FG6ibziS0Ank0obgodLcfIeFQiYTFHBTm0N4d25coNOpgd9wvoNCs4cCxxx7b4owKw2judOjQgV7AYdQvKkCJ1A8h4VCKyscOR6LC/zp9+M8E1JfifdQsc1hknnIkHo50+5jq9nm4ez5aFcofV0sUFxYKZRiGYbQ6SktLWY5W/+9AAmEpqk0fFRUbUM7EeOCfyPMwEImJte5+FzcuDQmLaaiQ9XjkxdgfiZXnkbdjIhIVE5HYSEfCogCJii7u+cEo9noXJFL+jspdvoMqVLXkcAjDaK7k5+ezHFVnuo/6RcXNKFH7XrQ40R39hj0+7wJUavYTJBb2jrzuRYXva3E4EiG+7GxbZJCvi8x7BvArWt55wDwWhmEYRqvEx1H7vhC7odCjucg48KIiDRkU7VECZToSFDsDHdCFMgOtVk5BHbbHIWFRjQTFF8gDkYo8GmNRuJOfbxNK4qwiTO4ehLwVb6Aa+bu4G7TsFUvDaM5kZ2dTWFjI0KFDuRaJ/p9Su6gYhPKmpqK8iSHo/OHPDVVIPDyOCkAcApyChEJAKCyq0fnBP3cQYcU4LzY2I3ERQ+ebH7tjaUnnAfNYGIZhGK2WIAgoQI2wPke16GcAC4gXFWmEhkCVe9wRrTKWIWHxLjIc9kKiItU99wqKp94HGQMlhKICt30bZJC0Qd25QauV76MwqGJUBjfKWbTMFUvDa
O54cTETGe2PoN+pJyoqrkILAMcBY9Diwnvot+w9FS+6OfZ12/wG9bsBnVPaoPNBGTqnFKIFhUGoCeeLqEpUqhuzHp2DSlGo5WHI29oSzgPmsTAMwzBaNd5zcSPqX5GNykdmIM+EX6n0K23eWEhzf1cgI+EFVEpyIOqYMwflU0xC3pAvE/abhVYqy5FhMRJddGcBf0MejKNQt94pqCIVKM/DY54Lw9g+RD0XN7jnTka9bbyoeAYtNPiy0T9BoY9PovPGUWhh4Ul0DliPijAciDwcvrT1OpQ7VYE8pnOQd+JR4DP0m89ACwxl6NzRAZ2DHkFFIL4GPvzww+3wSWxbTFgYhmEYrR5vQDyMmtftjxK026E8hygpyCDY4G7vohXFg1GYQymq/PIBMkQmoFXJEiQ4OrkxqSjMIoUwzGo48B9kLHQALnf7PBYJjRdRSES02sxZyBhZA2RmZrJ+/XoMw9h6EsVFAfIkDEaiAuRJ/Az9Xj9wry0D/gXMB/KRkOiJ8qRWogWMQYThU8vQ73pXJCrK0PngBfS7Xk5NUdHGvX4PEhUAa9as2eafwbbGQqEMwzCMVk9eXh7dUQjCFLRymIUMgy8J46A3ITGwEV3oNyEDozsSFVkojOp9lNxdilYpl6FeGN2QKGnv5om5W4U7jpdQcudBaCX016jC1PPIeBmHuvxGuQ8lmn6NEk8Nw9h2RMOiHgMWEZaOvQOJ/57AZcjw/x8SEmnoXPI9FGL5HPo9H4hCL0uQV+Mt5OV8HrgfnUOWAjegfKydgVvdvqOi4jE3z+7Iw5kLHHHEEUybNm17fAzbDBMWhmEYRqtn48aNrED5CkcgUdAWGf1FSCBUoAt6CRIVHZGgGI+8CS+48c8iz8NwVHWqwN12RuEMKYRVXjKQ56ICCY+pKBTreuAcFJJ1MjIyDgZ+QfyF+T7gOpQXUlRUxKBBg7bdh2IYBhCKi6/RQsF1wMVIVPQAzkcLAptQ+eofoLCo7shjMRWFRY1FeVH9gCvReeNMYD8URlmIFhc+ROeaj9H5pQR5Jp4kXlTkIdHxM9SPpyWICxMWhmEYRqvHN8T6FfB7tAI5D4U69XJj0pCXYTPyJnhvw4EoVOlNFAJ1GDII5ri/S9FK5gZgFTIavkQrjzE3byoKuzqKUKScgVY7v0TeCjBRYRhNRXZ2NkEQMGPGDGagXItUJCI+RrlR5yCh0AstEByMQiVXI0/lRchzsQ55Lfd0t00on+swNz5AyeDpaGHjVPTb/zvwO+JFRRt3fKcjT0p/JC7mzJmzXT6HrSUWWCZYvZSUlNCpUyeKi4vJyspq6sMxDMMwGkFil13fLG8xukgPQFWcKpA4yCDMmejo/t6IDIInUWhCCgqJ2A8ZCAHKg1jnxnYGRiGvhs/ZqEDC5Q0kLNqhajMz3HFGj9FEhWE0HTNnziQ3NxdQ0vVA1FvmR2gRIQ2FJi5B1eYmowWJY4E7UUjkmUh8lKFGeR3QOceLls1IRFSjc8II1GhzCRIgdxGKClD+1VSUx/URsCYtjbKyaC2r7Udj7GBL3jYMwzBaLYmiAhTSsBwlV3ZCF/4UtKqYgUSGx4uLjqic7EYUK+1DIV5GyZyHotyIDSgkahMw080xHAmPVOTBOMTt/yYUQjV16lSOOeaYLT035qKcChMVhtE0jBo1ijlz5jB8+HB6IZFwDPIwVKFFgt7I87kbEgD3o3ysbsC5yAvxNbDQbdcXhVWNQcb3PcAT6Bx0NMq72gWJjn4Jx+NFRSrKC/saCL4jUdFYTFgYhmEYrZJkouIjlGjdF61EdkHGvu++m4pWEFNQIiVIXLRF4uBQJDxeQrkax6NVxlIUptARCYL21BQXKWils8jt+xC3z4kTJxIEwZayuKuR4WCiwjCajry8PHqhRYTjUd4VaPFhI1qQqEK/81OQN/IpJChOROKjJzq/LHLb7ozONSeg0KoXgd8iUVGGQiU7AA8Af0Fhm9MIRcU7brvmHGxkwsIwDMNodWRkZ
NALhRRERcVyFJ7QDl3AO7nX3iTsSeErwpQTios16MLeCfWdWIbCF650455F4U+T3NiZqLzku4TiIgd5I2YiI+LfaNUy2qMiCAIyMzMpys83UWEYTcjmzZuJxWK8hhYGjiNeXKxGiw4d3N+7osWCoUhk+K7cPdw2C1Co5EAU6vgOqgI3ibAJZyYwEYmR+4HTCJt4tgRRASYsDMMwjFZIfn4+Q4YM4XV0Ef8hil3ug3IZlqNVx3Wor8QzKA76BXRhHImSqvuj8IRSdHHPRJWgZiLRkoU8FwuRuOiJQqVeQZ6Ps1ATrJnIKFmKVjJPQOETyXpUWJ8Kw2geeC/iTe6xFxdlyFORgs4TpcAQ9PuvRIsQaYQNMru7+8Xo3PA4qiD1Z+JFhe+rfQwqBHEDqmDXEZ0nmruoAKsKZRiGYbRCBg8ezPz585mBkqCnIFHxNarK1BN5LZ5Bhv+RKOzgKNQQ63HknVhE2KW7HAmPKWilcTwyIN5DK4m5SFw8jhI6b0J18E9AK5fzkag4njAp03pUGEbzJggCCtDv+UnkodiEjP1uyPDvhMRDVFTg7tPQ778X8lo8iSpF/ZHkogJ0rslGeV1VtBxRAeaxMAzDMFopXlwMGTKEG4ALUKWmWe71D1Ds8q7oQg8y+EtRxadRqFLLIvfaF8DDyFNxLMq5eBMZCnshL8hU5L1IXOk8C9WwH0y8qLDKT4bR/En8PZ9OKB4GIk/EElRhLq3m5qSgfIpXgJPc9ptJLipwc8xGixbLaTmiAkxYGIZhGK2YZOJiJMpt+BiVjzwIJVS/jTpqn4sMgalujhEoKfM95KnIRjkWs5GoGIPCFj5Cde3bEuZMJIoLj4kKw2hZ1PZ7HubulyJBMCCyTTnyUr6IPJkHA+e51yrRIkOiqABVi3oZeUEhPJ+0BExYGIZhGK2aqLi4HjWlewWFQg1AwuEd4F504Z8Y2XYqyo94FRkCu6BSkE8gr8ahSFS8hxreHYc69q6hdnFhosIwWib+9/wQytvq7p734mKBux9AKCo2ospxvQlFBdRugD8BPIK8qL9H4ZTRAg/NHRMWhmEYRqsnNzeXrujCvwpYiUTCTcjzkI9KQS5CCd2+tvzXaLXxICQsHgf2QAnf65E4mIFK2h6DxElizkRUXFiPCsNoufgS1icTigpPVFxUoTyuVJSHcRwSC7cQLy4SSRQVEHpGWoq4MGFhGIZhtHry8/MZOnQoQ1G37DtRB+0ClEA5GPWeOI6wBO0CVBXqIuTlmIE8GB8iwyKGQqoqkViZjDwRM93+Ro8evWX/1qPCMFo2yfriJDIMeSqK0PlhoHt+krt/xN1HxUU5CqF6AuVwZaOS1wFhmFRLEhcmLAzDMIxWT3Z2NoWFhQwdOpR0JAI+ReJiNsqpOJvQAFiAqjh1RzkZAZDnXit0fw9CguMd4DbkjVhKTVHhsR4VhtEyaYioAC0y7IS8mUuQV9QncycTF4uRl7QLyuPqAPwUJWyDCkhExcViVFyiOYsLExaGYRjGDkFUXFznnuuAqrLsj0QGhKKiB6oYVYFCGyAUF35FcpDbbiy66ANJRYXHelQYRsuioaIClIy9irCvRRo6T3gJEBUXpcC+6PyzEuV3TUOhksehBQwIxcWrwApgNPKypqenU1ZWtpXvbttjwsIwDMPYYYiKi/OQMLgYlYv1jepWEooKUJx0orgIkPgoduOPQTHVN9G8VxMNw2g4jREVoEpzpSi0sgdhX4tEcVGFwp42Ab9Gns4UN+5lNy4qLpYhj0Y6Kmf9DTC7mfa9MWFhGIZh7FBkZ2cDin8+H1VzqkYX/q9R9ZZdE7ZZi8IVqtzYHDd+OWp8lQfshgyDlhAHbRhG3bRr145eqG9NQ0TFP1DDzdOA76PFiHL3WlRcVKCGnIWostyXwFBUpc6HPUXFxWMo3LISeUULgdmzZzN8+PBv/+a2IyYsDMMwjB2K1NTULQbDmUgopCAxsZ4wedtzB/AQStg+I2H8IBTKkOLGn
kXo+UhNTaWiomI7vhPDMLYX+fn5DB8+nNdRiejT6xj7D+AB5KV4GeiKeuRAvLiocLeXkHcjmzBkqn/CnC8D81Bo1dtAZ7Tw8eyzzzZbUQHhudAwDMMwdgjy8/NZDryBBENbdNFPIbmouBH1qbgJlZNt624xNz56Ib2PmuVmDcNoeQwbNow5c+YwA1V7u6+WcV5U5KHS1GNRmNNT6LySisTFZiQqnnev56EKddOQJ7QK9cjpBwxHXo/33euDgZ+gfItjjjmGwkIfJNX8MGFhGIZh7FDk5ORQUFDADOB64MFaxnlRMQtVdCpA4uJuknfLjTa+KygoICcnZ1sfumEY3yH1iYub0WLDLu61KhReORYlaEfFRZV77u/I+3Al6qztxUYMeUO9uMhzt51ReNU/UT5Yrjuu5iouTFgYhmEYOxxRcZHMYEgUFRAvLu5IGG+iwjBaJ7WJizsIy0x/jbwQMeSd+CU1xcUj6JyyCXk8XwZGoHLWFcjbkUIoLl5FjTuPAC5w25yO+uqMovmKCxMWhmEYxg5JbeIimajwJBMXJioMo3WTKC4uJjxHTJ8+fcvzjyIRUUkoLh4GfufGp6GmnEeiZpsvEYqLcuAFZJj/EXkx9kfnmtTIsZwCXEjzFRdNJixeeuklrrzySo4++mh69+5NLBYjFouxdOnSbz3ngAEDtsyT7LbPPvtsw3dgGIZhtHQSxUXUYKitqlNUXFyMiQrDaA0UFRWRkZFR6+tRcfEoOkd07NiR5cvVzm4ROic8TpiofT6qBPUoEhUfol4XxwDjSS4ufgs8h0TFjcRXWfJV6U6l+YqLJqsKdeKJJ1JcXLxd5j722GOTfjkGDx68XfZnGIZhtFy8uBg5ciQrUVhDfaVigyAgFoux2o03UWEYLZeioiKGDBlCT+ouFZ2aKt9BpXucuWED48aNYwQy9B8DbnCvnYiEwWxUPe4RFCrl++FMcPdT3f14lKz9BqGo8OPbIEHhK9K1QXkXM1HVqGHDhjWb8tZNJiwmTZpEdnY2Y8aMYcyYMXTv3n2bzf2vf/2LAQMGbLP5DMMwjNaNFxd5eXkEDSwRGwQBqampFOTnm6gwjBaKFxW5wKHAKyQXFwsWLGDw4MHkAgchAdAR6Iaa4p2BciDGIXExA3gWdcvuCDyJ+lJ8g7pngzwXMSQu5iOhcD4qbd0WCYl85JlIIRQVoCISrwHrIu+jOSygN5mwuPfee5tq14ZhGIZRg5ycnEb3nbA+FYbRcomKiouQMPA5VlFxERUVZ6OE6pGozGw/1LhuV+ALJEzGuddSUTWnucD9wBRgNXAAcCsSChPdsbwAHIjKygbIU3EeKnW9D0oU96LiDhR2leLmuQ4YMmQI8+fPb3JxYcnbhmEYhmEYxg5FMlEBanL5KyAHiYtkoqKzG/cLYC0SFzEkLmJAMfI2ZKHO2n8HSpB34XDgM+DcyLFMBI4H2rvXUpCoyAeOQgnf5yCxERUV0wkrReUicVFUVLRNPp9vS6vsvH3fffexZs0aKisr6d27NwceeCDf+973mvqwDMMwDMMwjCamNlHhOcvdXw9JRUUX9/pp7v5OQs9FhrsNRqFVbwFPIJFxLhIWQ4F73Jy3Ax8BG4G+KGfrJGChO47TUWWpm4FDUGhVWyQqPKej0KuVQF5eHqWlpd/iU9k2tEph8ec//7nGc3vuuSePPPIIQ4YMaYIjMgzDMAzDMJqa+kSF52eoB0Vb5JlIFBWe09z9nSiEKQMY6OY+HjgM6IBExc+Q12K82+YeFPp0Auq2PRSJjSJCUQHwU1Qx6jpgHsrpiHIHCsH6GgiaUFRAKwuFOvLII3nkkUeYP38+mzZtYuHChTz44IP069ePjz/+mIMOOogVK1bUO09ZWRklJSVxN8MwDMMwDKNlk5eXRy/kTahNVAAUoopL44CjSS4qPKch0VCKRMUFKFH7PGADqhh1BBIVnVHp2SOBnyMR8SISFZciz8M5SHCUu/mrkUA5F5WmHQKUudfq6
rvTFMSCRh7FpZdeynPPPdfoHd19992MHTu29gOJxQBYsmQJffr0afT8dbFmzRp23313Fi1axAUXXMD1119f5/grr7ySq666qsbzxcXFZGVlbdNjMwzDMAzDML47YrEYI1ETu7PqGHcp8F8kGs6oY1wVcLkbew4SFRegZO5fIo9DibuluPEdgHQUJnU3kAmsR96R05CoKEcJ4GnutWLU9+IWlNtxKQql2t6ioqSkhE6dOjXIDm50KNSyZcuYO3duow+qKeO9unbtyoUXXsiFF17I888/X6+wuOyyy7j44ou3PC4pKaFv377b+SgNwzAMwzCM7Y3vQ3OTe1ybuPgHMt7vRoLgtCRjvKh4D4mK41Hviv8ir8RP3bgsYBPKpchEoiINeU1SgMlIVJzqxqe5e++1yHT345GQeBK4BlhO8/BUeBotLCZPnszkyZO3x7FsV0aMGAHQoM7e6enppKenb+9DMgzDMAzDMJqAxoqLO93j0xJeLwJeRSFTxyORMAr1ungbuBeJh/VIhHQm9EKkujlORdWfdvLH5u5T3TYVyGDPRF28X0eCwr+P5kSryrGoi9WrVwOQmZlZz0jDMAzDMAyjtRMEAQWofOsddYz7BzAWiYv73XNVKA9jZ/faf1D1phgSBjcCe7p570T5F+VAJxQGlebGenZKeAwSFFVIYKQA96GE7+6oHG5zpFVWhUrGY489BsBee+3VxEdiGIZhGIZhNAca6rn4K/Is3ImSqeciT8X+wG9Rt+273NhTkLi4BFV5uhWVh61ASdzH+H0nHoubO8WNjeZY3IfyKcagHIu7kCBK1iW8KWlxHotDDjmE4cOH88wzz8Q9/+yzz/Lpp5/WGL9+/XouvPDCLQnn0dwJwzAMwzAMY8fGey4eQn0iEqkEJgH/A+YDf0Y5FeOAT1EZ2L8gz8VdwIPIU3EuYbO895Cn4WXgWfecx3sqSoFvUPWo2kTF7agD989RpaieQPv27bfq/W9LmsxjcfXVV/Piiy/WeH7ChAmkpSllZcyYMdx6661xrxcVFfHll19SXFwc9/xbb73FDTfcQL9+/cjNzaVz584sW7aML774grVr19K2bVv+9a9/ceihh26/N2UYhmEYhmG0KHyVqJOR8R/Fi4olqEP2U0A7JBqOBx5AYuIK4J/Ie3EVapB3CPAYEgLHASNRNaeX3dzHoBX+AImK9e7vdShkqiM1RYXnQeANYA1QvnnzVn4C244mExZFRUV8+OGHNZ7//PPPt/zdrl27Bs83ceJESktL+eyzz/jkk09Ys2YNaWlp9OvXj+OPP55zzz2X3NzcbXLshmEYhmEYRsunrtKzUVFxHtAehUDtjnIrUgirON0F/AZ1zG6HvBfdgEFAHpDr5tsZCYpn3XbHoF4X65EAqUBeig2oPO1k5Lm4DYmOGBIb17n5/u1eay4hUY3uY7Ej0pj6vYZhGIZhGEbzp7Gi4k3gx0A/oAB5FQ5FYU0PoCTvzcDvkSfhY1RFahQKo0p1fw9HlZ2eBL4PfA95J0rdmO7Is7EB9a24B3ksbkPJ415UfIFEyPZukrdd+1gYhmEYhmEYRkvm24qKcZFxBUggHAr0AY4FPkdejYXAiaiXxUfIi5HtxlW6bSqBx5EwOQF5QEoIO3QHqG8FSFz8AOWAREUF7vjnIjGTkZHRpL3jTFgYhmEYhmEYOwwNFRXnozyJPwOjiRcVI9x9AfI8pAAnobCnp5GomIRExXKUXzEQKENejVTkqZgDvOPm9n0sipGo6OIej0dlZ28B5iHPhhcVoNCo14Gvgfn5+Y37MLYxJiwMwzAMwzCMHYKMjAx6oYpKyUrLfgTMBH6IEq7bouZ1LwEvuL89XlzMQ96IHqiJ3f5Ab0JRMQIYRlhO1veneAN16T6SUFT4bmvFKJ/Ci4uj3ba3AUOBRe55n28xA5g/fz6DBw9u+IexHWhx5WYNwzAMwzAM49uQn5/PcrTCf1+S1/cDdkNG/2QkAsa721QkLqKMQGFNP
VBORBbJRQWEgiINeMvNdSQwAVgdmTMT5W+UIoHRBeVdnAv8CuVjDKD5iQowYWEYhmEYhmG0UmbPnk1aWhoffPABsViMwYMHM3/+fGYgozyZuPgr8hZci8q6liMBkExcVKPQpg1IDGSiHIklQC/kXcDNUYE8IK8BzxGKivtQ/oY/lhg1xUUq8nj8HIVNlaMSt81JVICFQhmGYRiGYRitkNmzZ5OTk8POwL777ktPwrKs8+fPZ8iQIVznxp7u7i9HORLzULnYq5ChfwoSAiBxARIaFUhUtCcMY+qEvBZfo4Z6/QlFxSvAFNSB24sK3/TujoRjyUQCohR5KdqiqlCvAytRPkhzEhVgwsIwDMMwDMNoZXhRkYNW+3sAByOjvDZxMQ94FYmEEShx+3XgSvd6VFw84+6PQEb/RiQAvLjYB/gAeRQqUMO8V1CDvUNQHsadqNrTKOQ9uYR4cVGKvCFRUXEdKitbTfMTFWDCwjAMwzAMw2hFREXFzsg4/wVK2M4mvqGcFxeXolKwWW782UhE5KAeEVe6uU8BDkfG/VTUCK8T8moUuzFeXPQAFqPKT5uQJ2Swm+MWdxzdkcBoizwXZyNxUeH23wGVnr0fuB4llkPzFBVgwsIwDMMwDMNoJSQTFeejUrAlwEQ3LrFbdTdCUXEWcAbqhu09FDcCf0PhTx8CC1DlqEyUsNzZjfPiIg31stgF5VwsAPZFTfPOBf6DKkLFkNfiLBQOdTvKobgJiYtzCUXFDGDRokX0799/Kz6h7YsJC8MwDMMwDKPFU5eoAAkHqCkuRiJhsYlQVEDoefgBynV4ELgIlYadRCg6Yu6+s7svRl6MQciz4cXGTsgDcg/yYlSjLtw3udfPcvtYgfpj3AYUoXCsmTR/UQEmLAzDMAzDMIwWTn2iwhMVF9Uo7KicmqLCEwMygGOQkf82cCDyXMSSjO3s/i52++qJkrszUbJ1D5S0HUMejDmE4mIuEhEbgenAQcBjSGgAzV5UgAkLwzAMwzAMo4WTl5dHL9SD4nPUyC5RVHi8uJgEpAOPu7GJoqIUiY9OqETsOuBUoB9hf4kogbvvHNl+MEq+LkD5EgcCP0V9KG4iXlw85sbMQqFUq1Gytq8YFQ3daq6YsDAMwzAMwzBaNOXl5cRiMb5AHou3gXupKRY8WcjgPws4FoVCRfGiIh14FgmLfZHHoRe1iwpQbkZn5OlIjYwtQCFNfYCfuW1uJhQXjyGxkxaZ6yJ3vwhYA2RlZVFSUlLLu2p6rEGeYRiGYRiG0eIJgoACYBUqGXsHEhe10cbd1yUqlqOGeLsQioq9E8Zvivz9InCxu0+NPD8CGOnmnoY6cJ+B8jcWAy8DPyFeVHjuQyFSX6PO4c0Z81gYhmEYhmEYrYIgCOISsn1fiNo8F4lUIKM/FeVepCLPw0soX2JiwvjZqP9FNsrBeAaFP/k+F0dFxo5A4VQLUd7Ea0gwrEFVo5JxHwqHmgEsWLCAgQMHNvCdNA3msTAMwzAMwzBaDfPmzWuU5wJk8FejFfc2yLOwFnkevkB5FV+gJnee2Si8aSPwEOpNcQRwmbt/Bnk7PHOBr1CY1CtIMMxEiea3EYogT0sTFWDCwjAMwzAMw2jGzJ8/n44dOzZ4bHZ2Nt2gweLiI+AN1GMiQAKjFJiCmteNRSLhMOAJJAq8qMhAidyPu8er3ZzHEi8u5iKB0AGFV93gHhcWFm4J4bqJUFy0RFEBJiwMwzAMwzCMJmTBggVkZmYmfc0LhayNG4nFwgKvmZmZLFiwgHnz5tGhQ4e4sb7Z3U7I2K9EYmEyEhoVhMnWHyFDvw/KYfgQCYUy4EmgEIUwpQDHIXFxA/AIEhVfouZ5C4AlSBzc6eb24uI21J8imajIzs4GiBMXF9MyRQVYjoVhGIZhGIbRRCxYsIDBgwfTk5rlVL1QyAUORZ4CLy56AYMHDwbYsi0or2KAG/+6m+dLoDcqKdsGCYgMlBuxHJV6HQbMR0KkAhgInIjCmy518
5yOwqNmuG03uGOaBVuOOxaLbWl49wvU7Xsmqvq0EXlGEkWFx+eHrHbH2NJEBZiwMAzDMAzDMJoALyoShUMQBFtExShUcvVUlBR9M/I6HIe8Bt3dtpORV2EAaoz3U1SF6Z9u/NHAD5EYaA98hsrCjkCiAmCIuy9w98e4ey8uZrhjXOqefxQJgKgY8uLAN7x7DVV9ApWT/ZrkoiK6fVZWFgvy81ucqAATFoZhGIZhGMZ3TFRUXIS8AXegsCLvfRgJnIMqKy1CpVnLgIdRCNNA4GTUpTodeSXORZ6GKtQf4ifIS7AW+BTYCxn865GgGILConyQVV3i4k4kTIIgYN68eeTl5RFs3FjjvSV6Hj744AP23ntvOnToQGF+fq2iwtOc+1TUhwkLwzAMwzAM4zsjmagANaurBv6AekacB/wIiYI2KG9iovv7bmAQKvH6MuozcR5wgptjKcp5OBEYBdzvxpWjUKcRSERUu32nUL+4uA2FPQFkZ2ezMYmo8Hhx4UUFUOf41oIJC8MwDMMwDOM7oTZRAfIyLESeiNOQh6IcCYkuSFisQB6MdOAB1GF7MBIVpyChsAgJi4GoTGxfN//9bvwZyFvhA5jWoRKwieJitZtnMGH/ituomQtSGw0Z09owYWEYhmEYhmFsd+oSFaCE6DdQ5aWfoNyIzYSiYj0SDl1RInYaEheTkFchQKFSS1FFqH6EYU5HIQ/Ehyhxu9q99iLwNBIORxOKi7kojKkv8oascvsoQg3tGioudjRMWBiGYRiGYRjbnby8PHqhZOtEUREgD8NhqJpTT+BIJCo6EfaWaI+8CxXAj4HDkadjrZujExIDS5DA6OPmfxrlWByJcjG8qHgKhVQ97cZNQCVm56DKU3shQbMZdd9+HQmO999/f2s/jlaJCQvDMAzDMAxju7N+/XpisRivokTts9zzAQp5CoAxqNrSfchzcBoKhUpBnoYNyND3XS9SkehIQSFNMVQZCtRbApTo/RgwDoVBeVExxT13GDAViYtvkKiJiopiN/5mVDr2/fffZ5999tn6D6QVYsLCMAzDMAzD+E6IlmMF9XrwDeteAJ5FpWWXEJaQPRUJic5uG18zKYYESYZ7/WvkuYBQXDwAvIvK0yaKisNRA7s1wPeBT1CDuhNQ2JOJisZjnbcNwzAMw2iRFBYW0r59+0Zt0759ewoLC7fTERkNIdpl+k7kcXge9Yg4BeVDHAPsjjpWP4CM/BgSFx2RGFiHci+ykEG7C0rqXovEx7vIC7HWvZZMVKxDwuQl4L9uP88hj4qJisZjHgvDMAzDMFochYWFDBs2LGnH5tqIxWL0AoYNG8bcuXMZOnTodj9OIznec3ED8DnKk/gRcAhhZabD3f2D7t57LtoiIdEOJXDHItvsgipLPYjCn36Ays0+BswG8okXFR3cdk+gUrIjkRfkJvf4TUxUNAbzWBiGYRiG0aLwoiIXha3kEDZVq41YLMZIVG0oF4kL81xsPTNmzCAlpXHmZEpKCjNmzKCoqIjZwHvAPkgExJBnYXeUZH048lA8jZKxV6Mk7i5IZFS4m2cF8jRMda//Dnk/vo8ETKKo6O5uJ6PvUQEKqSpAYsNEReMwYWEYhmEYRoshKiouAq4FfkXd4sKLil+68Rdh4mJbMGPGDEaPHk0P531oCLFYjJ5BwOjRoxk8eDC90P+iG6q8BPHi4lmUsH0SqvDkcyo6oMTtFPdcNRIVG1F+xWlIgFzl5voJ8E+Ua1GMEsK7R47rLMLv0dfArFmztlR/MlHRcExYGIZhGIbRIkgUFdGOzbWJi6io8FWITsfExdbiRcXWeI0AlgPTkTHfIToWdeAuRP+341DYU3fkiYghT0W1e34VEhWdUcnZk4Cfo4pQf0NhUz1RGFUnFHq1IuH4ot+jnJwcgiAwUdFITFgYhmEYhtHsqU1UeJKJi2SiwrMjiov58+fTsWPHRm3TsWNH5s+fH/dcVFRsjdcoBzWyG4sqMr1IaJj+AXgH+Bmq5
uQrQPnZK4BK5LVYR7yoqHa3k1HVqXeBKyLH0tndNlK3uGioF8YIMWFhGIZhGEazpj5R4Uk0CmsTFZ4dSVzMnz+f7OxssjZubJDBnJ+fTywWo9PGjWRnZ28RF4miIuo1Op+Ge42+B5yLmuJ1QrkPU5G4+COhqDgVhUgFyDORgrprV7jHxSQXFSnudrKb520kLla5/XfGxMX2wKpCGYZhGIbRrKmrY3Mi3nB9CBmVtYkKz+nADGCl28+mTZu25lCbJV5U5KLP8BXqrqSVn5/PrrvuSg5qHvcGkJ2dzdNPP82kSZOSCrx1KCSqGrg1Mn8yUVHk7k9ESdi3uceHI+M/FYkOLyoqkYhIJ15UBMBXKMk7majwnOzubwYeBv6EcjA6R459BTVzLuaisrYdO3Zkw4YNST8rIx7zWBiGYRiG0azZtGkTy2FLx+b6OAtVEapPVODmewXF+LdUUbFw4UKysrKSvhYVFdGwpREkX433omKkG/cP4EJUsnXSpEmMJLmoiCFj/yQkCmrzGnlR0RWJihOBc4D/AdOQcNgfOBaJCi8iEkVFGurQfSEqJVtMclHhS9Ee4+atAq4H7nevdya55+I+4HX0vcjPz6/xORnJMWFhGIZhGEazJ9pUrSHionvC40qUJBzlDuBG1K8g2ep9LBZj+vTErb4ddRn/UebMmUN6ejoAWVlZLFy4sN55Bw0aRPv16+OEwowZM4jFYnGiwouBHwA/BoYSLy5isdgWUeHFgA8nOgcYjiounRrZ/zpkuLdDydftCMXF/sSLCm/8ZyBR4TkR5Vl8DOyLGtVNRjkVaciDUYVERRv33APA3SiR+353KyU0bKvR/7waNct7FPgPcKDbxx3Ei4v2bvsyJCquQ56sefPmMWTIEIyGYcLCMAzDMIwWQWPFhacShdhcCvzePdcQUdELhUdtrbjwxn/HBOM/kTlz5jBixAi6lpcTi8XIWL+eQYMG1Sou/Lz9kAdiuDtunwfRGRiCch+8qFgALEYegZ8SiotYLEZnYBBwNkp6BhnyKajC0knAl+61auJFRbobn04oLh6MzAMSASnIgF8Xef5+lGB9BHAk8hzcjTpgp6KQpxihwLgXuAs4CHjBbXcfEhtevFS7uYtR+NM9wN4oXOs0YAyhuCgGNqF+GQ9jomJrMGFhGIZhGEaLoaHiohJVGvKiYg5wAGp4dizJRUW3bt1YvHhxjbKoDRUXPuE5ijf+ayvLGovFyM/P3yIqcoHj3bgebv/JxIWfdyRa8e+MhMVQYPTo0fQD8lA36yzU8M2LigEotOl0JC4Gun2Ndp9NN//ZuH15cXGp+0w+JBQMUVHh8eJiJ2S0+3lS3L5Aje7WIcP+bvS/2QuFsPn3fwdqUoebI9U9vgPYDbgaeSPORV6Y+wiFQgrqf/EocDvQF+VY9HT7vhKJi5vdNu1R4vj1mKjYGix52zAMwzCMFoVPCr7JPU7MpahERvMnaJW8N1qlnkjtngrvoejfvz/DCUN4fFhMXl4e+fn5jB49Oukx+dyEnoSJy1FR4UOR/P69uOgF7LrrroBExPlo1X4EMrg7E4qLBQsWMHDgwDhR8SvgTLS6fz8SEcOQAf09FPbTHgmLdkhQ7IzCivqgVf/NyJj+oTue9kiIgRNCSFyAxMUC4DXkNfhN0k8jFBubkaHfyc3jxcVCd7yTkSjYE3gJhUS1R83x3kSJ4KDQrQeR52E3YAJKru7o5rwAWO9eB4m4Kci70cONuRfoj/I7dgL2IBQqi1CSuomKrcM8FoZhGIZhNEs+/vjjWkOHvOfiIeKTbr9CBnwhMB4Ztm2QqChHq/onkrzfxXHI8O6JSpRCfEna2jwXXlQkeiUSRQWEpUy7uWP5MTLwhwLnIVGRhUKPTkVhQzsRios333yzhqiIIUP7YGTYHYzE1Guo2lUuMqir3WfQBgmLfBQCdDLwW+TJyAF2BfohcZHoubgX+MDN+zASF7WRjrwMoNwFT4o7tjtQaFUp8DIy9LsiA
dQHGOeO51bkdboL2A95FVKRCFiNcjbWAIe4Y78P+D/gTiSsHnfz3ojyOPZCYVPXofKzs1ECuImKrceEhWEYhmEYzY6PP/6Yvfbaa4sHIBEvBk4mTNR+GYXoFLr7i5HnYTlaaU9DxvNY5MGIVi46C7gEiYDNyMivcvPWJS6ioiJadWk4Eg/J+m4sQob+z4HBbt6fuL8z0Sr868gYPpxQXIwEDjnkkBqiArTa3te972Xutj9a9f8PEi79gPlIfM0DvkE5GO2R4OiLPBadgGxqios7UAhaDFVRGgrcQu3iwldxAokFj59nDjLs30Iemi7uODojYdSDUFy8gv5vF7hjX4XCoAqRN6YIiY1jkfh4FXmudgeeRR24C1Bo1CWEeRSFhYUEQUBJhw4mKrYBFgplGIZhGEaTM3v2bPLy8igvL98iKmrruxAVFT9327+MVrZXIeP6cMKV/1S0en0pKp/qeQEZrT9HHo0s1OU5HcXen4cM5zaE4iAaFhUEQZyoOB15BQ5DnoDJyPCNchlKVPbC5R339xC0cj4LhQ6VopChNLfdNDfncCQooqLidZRPMAyJojUoD2IMMu5fcO9pfzd+BhIKo9EK82K3/+5ICLRFxn22Gz8HhVo97D7Lz93zT7jP7yH3+e3s3lMKEgi+sV3nyPtf4cbPAjIyMuhYWspuhOJwP+SFKETCBRQqNZFQ8OW4Y8tAImixG7enm/8HwFFISExGImsOMHfuXIYNG8YaVEa2sLCQ7Gy9S+tTsW0wYWEYhmEYRpMye/ZscnJy4rwTdeUljERG5IdIMPRFCborUY7CwWiFvCMybE9z+4mKi78hQ/2XyAjNREYqKKEZkouLaDO9xOMEreRXopCozihP4jLgGkJR8TMkPn6LQncOI+y/8BEy3vOQUJgd+ZzeQ4JpJLAEeV8+RknQA1G5129QrkRb1G36auT5mOrm8OJiBRIvxW6enkhUpCLBErjPox9h+dflbn7Pj5HwORmJik3uvceQwEgUFSABcTKwFphVWkopEip7EnqXdnJjC5HXp8Idw7uEHpSzUKjYN26uTcgbszMSNV8gT80C91nNnTuXoUOHEgQBHTp0oDA/f4uoMLYdTSIsVqxYwbRp05g2bRoff/wxS5YsISUlhX79+nHYYYdx8cUXM2DAgG81d3l5Oddddx2PPPII8+fPJy0tjby8PM4//3x+9KMfbds3YhiGYRjGVuFFRS6KkZ+GvA5RY/0XyEj9KzJ890RG8V7IU7EUGTS/QqKgxL1eila1IV5cjEOJvmejCkRtiA/VgeTi4m7CZnpQU1Tg9pfq9n2Me+5ulDtRjESFP5bD3XwvI3GzARnzuyBDP0AhQp7RaEV/HVql/xKt4vd1c692285ChvXhyNCe4Laf6u7HoXCoRdQUFT4nArefF1E4VV/kjXkHeZF+jAz/85DHZxMSEz5/o5qaosLjk+19En0BYU+K0wjFRTUSDvNRnoSvJLUSJX5nurli7nOoRv+bRShnYoX7XLyo8GzcuLGWIzO2llhQWz/37chJJ53Eww8/TEpKCqNGjWLYsGFs2LCBjz/+mP9v77zDq6rSt32fAAklVB3pEEoIEGJARbErRbEh1lEUexl1LKNTv2lO05nfOMXeUHRUxDKCINjb2JUWOqGFXg2kAJJA9vfHs1b2yclJk0CAvPd1nesk++yy9t6nvM9626ZNm2jWrBkTJkxg2LBhNdrv9u3bGTZsGJ9//jmtWrVi8ODBFBYW8sEHH7Br1y7uuusu7rvvvhqPNz8/n5YtW5KXl1et5jaGYRiGYZRlxYoVDBgwgNzc3NJl0aLiDlTF6A00O346cA5K6C1GcfLPonyAEjTrfTpKJv4niq2/j3C2PQ95D5Ldw3dgfsbt/3JkxBZR1qj2nZ09v0DG9fmoytB8t7w9yov4KTLMY5NWc5Ch3caN/QUkVq6KWe8FJC4GotCfNGQwf4dm972hvwAZ4E1RUvrrbuzpKDRoiVsnC5jhxnY88
tr4ikz3ITFwHwr7KnL7rkhUrAL+gETYeiS+vqZiUdHIXYfdUftuThiyFUt0hS4IhdpVSFxsRyFYT7l1GjduXNodPbart19/Csr5mOv2GSsqjJpTIzs4qANuvfXW4A9/+EOwevXqMssLCgqCSy65JACCNm3aBLm5uTXa7+233x4AQUZGRrBp06bS5dOmTQuSk5MDIJg8eXKNx5uXlxcAQV5eXo23NQzDMIz9hdmzZweRSKRG20QikWD27Nl7dNycnJwACNrJ5g+CIAjmz5+v32wInoJgMQRLINgCwTMQpEDQAYIRECyF4BoIfgjBeRCMg2ArBHkQfAfBRRAcDsGjEJRAkA/BWgg2QLDGrbfbvRZAsMk9+8cOCArd8hUQbHbLn4LgaAiGQNA3auxBEARA0AmCUyAY6/YfxDyWu3PKjdpn7GMXBJdBMNDtJx+C1e78/Dq73XrzIXgFgvHueZY7z3y33pMQ9IfgGAimRO1rCwQTIbjeLff73enOe6e7Nv6R6675Kggec+feGoJz3f3KhOAJt4/tEBS4+1AEQbFb/p277nlR1z3ew+8f98iA4Gm3zcNxrns0QJDu9uHPv59bf+nSpXv0njVCamIH14nHojK2b99Ou3btKCgo4LnnnuPyyy+v1nZbtmyhXbt2FBUV8emnn3L88ceXef3Pf/4zv/3tbxk0aBBffPFFjcZkHgvDMAzjQMd3Y26HZp+r8/PvezusAyZOnMjll19OQUFBtY/ZvHlzJk2axODBg0sTsd8izBvwM9Tno7CVFqgS0COoUVo34HPkEWiBeh/8GHkripBnwfdLuBj1Nbje7S+JsqVVmxN/9rzYrbcdeQkau+fJyEPS3J1/vL4XPZGXZS0KqbqCyj0XreNco6UohKuNG9swFLrk8z1KoLTzdIK7dotR8nIf5E0odNfpLXedZqAwpOtRt+nJKD/jPOTtgPKeGs9WlLOQ6NZZ6vb3PGHo1C0ojCzaU5Hk/i5x42yAwq2au/3W1HPhk/Yr6ozu8Z6L01AS+xxg6dKldO/evcJtjJpREzt4vys327RpU9LS0gBYtWpVtbebOnUqRUVFdOnSpZyoABg1ahQAX375JWvXrq2dwRqGYRjGAYAXFRV1f45HdPfpdGDkyJEkFxZWuV309smFhQwePJh0JCDuQx2WOyND2ucn+FCdQtSj4APgJpTsPAAZ0p8gw70XEiFNKdvx+SXCJO5X3P6TkfGcQGiARxMrKlqhnIQ33X4KqFhUpKMwqKtRrsdLqIFbScwxUpCRnYsM9lh6AKciI3w3EgefuNdiRQVITAxD1aECZLB/jMqongb8A+WkbEUC7S8oiTlaVJS4c29A1aKiEwoHuw6VgO2Gwsi2UVZUQNihuwTlslzqxgW6lhXJgxtRWFs79/8ct11VogL3ui8ja6Ki7tnvhEVxcTE5OTkAtG/fvtrbzZyp4mdHHXVU3Ne7d+9OmzZtAJg1a9YejdEwDMMwDhSiRUV0n4XKxEV0/LoXAxnIMK2uKOmLjMUM4GTkZRiPyp9ejETAR1Hb9EQ5DJ8gMXM+MhQjyEBNcf93QJ6LtYSGaoCSmX+OPA5PuEciSure5dbJJ+xNEU9UNEc5GP9BBn8jKhYVPrb/BGToVyUudqO8j11xrtehyIif6cYy0Y01VlR4mkT9vcat3ws1+ANdr7boen2OvCpnRm2T4M5tN2GfCT++gLKiwieP3+r2uwwJjYCyosLTADWoexjdo0eBcW79YuIzFnkb1gOzZ89m6dKlFCYnV8urBro/hcnJJir2A/Y7YfHUU0+xefNmmjRpwhlnnFHt7ZYvXw5Aly5dKlynU6dOZdatiJ07d5Kfn1/mYRiGYRgHGrGiIrb7czyREG04X49CYLa77c8GLqxgu+jt+6K+A2e77XYAP0PG46XAr91YZqOZdFAozFfu9fNQcvIU9/89KLl7ATJq2yFxsZpQVKxAhnljt+xhlPTbDBk7xcjL4f+uSFSMQUJoAnAz8Tt0e
1HhqUpc5CCDuyXly3H6Y45CXoBFKJG7BWEieqxQgdDDk4AExSY3ZlBjwOlu7KOB7oTJ1BCKAp+sXuyuRUskMOYgb090RaoE4Hfo/n+M7kOsqAAlTj+ChOMT7lgPoiT1xDjrjyVsVjd79mwyMjLo3r17jULuAAoKCkxU7AfsV8Jizpw5/OxnPwPgt7/9LW3btq32tv4N2KxZswrXSU5W0bmqhMK9995Ly5YtSx+dO3eu9jgMwzAMY3+gIlHhiScu4omK95EH4WqgH+q7UJG4iBYVA5C34irgFNRvoRMywFsjgzcdxe93R8aqFxWTgaluP2cio/xy1NthOqoA5cXFXEJR8Yo71h2oatE4NGvuS58mIo9JPFHxLTJ+W6HwoXjXKJ6o8JyAKlgdi7o+e3GRQ8U5Fv6YLdFsfVt3/ocgEZVAxeLCh3YlI+/OSHfNzkWdrLug/IoM5DHxRn20DyARiYsp6D3yOQoFa488IdE9NCAUF8e4bR6Mef1JJOh6oepei5GHZj4SG4/HrB9PVBgHNjXuY/Hzn/+cSZMm1fhAY8aM4YQTTqjw9dWrV3POOedQWFjIiBEj+OUvf1njY9QWv/rVr7jzzjtL/8/PzzdxYRiGYRwwVCUqPNH9BCoTFT5+ILad2KuEHbH99tciA7kjMjCXA13R7P+HaFZ9JOra/B0SE2+gXIYLUViPFxVnEIYDjUHx/5cio/ZmlAexlrKiogvyjvR0Y2+CZv99eVXfwG2He80nFx+CysGOQR6VaHEB8iyMJL6oADVw242SuN8A3kVGfSsqTtz2x/wTEiB3oc7f76LSsiCvgc+18InR0aLC9+k4C3k7vkK9Llqh+5eKQqE8Xqjgnt9z4+2IQtFw2xMzBs+zSCRuQaV+W6BGddGi4mX3eBOFruW6fXkhciMmKg5Waiws1q5dy6JFi2p8oMLC2JSpkPXr1zNkyBBWrFjB6aefzssvv1zt5DBP8+b6aqisJbsfQ1UZ7UlJSSQlxXPwGYZhGMb+T2ZmJu1RZZ2rkUH6LZqNjsUbys8hL4IXFe+g8JzYoORUZCgWI4P5CcrP5r+DZrw/dcfujsKLDkUC4F03niuRR6IbmtEe6tYdGXPcMW6fF7r1U5GR+hPU68KLituQF6QTqlp0LqGoCAhn6CMoRGo7yivwFZiuijperLg4FjXum+eOEc2n7rU+yLjPQsZ8KyoWFZ5G7nyOQkIE5NV5Gc34pxD27vB5ItsoKyoiyLvQzY31XZQLcRLKYznM7ZOo9QNk+E9EVagGI2FRmbgYi5LrS1C+zAPA/Sik7X3KiopxSLQtQ96YWbNm0b9/fx5018hXcDJRcXBRY2Hx/PPP8/zzz9faADZu3MjgwYPJzs5m6NChTJw48XsZ9b5T98qVKytcZ/Xq1WXWNQzDMIyDkZKSEiKRCO+gWf3eqJJSW2Tgx3IjCsE5DHkA/otCngYQegw8C9067VESdmNCUeJFymnI67AKeQ68z38YCnmaBPwSiQJQxSFQqEw/JB78rPpTlBUVDdGs/ndIjPzO/f9nN572SBzFiopI1APkLQAZ6lC1uDgceRWWuf+9uIgVFa+ixnRDqVpU+OZ4N6PGeAuRN2Ar8kykEiZqJyBB9x3KbYgWFUuQ8d4HeYfy3X4ykHga79b14mK3O+5E5Onwnbn9/fC5GtHi4k3krQB5iJajXI5/uvWPoryoyELiYdasWWRmZpZ6tr4lTNQ2UXFwUac5Fps2bWLw4MEsWLCAIUOGMGnSJBo3bvy99nXEEUcAMG3atLivL1u2rLTb54ABA77fgA3DMAxjP2TmzJlEIhEOO+yw0mW+DOc/0OxwV+Ql+Ni9noNmvr0B77dcjHIKFqLSp+uj1lmIZsYPRZWiQGLiNcqHCJ2JPAQrcJWVkHfjPeSRuCxq3QZIHPwceTrGuOXxREVjt8++qPxsW3deFxJ2gx7p1kumrKiI5RBkCH1L2VKwVyGx8zESF54Ut8165OmIJypORbP/yVQuK
tai65uJvDM9kQDMdmNJp2wI0hvu+nyG8kS2uXPajpLXD3HnvBXloByPxN1x7u/xyJORhwz/F904R7hrtM3t9zyUdD8R5Vx0dWN9GgnFacjL0su95sXFy27cr6IwrVhR4QmCgA2RiImKg5Qaeyxqi82bNzN48GDmzZvHkCFDmDx5Mk2aNKl6wwo488wzSUxMZOXKlXz22WflelmMGzcOgEGDBtGhQ4c9GrthGIZh7C/MnDmTI444gnbA1k2bSnMevv76a0B5BTNQxaK+yMj/HZp1zkAGva8a9BWaFb8SGcqvuGMMRwZptKjYirwBCYSiJJYLkaE5F3kpJiBvxp9i1vMlYC9CRut4d6xVxBcVOcio7Y8M+L+i0KY7kSDIQ8atz0OInUXd7M4jD4X17HbnDaEYuMo9R3suViIR0g6F/3xLWVExDAmmFchQ34rCoXDrei8Jbh8nI/Hyjrsu36Fr2sE9PG+ga9cDeQ5AyeK48+7sjrkW5UosQYb9KUgA+OL9zwBfuLEnuTE3d+PId/tqTui5eBH4Gr1XOqL8lK+ijt3LPa8mbJ43FIVIzaO8qPCUlMSrc2UcDNSJxyI3N5chQ4Ywd+5chg4dWiNRMWTIEHr37s2ECRPKLG/dujU33XQTADfffDPffvtt6WszZszgb3/7GwC//vWvMQzDMIz9nfnz59OoUaNK1/Gioh8KA0rFlSmNRDjmmGPIAO5GYUr/RbPhi5HReQzyQDwNzEJN1Lyh3BuFEl2EYudfdtscgkTFTDSbPZP4pVCjuRAZp88gAzieqPBJyQmowtElyEiOFhVJhKKiC8onaIU8BIeimflL0Sx+S2Qk+yRnL1xw53sxCrvKd/v1SemxTeyucuf7FhIAS5CH5AfABmSwR4uK0e4curpx5CJx8Yw7p2ei9p2HDP+jkZB6CnlC+iIDvRDlsXhRMRx1HR+GxMVn7vy2IW9Hl6j1xyChMtQdvyVwBBIr093+16L7/zDyTjRDIsx7dvqgXJUvkdjKRWFqx1CWXm69ZUiwpCJPUl+gf//+GPWLOvFYXHfddcyePZtIJEKbNm1KBUEsI0eOZOTIkWWWLV26lBUrVpCXl1du/XvuuYevv/6aL774gtTUVAYPHsy2bdt4//33KS4u5s477+Tss8/eG6dkGIZhGLXG/PnzSU9Ppx1h1aVYokXFLShRtw0SAcuQ4R1bEepPyBi/Dhns76FE7edRcnI/ylZ+OhfF6j+HwmZORIbpZjTjvgmJiwFUPFP5ODKEFyGj40mUAwHlRYXnAnc8n2idhLwFOYSiwoc4fYkE0efuPH7klrdy+/LdEJJRjsDjyKh/AHlpbnWv93Bj3IryGhqjkKFPkTFdjISED086DF3nl5DnZ3TU+L24WIHExPNIkPgQr3ORod4Q5Vd8hhKjb0ECKd+N921CUXG6O68RSExMdfvy8RmpSPy85K7XYe4a5LlrtwTd4wboHl6CBN97qDRuK8LcliUojOkkN+5x7nxepGynbk+056Ij6kjekrDaWHUb3RkHPnUiLHyuQxAEvPzyyxWul5KSUk5YVEbTpk356KOP+Oc//8kLL7zA1KlTSUxM5Nhjj+XHP/4xF110UdU7MQzDMIw6xIuKfmhG+wPKG2fxRMUGlFsQQQbgYW57zxwUanMKMmwT0Ox3QGhYDiP0QDRARulqNCM+DxkN6cjQ7okM/UVULC4eR8blfLddIpohB4VgxRMVfsa8NTL8i5ABv5pQVHhB8gIy1i9FHpVH3LbxxMV45K1ojrwB8934ElF+yGpkNLdw+3/evZ6BvCheVPixnoBERz+Ud+AT2D0JKMTpcSTEbkEeo4eQV+Q85DlYiK5zT2TkJyIh8QllRUUD1ODvMXfeGZQVF+8gIdIHJVY/5q5BW3SPGiMBNtVdu83u/2JCzwVIoMxx12G9u26ViQqPD7nyDQljSxmbuKgfRAK701WSn59Py5YtycvLq7JUrWEYhmF8X7yo6IuM5V7IAH7FPQdBU
KGoSEGhLiuRETgBGb1/RF6CN5GBPBoZiiCjvRgZtC8Ag1AfCm8cNkC5Ea+ixN6taKZ7BGG+QA4yXA+lrLjY6I73GZQpRXsKmpH/OQq1akDZpm1QttfCdpQX0AzNuHtR8Zw7r+NQgvpulHS8BnkBfuS234mSxh9DxvV3UdfSl8m9HuUVtHaP55Ch3ReV3I0VFZ4SJC7eQV6AUYTiIlpUgYTAULfvQ1ETvxvduP+CvA3t3LoDUVjRcPfwouIJygq1E9359Ubeh5NRfszTyHPRxF2fVMKQrM5uLNORV2SB219nJNpOcPvZgERQZ+KLio1UnFsTTfR1MJPzwKQmdnCdJW8bhmEYhhHiRUUaMnL7orASb9C9QtjpOlpUrEeioiMyyDsiUQISF5ehZnCxogLCvg4jkfH6H7fciwuQiPgK5RkMQUZqdBJyinuO9VwchgzULcgAvhGFaaW4477qjn0BZYUEMX83dtssQzPpvZE34UmUN3CLO0ZrlF9wHBIFScjQfwqJioVuTOsJDVwvLp50x7kRhf34src/pGJRgVsW3fp3nHveTlljevny5XTv3p1NyFOwmbLN4jogA38NCgGbjXIkoj0V0aJi7ty59OvXD9y1XYDeMxe4a3sbypuZgsKxRiBvxlokuG4E7nXX0QufjSjEaj0SnP+jYlHxOOVLDFfEjei9kYv6iOXn51exhXEgY8LCMAzDMOqYaFFxPuoJ0BvF/beMWm8cmnkejAxoLyo6EXoYEigrLuJ5KjzRZVjPQeFOnyAB0RfNyE9CXonRyACOFw6T4p6jxcV77rkLmk3/KTIuL0ShO88goxXiiwvc8Uuixr3Mnc8E5L34JzKcfcL1W8iw2Ykauc1G3oQOyIvx99jzdx6LzkhU5bjzHwjch0KmmlN5pZtYcfGkG+caQgHTrVs3li1bpl4OzrCORCJlmsWtc4+GyINyCarC1ICyosLvM1pcXEkoKhJRvsY0QhEzHr1XgDLH9CKladOmdO/enY7Im/IG8ohUJCr8WLa6ZZWJi7HuWOuBZVlZlaxpHAyYsDAMwzCMOiSeqMhAxnQxCk+J5gkUJ98aeSM6EQqKwD0nIXExDHkYBhBfVETzX2SMnoIETSKqFvQqCuE5ExmSC5AHI7Y6UAoSDuuR8b+DsBdDI+Q5GIzClQJUcWkJ8sQMpnzPBy8qfA5GCsqVmIDyIf6JBIUXI+OQWBnixv9T5CHpi/o3ACxFxrL3/ESHaP0MCZNTkKhoQFlRV1noT7S42IUEQizdunUrM1sf2yxu1qxZNGvWjNTUVJ5COQ5XU3EoUXp6eqm4eAYJoB8hQ/5fSCQuXryYoqIiCZriYnDn7o85d+5c0tPV6m/ZsmV0796dZkj8/Y7KRYUff7TnJRY/ljlu/926xb6bjYMNExaGYRiGUYekp6dzKCo76kVFdGWmWHFxFYqf/xh1a+6CjGCPFxfrUD5BP2R07o6znhcX/0U5FiehMKjmhLkVXlS0dQ+ILy6WoipRycjw70aYSNwDzcJ/hsTKSHfMDSjPoiEKv/GGrBcVEUJvQQPU/foKFOo0FBm6u9CM+OPIWN6JkriPQt29fbfp6JlzUIjTaYQG8d9Rv40+MdcJqhf648VFH3cuLwIJCQmV9mzwxnl0v4fFixeTmppaapC/TcX5CdHi4iGUHP6e227x4sX07NkTgGInKvwxGzZsyNysrFJRAaFXpXv37oB6m0RXFIsncCoTFyYq6icmLAzDMAxjH7N48WIyMzOZO3cuoNn9WShePlpUJLrnYhSuU4QM6XQUcvQyMr7Pd+t6obADNZdrjkrQJlJeWIDExWsoB2ELChnyouJlNPvvRYUnDXkuVqHk8tZu7BvQjH4mCj9ag0rAFiNvw8mo/8NEZPxuQP0kTndj2+nWbUzF1aJ2obyJAHgU5Vf8EAkJ31fiNTfGqYRGTrSRm5WVRSQS4fDDD+c995o3oPtRnpqE/iSgMCIvYGZXI/QnVjD07NmzVFxsomxOS
DzS09PLeT+iRUU8du3aFXd5tLj4l1tWmdfEjz9WXJioqL/USYM8wzAMw6ivLF68mF69etFyxw569OhBT2TQ90RegDdi1k9ERvcGFJqTjgz9a5Gx/jIypovc+hFknDdBnoNClPxbTPkcBi8qNrp9TUEJvZOQwBlMWVEBarC3HoVatURG502oypBP3D7NjSHHHduHYZ2FxMfHlK14lOjG+jnycMQTFTvdowHy2lyN8it+igzeb935XUTlouLwww8nIyOD2bNnM8e9Npb4xBrU81COwuMVrB99rNmzZ5ORkVHBmpXjxUV+06bVrqQUBAH5TZtWKSqqwosLf23upOqqTtHX5k5MVNRnzGNhGIZhGPsILyp6Ik/A28hA74sSnDuiGX1QLgKox8ESZJA3Q4a+z4E4BM3i+45Q3nOxCnktfuD+ju4G7YkWFdeiSkLjkVHeCQmI/yGR4MXFYmRgHobCoJ4kNDoL3HhudMc7wx1jBcoXSEN5Fu8jQXGhWy8ZJTuvQEbJEjfO7lFj9aIiAQmVle6aXYZCuHIJZ85Boih25tyLCo8XF4cffniZ2XnPnoT+7Imo8PTs2ZNt27bVaJuarl8R0Z6L6nhNoHzOiImK+ol5LAzDMAyjlsjOzqZJkyZxX/Oioo37vwgZ9KtQGdCjkbhogsTFG0hUjEF9DiYT5kqADPndqCJQO2TEf4sM9OVIpPRETeJWIY+HJw8lOi9B1YfOc8suQYLHN6N7G4U1baByURFvNr8BEjoBMrb/4bY5EfiNe92LiiUo1OtEFFq1GDW889cpVlRkE3Z4vgOJDN+ELd7Meayo8FTkuagq9Cf2XGtbVOwPeHGxrXnzGnlNtjVvbqKiHmMeC8MwDMOoBbKzs0lLS6Md5TsNR4uKbih34SNU9vMQVOL0aEJxcTRK/p2Ich+OdeskojCgtchgP4ywlOjFqH/CKmR0t0BGeSoSHMvcWDojEXEMEhALUNJ1BBn6l7j1phKKi3x3rE5uu1dQXkNVs/leXPw/JIz6EF9UdEQhXrj9/wUJqytRTkWsqGiP8iEilO2T0LBhw3Iz5xWJCk+s56KqhOnYc/WlWw8mUeGJrWRVHaxPRf3GhIVhGIZh7CFeVGQg0fAWobiIFRV3AJejMqcfA0eipOD3UeLz0SgEqQB5G36BEpQno8pEq5GB3hB5J75EouJslOfQnFBUtEQGfLIb5zJ3rFzCUrWPI2/Cb9z/UFZcNELejWHIoJ+AhMUAFIYVLaLiiYv/oNyNBUggvAJcR3xRgRvPOGTY56NE7pvcucaKCihb7WmuS5b246hKVHiixcX3Df052ESFYXwfTFgYhmEYxh4QLSpuR0ZyEqHRDZQTFa+hHIibkMeiCzLY5yFjeiWqpHQjmvHfjYRDMTLUI8g7kY1m9M9HAqArMtZBnopkwrKy3dzzSne8rqh0a4B6Y9wL/JGw2Z0XF1NQGdXZKB9jDnCNG5sPGapIXPjZ/HnA6NGjee6553jAnXsa8UVFbJ+Eh1EoVDrxRYX3MkT3ZPDjqAleXGRmZhJUUiI2miAISEhIYHZWlokKw8CEhWEYhmF8b+KJig0oZ6EB6jexEhnxdxCKiqmoQtIQ5IFIRGLkJWRUp6FSqpchI7oE9aQY6v7/P5SLcZ3bjy8j2x4Z4atQ+FMzyvaCSEH5GEnu/wLkiShGRvrvKC8uTkFJ4K+7464DBrnXffJyReLCz+YvXbqU7t27c+qpp3LNNdfwKMovOS3qWlaWLP04Cov6JdUTFd+XjIyMSvtOxKOm6xvGwYwJC8MwDMP4HlQkKvoib0FLJAbeQnkDIygvKhKB3kgYRNzyEuA44FLkTdiNvB+73fpFyNAfhMKoAncc38eis3te7sbZAiVEe3ERLSrykafjDLcsnrho5563IQFyBmX7PVQmLpo3b87SrKzSpmtXX626S9dccw3PIk9OdfskPO/Wj632VFuiwjCMPceEhWEYhmHUkHiiYj1qGOcb3LVF+RLtUQO5v
6Gk6xGEoqIBEgy9CPtQ9EAdqqcgz8d3SCgkovyIyciLMBQ1oWtH2GCuMRIKPlfiORSKdIV7+B99LyqSkVdjKxIMi1Hi8mdu/E3cvl9E3a6PQknbsY32KhIXBQUF5a7d1VdfzdFHH02/fv32OFnaRIVh7F+YsDAMwzCMGpKZmUl7JBC8qPgBykFojPIZGiOjvwGqaPQlEhWDCT0PuW5/SSjXoh2alS9AydzHE3bOTkTN7nqiErC7UG6FT9T2vR68uJiJKk8tR6IgQOFEOygrKopRkvhzbr+tkDekMaoe9SIqeXsk8UWFJ7o6U9OmTdm+fXuF1y89PZ25c+fSr1+/750sbaLCMPY/rI+FYRiGYVTA0qVLSU5OLrd8x44drEMlUd9AoqIYiYMZwBco5GkFMuIPBW5GXoaGSADkIk9EEjL2vTh4BSVKn+32l4RExTKUO9ESVVgaj5KiA/d6Q+RdKEGds18CDkd5DNuQOBiDvBNNCbtxNwL+i3pMbHSvf+zGPQ54yB27PxWLCihbnSkrK6uSNYUXF5saNKhRn4RNDRqYqDCM/RTzWBiGYRhGHJYuXUrPnj3j9qXwrEdhPM1RdaZ3gDdRKNJxqPpSHupFsRJVcuqLhERDJBIaEYYyfYaqQx0PnIqM/wRCUZGNStGeCHwK/NWNYzjyYDREomIcKknbyI2vBImYR5CAGY1CrLyouN/9/yDwAfJwZCGvyTx3jIfdWKK7TXuicx6ys7NJTU2Ns1Z50tPT2bVrV7XW9dR0fcMw9h3msTAMwzCMGLyoyEBJ1L6zs2fFihWADP9FyNj/Bwo9OhKJiBUod6Iv8mh8iqoqPYZ+fFuiHAafWzENJXqfivpZLENeg2WEjeHeQqJjLOo6vQ6Ji0lun28iUZGOhMVc5PH4DoVX+epPz7llLyNB8B3qY3G+O3YO8njMA7766qu43aY931dUGIZx8GEeC8MwDMOIIlpU/ISyVYsikQg5OTmkpKSQjnIs/oeSnjeiEqrnIDHxOjL0z0OJySuQKJgOvADchTwY24CvgHeB090621GFqRko1GkpEhUnoLK1vmlcMTLq/4EEzgxUsamXG9dwlDg+HwmRzu4Yj6Pci/+hcrcXIVHxpDtPn/vx1VdfcfTRRwPxm9+ZqDAMIxoTFoZhGIbhiCcqoGzVIy8qfJ+JXwGfoFyGdsgzcDbyRowHPkd5C6NQ5aV3kLB4FLgQiYq3kSAZjrwUoPyK5cjj8C6qLvUTwr4UecBIFAL1MPIwnEcoKo4GNiOPx8XAmW6/vwf+nxvbVuQx+R8SHt+3OpOJCsMwwISFYRiGYQAViwrP9Sj06FXkVbgQJWjvQCLjEGSkbwC6o8Zzq5AX4ERCEXAaSoJ+AXkcslFDvBHIO+FTxRPc8lXueP8DnkZeEZ+30Qj4EfKGTEGiI1pU/M/t90dR5+FF0v8hL8v777/PkCFDSqst1bQ6k4kKwzA8JiwMwzCMek9FoqIEhSo1Q5We+gMdkMH+TyQ0LkAlZBcDS5Cx3whYiJK3b0EejdeRxyIJeTQaAM+jfhGLUH+K4e6Yye5RgpK6fU7GVCQ+fMfqQ1A52G9Q+NVq1IyvIlHhuREJoseAIUOGkJ2dTWZmJkElJWKjCYKApk2bkp2VZaLCMIxSTFgYhmEY9R7fl2IoZUXFPNSEbpd7pCPPw2Goi/a5KGehEBn5iW79L92yHm6fuSjM6ES3bQSFPkVQInVblIC90+0zAQmI1W7bQ5AXoiUKYSpCoU1PIc/HESgMqiEKvfofypmIJypAXpABwDXAs0CvXr2qXfLVU1mfCsMw6icmLAzDMIx6T2FhIZFIhHdQYvP1SFSsQ8b8ctT0zs/Nj0RN5JqgsKQi1KiuHfJILEeiohsKUZoDnOVeD9wD5LmYB3yNvAvvIG/HWUhULAdau301R2IBJC6mu+1w400CJrp9HIPCp+KxCuVddAJOcucX2zHbMAzj+2DCwjAMwzAIcwd8VaQjUSJ0CkrIXoMSm
/siYXAoynP4Dhn9zZAHogsKl2qMRMVE5F04zx3HF60NUM+K6SgEajTqsj0ZhTJ1p6yo8FzgXn8ICZpDUFWon7t9HYY8JzcAV1G270S0qOjuxhLdMbtZs2Zs27atZhfOMAzDYX0sDMMwDMOxcOFC5qPwohwkKkA5DS2QsJiPRIHvC9EceS5K3LoJaNbOi4ozkKgojjpOxL32IgqVuhZ5KFqikKe3UPft7u640Sxzy092xx7plv3djfNb1GH7S8r2nYgnKqDmHbMNwzAqwjwWhmEYhgEsWrSI3r17kwEchXIiDkEeApDR/g4SFiXIm5BE6KkI3PKGqH/FROSJGI5EyiokVDqj/IzxKOn7MsKwJ+8JSUXCosgt8/svQuJgOuqMPRSViW2LBMRu1PF7HfJA+KZ2O1BORTxR4ftQLF68mJ49e+7BFTQMo75jHgvDMAyj3hMtKn6CyrqegrpY/zdqPe+5WIhKte5AoVDeUPfdrychQXEG8gQsQV6NHORNGI/K0V7hXl+G+lREkHD5AAmMFkiseHGRiErLjkPJ5Peg6lJXIQ/GHCRebkVhXA2RuHgM9cswUWEYxt7EhIVhGIZR74lXFepWlGz9Cmok5/HiYinyCuSjClAJ7nkiyos4A/W0WI08BR1RWdiHkYi4DImT1e7/FOTVmIISuBehErW7CcXFG8iTMhAJhN+5MY0DPgZ6Iy/IzcBtSFxE3L6eBZ5w65uoMAxjb2DCwjAMw6j3fPfdd6xDRvtjbtmDyJtwEdAqZv3TgKbASmS4FyBR0QxVd1qMjP1VKJm7Fwpjeh31xdiKvBYrga6oetS7wIeoUlNnFGY1Dnk/drvn54AhSCDcgMTESDfWjsDfCL0SNyBx0ceNeb5b705MVBiGsXewHAvDMAzDAObMmUNGRgYPol4Q24BRhDkW0XyNQqB6oYTr7Uhc+OZ1O5FwiAAnuOeuyEvxMDATeTyuAo5FAuE1VCL2PPf/q267F5BQmY76YFyDPBij3XEeRGFRzxOKCl/S9noUtpWLRNI8KO2YbaLCMIzaxjwWhmEYRr1n7ty5ZGRkkIEM/ulIIFQkKlYjUdENGfAtkbeiAIVGHQtc4tZ9w21XjITKzSgvIkJYuWk86mlxPsrbOAV5IhqivIrPUFL22W7/uOfTULnY1ii8KVpUADwDvI+ExLRp0wiCgPymTU1UGIaxVzCPhWEYhlGviRYVhagi02VINPjSrp6vgbWE/S0CZPjjnpORMGiKhEFjlHNRhDp0N0BehESUIF6AQpSOB05F4mQXSvJujQTGR6h3xgnuta1IpGx3z5e6Mb2JksG9XHgG+DcKeZo2bRpHHnkkgPWpMAxjr2HCwjAMw6i3xIqKZsCPkRdgHqrmBBIXX6Myrn1QPkMeqtzUAIVFNUCei+bIc1CCPAolSFzsRh21C1DFqN0oofpolMdRgATIGvc4BImXbkA2Ejl90Q93vttvCfAS6kMxlFAEPUN8UWEYhrE3sVAowzAMo14SLSoGI6N+GGGn6nTkeVgGfIFERW+UWP028FvU1wJUxWk38ig0cPtKQB6N4UAGMv43oG7dIJFxJRIsf0OiZiVK+G6HelNkI09EAurm3Q9VnDoMiZovUM7GciRCEjBRYRhG3WHCwjAMw6h3RIuKn6Cu1Yeiykxj3TrzULhROxRq1A6FGb0JTEaei9eBqUhMNEShSQtRXsMSt/x9t6/BqNJTQyQ4GiFxcazb5zcoFyKCRMRnwAQUFjUdhUz5H+1Wbgz3AwtQQvZDwF2YqDAMo+6wUCjDMAyj3pGRkUELwr4Vu5Ax/zkSGbnIy9ATeQK2Ie9BFjL4T0TVmz5A4gLgTJT4Pd/9v8Dt7wskKk5EYqIJquZUhATFe0i0fOe2S0VC4iVUDSrXPR5CouNGyvahWLFiBV26dCESiZRWfzJRYRhGXWDCwjAMw6hXLFiwAFCY00OEXoh1qJTr88BTaPb/NLfNcuBJ1PV6JErEzkOCIUA5FGtRiFKi2
/c0JA6ORaKisVteQpjU/QgSGeuRYDkHiYmnkZgZgkKl5rnHg6jZ3XuUFRUAQRAQiURMVBiGUWeYsDAMwzDqDQsWLKBv375kIG/F2yhZ24dEdUThSlOBF4EfoCpPz6P8hkLUGbsPaoS3HeVl/A+JhEtQOdmJqKFdMfApClu6Bv3o7kLlZZ9Gid6tUfnaD1BS9hLkzTjf7evnKNRpU4MGzNu9u7QPRbSo8ARBgGEYRl1hwsIwDMOoF0SLip+g5nTtgP+g8rHtkRfiCtQz4v+A3yBxMRgZ+segUKn7kNfhHCQ6vkQiYSaq9DQbGIS8IOvdMZq4ff8XGIM6bPcDXnbLC5EISQAuRKLiKSR+1gPBrl0AHHrooayYMaOcqDAMw6hrTFgYhmEYBz3xRMVyJBRaoSZ2bwEXI29EH+SFeAeJjHOQMOjmth8D3IvyKN5HYUvXoYpO/0O9Jc4CNgIt3LpjUBjTp26fR6FQqcGoOtQW4Ah3nNORqHgA5WxEeyI2b95ci1fGMAyj9jBhYRiGYRzUVCQqVqG+DycjcfEcCmFqg/IjNqAwqfYoPKkpkIaSuZNRmdeXkCcD4GPgpygXoh1KtG6HwpruBv6EvA9DUXjTj4Eu7jlAIVg93HEfQAnasaLCMAxjf8aEhWEYhnHQEisqrkY5E6uQgOjs1rvYPT+HBMAu1LRuMErYXo9EQlNUvelYFAr1AqrYVACMQqFNu9z+l6DE8ERUdvYvKOSpG/rxvRDlcTwNXI6Sv79Flaeex0SFYRgHHnXSx2Ljxo385z//YdSoUaSmptK4cWOaNm1K7969ue2228jJyfle+01JSSESiVT4GDRoUO2eiGEYhrFfk5mZSXvCsrKgUKMuSBCsiFr3ImA0Eh6noXClHUgcnAp0RcnaSajD9lHI4zEdlZo9CzW42+b2v8HtvwT92EZQg70Gbtn5KGTqXZT4vRzlaExCSeR9gUgkUnsXwzAMYy9TJx6LO++8kxdeeIGEhAT69evHiBEj2LZtG9988w0PPvggTz/9NBMmTGDYsGHfa/8XXHABycnJ5Zb36NFjT4duGIZhHEAUFRURiUR4B3icsKt2N/ec4559GvRFSITkIVHRBnXELkaeh1ZIXASoed37SFRchkTFaiQ2OqME7jWoBG3EbV/itvVC4wLUsXscqgj1hTueH+8DSFyY58IwjAOBOhEWbdq04Q9/+APXXnstHTt2LF1eWFjI9ddfz/jx47nkkktYsmQJrVu3rvH+77vvPlJSUmpxxIZhGMaBiu/v8KD7vzJxUYSM/4Yo7CkBhT6VEDa3a4DCl8YhT8avkWdiDfJqdHL7zEcio5nbb+C2TSAMFyhCwgRUHSoF5WjcgTpog4kLwzAOHOpEWDzwwANxlycnJ/PUU08xZcoUcnNzmTJlCpdffvk+Hp1hGIZxsFGRuPCeihyUJ9EUGf9tCPMpipGoSHTrvohExcnA75CgWIWESlckMlYjkdAV2Or2VeS2b4hERpHbd0OU49HA7Tsd+IpQXCxCYVsNGzZklys5axiGsT9SJzkWldG0aVPS0tIAWLVqVR2PxjAMwzhYCIKgtHv14ygECSQuSoBslDydiISE75Cd5P4uQj0onkLlaP+MEq7XoNCntkhgeFHRxa1/E2qQ18Ad04dDFaNwqMZuHBegvI3NSGh8BZyBumyvB7KysmrzchiGYdQ6+11VqOLi4tLk7fbt23+vfYwdO5bc3Fx27dpFhw4dOPnkkznppJNqcZSGYRjGgYj3XHi/+XUoF6IIeSm2AIegH8cWSGA0QALgDfc4AgmQqW6blijZej0SGSlIVLyGOmx3RB6OYuA8t6+I23cxyuVogsTHdJQ4/iPk5fB9LObOnUt6enrtXxDDMPYq8+bNIzMzs0bexoYNG5KVlXVAfub3O2Hx1FNPsXnzZpo0acIZZ5zxvfbxxz/+sdyygQMHMm7cOHr27Fnl9jt37mTnzp2l/+fn53+vcRiGYRj7H15cPIf6UuwEW
qOwpzQkKAopKwB2oGpNnVAPihdQ2NIZSEjMdftJoayoGAwMQ523X3H7GokERZIbT7Fb/0WUs/EjFJZ1mhvTE0C/fv0sx8IwDjDmzZtHv3791NfG5UllZ2eTmZnJ7NmzyczMZPv27WW2iUQitEefeT+h0KRJE7Kysujfvz9ZWVmkpqbWyflUh/0qFGrOnDn87Gc/A+C3v/0tbdu2rdH2Z511FuPGjWPJkiXs2LGD5cuX85///IcuXbrwzTffcMopp7Bx48Yq93PvvffSsmXL0kfnzp2r3MYwDMPY/5g5c2a5kq2RSIS+KGk6WlSkoJyIFqgB3g6UgF2MPArnoDCn91COxonAFGAZKi0bQSFRXlSc7h7LgB8iofAiCqX6AuV1JLn9PYd6YHhRkeuOeZvbRzs0i2kYxoGBFxUZqKy0Lx+dlpZGi+++o1evXrTcsaPM91MkEiEduATIQOIiEonQ+rvvSEtLo+WOHfTq1YvFixfXyTlVh0hQwymQn//850yaNKnGBxozZgwnnHBCha+vXr2aE044gRUrVjBixAgmTpxYa/W7c3NzOfLII8nJyeH222/n3//+d6Xrx/NYdO7cmby8PFq0aFErYzIMwzD2LjNnzuSII46gHQpT8p6Kvqg87JGUFRV+Cilwj3zkuWjsHgmoDOxkJBJORCLiE+SVOAxYikTD6chbsdQtPxTlbvwe+AwZDoOQuHgThVdluvUORaKiLfAM8C9gDhYOZRgHCtGiwjfmfBy4H1gHDAA2AScA/0PhjqDCDbeiiYuxwD9R3pZf/0TgU+Qhzc7O3meei/z8fFq2bFktO7jG0x9r165l0aJFNR5UYWFhha+tX7+eIUOGsGLFCk4//XRefvnlWm0K1KZNG+644w7uuOMOJk+eXKWwSEpKIikpqdJ1DMMwjP0XLyoyUF+Ktwk9FTcBPVBYUjxR4X99WrjXfVhUExT6VAK8joTGLUgwvIuqQk1FJWZPQaKivdtPA/faOnesKe7v/6Eu3j9HDfIWIy/K8ZioMIwDkXiiIkCTCd8hcbENVX3rgb4vHkOeUS8qQA00dwJ/R99Bt6JQzTTk9ezVq9c+FRfVpcahUM8//zxBENT4MXz48Lj727hxI4MHDyY7O5uhQ4cyceLEvWLU9+nTB5BnxDAMwzh4iRYVP0GzfodBqai4GYmErW65LzkbLSpACdq+mV0emolLRN6I85CYmAxcDwxBs4hHAxtRxahWQHNULvZVt+7hwD+Q8PgMCY9NqNleDzeefNSJ20SFYRxYVCQqvPfzB+h7aDiQiiYjOqEqcOnIWwn6vtmGvkP8+mlIhPQHrkWhk/tjWFSdBmxu2rSJwYMHs2DBAoYMGcKkSZNo3Lhx1Rt+D7799lsAmjdvvlf2bxiGYdQ9saLialT5aTvKVzgTGf4RVK2pGfoRb04oKvJRF+016Ic+QNWiGiFvQyIKfUpAVaJAYUutUYhUW+TRmAQcg3IsfKL4EJSnMRq4AomJscBLSMh0BbJQFam5mKgwjAOFqkTFx8CH6HuoCwp/ykXfJ6OQ5/J15NU4FXkz30d5V73Q98EyJC5A4mJ/9FzUmbDYvHkzgwcPZt68eQwZMoTJkyfTpEmTvXa88ePHA3D00UfvtWMYhmEYdUc8UfENynk4G7gBJUbnoTKxuehH39MSiYGngYFolnAtClHoQdih24dNDUHhTo8jUTEQCYQ84CTgS9STYiVqprcY5WMMRHHTg1CI1HVu3QeQcfElJioM40AjMzOT9ij0siJRcRGafPAsBnojsdAdhVm+hsIiV6AKcme5dXPdsn7o+wj2T3FRJ1WhcnNzGTJkCHPnzmXo0KE1EhVDhgyhd+/eTJgwoczy119/nenTp5dbv6CggDvuuKM04fzOO+/c8xMwDMMw9iviiYoAzQwegX7YX0D5Di1RiEEL5JXId4+XUazzKtScLhvogLwaLdy+ctzrjZBI+ADNMF6MjIDjUaWn9u7/FchbciRwJfC5G4vvX
QHyWHwNzEPhUiYqDOPAY9euXaxDBR4eB3ahcKbtSFRkUFZU9EYipDdhU85hwIVoMmIkoahYiL6r2iEvamfUb6c/KhLRDgmb/YE68Vhcd911zJ49m0gkQps2bbjpppvirjdy5EhGjhxZZtnSpUtZsWIFeXl5ZZZ/+OGH3H///XTp0oWMjAxatWrF2rVrmTVrFlu2bKFhw4bcd999DB06dG+dlmEYhlFHHHHEEeVmCzejxncPIrHxkFv3cvccQSJgM/AR6jOxG/g1EhCvoDKRA5FA6EIoLj5xr29HORsZbvlwJEamIC/Jk0ikTEQeikEokXw3cBzykPhcihkzZjBw4EDmHqCNsQyjvuMrzz3o/r/EPQ9BYU1TCMUChJMLIHHREBiBcrB87aWFKGzqUOQJBX0ffYs8su/hqt7F9MOoK+pEWOTm5gK6AS+//HKF66WkpJQTFhUxcuRICgsLmTFjBtOmTSM3N5fExES6dOnCD3/4Q26++WYyMjJqY/iGYRjGfob/QfezhecjT0IrlD/xJEqyjhYXzVCexb/cNknAz1BCZRPk0n8TlZrNIBQXK1BllwbAr1CZ2CVo1rAVmnVsCfwXiZeRSOC86Pabg/peFCGjwIuKAQMG1Kg7r2EY+x/xxIUXBBPd81nlthI+jKgqUbEciYpn3Ov7U/PMGvexqI/UpH6vYRiGUXf4krI3oITIH0S9tguJiwXAj5G4eBr4KwpZuA55PAqAPigPYwJKojwPiYv/Am+hZO4RKAyhEFV2aYNETBLKy/jYrX8KyrG4H4Vb+dqEvr+GFxWGYRw8+GZ3tyJxsY0wIXskFYsLz/4kKvZqHwvDMAzD2F/xs4VPuP+vQt4D0A9etOdiDoqH3kiYVJmHEigbIpFwgdt2AkrU/hSFMd2ESsPmutcTUQJ3dGjDyUjEvEUYblUQM04TFYZxYJGTk0P//v3ZunVrpetFey52o6ac1fVc7E+ioqaYsDAMwzAOKqorLt5AORJNgdnIw3A+CnFa5tbvRCguPgZ+iLwZq1HYQneUdJmDwp66IpGRiOKpZ6KwhvEo7OoolFfRuHHj/dIoMAyjYnJycujWrRvtkEci9jO8cOFCMjMz2blzJxB+Fz2PvJvtKS8uznTPvtz1DlQtKoEDT1RAHVWFMgzDMIy9SRAEzAeeQD/E0eU+vLg4FHklhqEf9feRZyIBJWAvQwLCey7uQlVYVrt9dEKhTKDu3auQAVCExMMEJCpmuNeHoqTv9UBWVlbtnrBhGHsVLyoyUFGHvkhceBYuXEifPn1oVVRUutyHQ12OcrW+QxMRJwE9kbjIQ8UmvExogprnlaDQqSI0cRGgPhf7s6gA81gYhmEYByl+tvBlNFvYhdBzcR7KkxiAvBb/D/3Av+Rev5hQXOD+jiDPRgtUIrIzKgG5HImMFGQAfIQMgrYodKoZEi/3o9CohQsXkpaWVvsnbBjGXiFaVPhy1o+j3jORSIQFCxbQp08fuqDPfRJlcyxuRN8vm5FH9EtU8OFcNHERLRMi6PsFJCK+RN8ty936W4jvLdlfMGFhGIZhHJT4H/YrUAiCb4aXhfIreiNRMRolYntixcV89KO+FkhGoVCdkQHgn5chcbEMGIMqS21EDe9mA3cjEWKiwjAOLOKJCpBYAIkLLyoykGfyN8ijcWvUehEUIvmRe5wHnOGWR3ss/LrR4gL0neJ5hv1XXJiwMAzDMA46YmcLQcKgADWrK3T//4SyomK0e/biYjCaadyEREVfQjHhgyA6uecXUDhVRxQnfRZwAvAsYSUoExWGceBQkajwXI++S55CleBuQRXnUtB3z3UocXube3yBvJnnAqcTfoccTOLChIVhGIZxUBFPVGxGP/DJqFdFe5SI3QGFL6VEbT8aWISMgE5olvEQoBcKc9jllkXTAiVWrkVCJQ2JkMVoZrI9YdjE/mYIGIZRnqpERQn6fI9AeRPPoepyXVHVuOuRUChEouJT1LfmPCQqvOBojPK6DhZxY
cLCMAzDOGioSFTsQMb/BtQJ+xQUppSARASE4uI5FP88EomDXCQqOiFRUezWixYXTVBC58Oos/ZxyMvRHoVHZLr1TFwYxoFB//79aY9CmyoSFQkon+JqNBGxDomG85CggLKiYiQKfwpQ+eolKIm7J2WrKeUTNsmLoO+fPFQcoi9lxcVY9q/vFKsKZRiGYRwUVCUqWqIf6OGoDOwsZCAkIXGRg0TFFNTTYphb/2SU+J2AxEQjJC6K3DGKUbWXAW4bXwmqG/KI+FnIG4HbKF9NxjCM/Y+tW7eyDvW6eTzmNd+/phHqXzPJ/X0M8LXbpgDYioTFe0A/JCpK0PfNgqi/l6Lvk+3o++dnwFR3jN1IxKxFvXPaIK/IIcgzOghVuNtfvlNMWBiGYRgHPJFIhPZIEFQkKjx/QcbAWGAaobh4EhkIw9EsZVMUOtUo5liNCMXFd2gmcbF7XILCIGYAryBDIdcdw4uL01HX7eTk5No4dcMw9hJBEDAPeJCy4qKBe26ESsa+C4wCXkae0DFuWYC+W04lbMiZjRrgNUdlZ9ujMKclyNv5KhIOE5HIWIJESDNUOCKCSluvROGXi90xAJYt83Xs6g4LhTIMwzAOeGbMmMERRxzBe0gwnEN8UQHwNKrYtAV5KEAiZCFqkHeeW+YTLpvFOZ4XG/koPGGt+z9C2FBvHDJARrn/26BE7vdQL4sl1svCMPZ7ojtogyYHWqNQx9dROdhL0YQEwF+BXyJxARIVA5EH437gaDS5kI6+P5qjEMvxyJN6AQrV/BB4FDgCeTr6Ig/IXPSd8xUKv7oUCYv7gR49erB06VK6d+9euxehBpiwMAzDMA54BgwYUCou/olExUXEFxWPow7YX6Hch3+jBMkfoRjoRORhgLBErRcXEeShaAHsRDOHG932JYR9Ly52677g/h8F/BflYMwFlixZQo8ePfbwrA3D2BfEiovr0ATBF8hLOQx9/hPQZEK0uPjOvb4TmIcmMY5HgmItmtBYikrQbkKTGQEKgZqPJiGORt9lG1Do1DfAdDQR8iN33ATgX9S9uDBhYRiGYRwURIuLR5Bhfzn6AYeyouJht+x4ZPD3RLOKEcKwJR+o5MVFMgqVeg04E+VfrAd6oByMXW775W79i1DYwyfoR/95JCqmTJliosIwDjCixcVW1A/nSOR9KIlaz5eivguVlX0EhSt9ivKutgEPIRHRAX1fvI48E18CTyCPxOeooEQ+8BjwLfqeWYZETV/gBsKcBp9gXtfiwnIsDMMwjIMGLy7mIfHwPApBiCcqHkd9Jzai2vLjkTjwtVUiyDPRDP24v4Lin1uhEKrXUTJle5R42QiJjS7IWHgK5XAcCkxw2/UCzjrrLBYt8rWoDMM4UPA5F5PRd8kslGSdgMSFf0xAIZWbUQjke6gnzli3PAe4zy2fj74z3kbiwa9/PPoOuQT1wfkX8CZquHka8sqOoayouRqVxs1A4qIuci7MY2EYhmEcVER7Lh5GoQafoKTKaFHxAPIogMKb3kPJkedQtgFeC+ANlJh5HPrR/hr4mFB8tI86fg9keDxF2CfjJOAqd/x8IDMzk++++64Wz9owjL2Nrzw3GuVaPIYmHEBezF3os/8oqgKVg75PTgO6o++LS936LyGvxLHI8zkMhUSdB5yIPBINkOczQJMkX6EGfCOAfyBRczZhk06QuJiDPCKZmZkUFBTU3gWoBiYsDMMwjIOOaHGxGf3g3+pe86LCN5xKRx1z81ByNcgYABkCryLjIRNVaylAyZRN0CwjKIfCi4txbj/foWTM5sij8TqKo14PBCYqDOOAIl456x+551dQTkQREhunou+ELOQJbQakunU6Ia9CL+BvaNLjdOSZWIS8He3Q9xKEXbx7o8mKTW6d6UB/lLsREE6EPI6+l9YDwT4WFWDCwjAMwzhIiRYXoFCCOehHN1pUeENhN/B3yoqLV4EXgRPcYz0SEOvQj/oWJBZAhsTHqDRlW+Sl+Br1wXgPeU58N
alFixaRlpZWuydsGMZeIZ6o8PwITVzcj0pUn4SEQg/3AIkLkNdyN/oeSAQ6osTsH6JwqBRkmL/q1h+Jwqwao++j+cjT0QxVjzodeTlAHpEnCCdN6qphngkLwzAM46BlwIABioueN49+/fqxCYkDoJyh0AA1pvLiYh6qvjIIhUBtcNt0RzOL81HIAkhcrEEJmvloVvIb4HpkEHREP/oN0axm7969WbhwoYkLw9jPqUxUgCYMIsjYPwVNOKxHYZa9UOUmKCsuXkf5Ejcg74bvi5GAQqKgrLiIoKIR7yPP6hWoMpVnKSpC8Sx1KyrAhIVhGIZRD0hPT2fu3LlkZmbC7t2kAddS3lCIFhf/Q6IiBZWFzECiYhcKiYog42Go2/ZjlEjZGCVb/hjFP28BzkKGxxMoORNCcXH44YeTlZVF7969a/28DcP4/lRHVLyPch9uQd2wdyCP5UK3Tqy4WI4KRpxL+YpSvmRtrLgoQSFOG91YokVFZyRS/oNExfz586lLrCqUYRiGUS9IT08nMTGRdig8qS9hadhoEtCP9/+hH/T7CUtD7kTCIhElevdG5SOHovKSRyKPxMVoprEAiZXmKKnyx279du74vXv3pk1REX369GHhQm+KGIZR17Rq1Yr2KPE6nqhYhLyTnyMvRQvU1+YwNCHRAYkLXyDifJRHsRQJijPc8mhD3FeVAomLfiiv4kHiiwpQUYkpbt0MoG/fvixYsKCmp1trmLAwDMMw6g1ZWVmsR8ZAFvrRjxYXAUq6LkGhB6+inIrPUG7GbuTqb+jWTSMUF9PdPn0iZq5brw3qjLsNuIxQXOxG4mQkMgpMXBjG3mXBggUkJiZWa91Zs2axDlV5ejLmtW/d82HIKzkBVWjqivIstqK+OPHExb2oEpQXEL65ncf30XkT+AB9j/jJi1hRsQV9RzVH3pChaNIiMzOzWue4NzBhYRiGYdQbUlNTyc7OZi7qRREtLryo2I3qzb+Aki/HIg/HJOBDwpr1uG16IY/G2yjB+59o9rKBezRFwqKEUFwch0ImLkWJm9di4sIw9iYLFiygb9++HFJcTCQSqXL9lJQUQF7HX1FWXPitE1Fe1UpUqGEDobDYSnlxUeC2SSYUFBH0PZFA2EPnLRQ2uQl9N/zKHWNMzBhbAxe6/V5MWA2qqKioyvPbW1iOhWEYhlGv8OKiV69ePBfz2qFIUDyFQp5WIQFyLjIYJiMD4HS3fgKaqfwEVYe5jrADdwOgECVbtkQ/uMWoxv17qMHWb1Glqv7AbSjkoU+fPixYsMByLgyjlvCiIgPN6r+N8icqS3L2+RXDgHeRcQ8qyNAKNb/7LcqvuAF5Hye6dY5GwgIUHrkZhU41RmKgGeW9FKDvjKkoETsXhVRdG7WeTwCP9lxcjDwXdV0NymPCwjAMw6h3VCQuJqJZwQRkPOQgj0USCnloiCq6gMTFWyjG+XTkeYAwVKoZMhTykJHRGDW5ehzVtP8vsAKFVXRGFWUaorK4Ji4Mo3aIFhU/QblOvpdNReIiNmn7aeSJjBYXf0Wi4gIkLFq41ya654HI4J8DbEdFICoSFb4PxVvIM3qWW/YKmuS4lvLVpby4WIFCMq8CnqnknPYVJiwMwzCMekmsuFiKKjstQs3vegC/RvkYPgThbPf8Ogpt8DkVPyQMj/AEhMZGHjISnqCsqFiORIWvd3+1ezZxYRh7TjxRAWEydqy4yMnJoVu3buUqQV3jnr24+ApVgxrm9unb0J3lniegkMrmyPPQDX3Gm1JWVASUFRUTCTt170YTE15IxBMXw9B3SBc0MdEizjnta0xYGIZhGPWWaHGxCcVIg8rK/hjNCp6BQh6ixUUJClkYRigqooWFNxhA8dTjUYx2T9SZO56o8FyNZjk3oSTMnTt31sq5GkZ9oiJR4YkVF8uXL6dbt26koT4RsZWgosXFOJQj9Qn6XhiKErcBhgOzgYeB0aj6W3ckKgLCkrLRoqIQiYoe6PumCBno0UJiN/KM+
GXjUN7FVW7/8c6pLsSFJW8bhmEY9RovLryo6IoqNV2Dfsx7AX9CImAM8AaaHfyley6M2V+0yAhQffn/IBHiRcUyt78uhALE8zhhEqaJCsOoORWJit3II+m5EeU29YVST8WPUP7TYrfOIrcd6DvhTsLJgEKUvP0R+lwXoNDID4BjUN5FF+S5aERY+CE6/CniXj8dNdV8ComKRPfaBSiP4k3kyQQ4DzgCmOHWi+ZGt692QPPmzatzuWoV81gYhmEYhiMdiYUNqALUqZQVF6OBF1GoQjL64c932zanrLEQQfkTL6DY6ntQzsYqFG99KKpNn4xCGCKEsd/7QxKmYRyoZGZm0h55EqJFxf9DidhnAH9xy29AoYoT3Lo3oM/pMuDfwJduP/eg0KRrkADIQ0Z8e8JGdg1QYYaTUcW3Q1CluR3o+8F32I72WhShog6D3LoTkHHucyiKgDPdNi+6ZanATCDTbRPNWDeG9cDSrKzqXK5axTwWhmEYRr1m8eLF9OrViwzU5O4BNOP3ChIXIKPkBTTreAlKxN6JfuybIXFRgMRBdBhUG1ReNhcZNatRUvihaLazsXvOR6LifsK+GsuXx2vfZxhGVRQVFbEOeAd9rryo+Bx9tj9G+VMB8kwMRKGKN7jtU5CBPw2Jis/d9rtR48z/oZLRv0ahSBciT+Z7wPFICHRCYiIZiZACd7wG6HuiBAmYD1G+VqLbzyUo32IMoeho6F47GomYvyMP6PXoO2WZG/dY5NWYAyxdupTu3X2Q1L7DPBaGYRhGvSbe7KaPY34JGROfImPkR8AIVD4ykbD53S4kDiLIkIj2NVzhnp9EYmQkShRPRYZHIao68yiaZfwRMlC6d+/OsmXL6NatW62er2HUB4IgIBKJ8G/U7HI7cBMwClVPGgPcjMKMeqCQJc9vkEfgRpRDNQEJlKHoM3o48nisQx6LRkBb5HXoSziB0Ivw+2Er+l7wnotlyDO5FX13NEYTDhe4MYxHouIaZKxPQSFXq9F3TCskgCJoMmIS+h6pS1EBJiwMwzCMes727duJRCKls5s3Inf+cDRj+P+QiLgZCY5cZAS0QkZDMQp18j0roLy4GO2en0RC5UIkTpJRTPZY9//FwH3As2jm0cSFYXx/Fi1aRFpaGg1RrsUot/xK5DF4HImCB6K2+Q0y4K936xWhnIbvkEcRFBaVjMTKGOTB+BHKqViIQiDnIwHRBVWZg9BzsRNY4P733osF6HunN5q8KEbeiQYoX+IhNCFxMfKSjnPbXowmIp4C5lK3ogJMWBiGYRhG6ezmg+7/y9GP/RxkeIxG1aByCXtceFGRiH5MWyEjJJ+wrKQXFwmEnosxyFi4ENW5f869/lcUcvEKMmjAxIVhfF+ys7NJS0sjA4U4nYw+b63RZ9v3nXkaiYk/U15UgD7/24DByPPwHGpk+XckEj5EDS5HoM97R1QVKht9f+xCHpFmyAP6HGqa1xt5NFJRA813gHluH13RJMYKt/4ulI91HvA39P1xNwrPXIxyPtL8sXr0sD4WhmEYhlHXeHHxAPIqrELlJG9BP+hbiC8qGrntl6Mf1ZaECYw+58J31o0WF6Cwh7ORmJiHZh9fdq+ZuDCM70e0qPBVoZahiQGQuGhIWXFxDvqMR4uKAE0UFCIPxTWo0MKjwC+Ae1HOxrsox+Js9L2xHoVJbkNVpRq55/8gEZDt9nUpYQW501E1uPnuf//9sxx5OK4kFBUgYXG3W2eg+/tnKLQqMTGRoqKiml20WsKEhWEYhmFEsQpVg0lAguIKZFg0QvHRzVCVlwaEP6KPo67ao5Fh4qtDeaKrRV2JjIx33P6PdPvxTa8ykIFxKmUb5pm4MIzqES9vqjv63OUhkZBAWXHxIqrEFC0qdiGvwSEo/DEBhVNloZyHM5A4AHkj1qGJhyZo8qEPCnH8DxIA69DnfQUSI51RcQf/3XA68AVh4YjlwBrCBnteVHjuRhMSvd0xfDWo+XVQDcpjVaEMwzAMA
zWUao+Mi75odvM9FMtciAyF5uiHsxHyahQjUfEIEhv/QfkSO9xrIIMhgVBojEfJ4EOAE5HRMgB11v0f6s59JppVBRlGQ1GcdWZmZoXjb968OcuWLavwdcOoL+zYsaNMVSiQx8J7FHei/AhfcemHKOToCuRd3ISExUTUEG8KMtjXIe/GR6gqVCMkEo5CYuI+VElqKxIyPdBExbtuvQuRgLkVeUeeRuFMPnAp4o49EYVTrUTV4Zajhnv+XKJJR987vhrU/Pnz6dOnTw2vWO1hHgvDMAzDAGbNmkX//v15D4VPzEPGxKPIADmdMCSiETIGxgCPoVjpl4DbCX/8/UyoD5UCiYpHUElKX1VmC5q5HIASRJ8GJqNykhFkiPiGeUF+PvHwoqhHjx51nrxpGPsD0XlT21EORBv0eSxxz7tQXlQimjQoRtWV3kQVmnJR/sTnaHKgAH0+T0bJ1KtQcYev3d+HAF+hSYjDUGWp8ciTOQx9T0BYde4hlHTdCH1fvI4Sydej74Xly5eTkpJSLgcsuit4dInZuhYVYMLCMAzDMAB5A7y4uB2VcvwpCkd4yq1zuntuikpWPgb0RF6NNShnYjcyPqCsuHgRiZRoUeE7cPu+Fle6fT+KRMrRaKaysoZ5kUiEdNS07z1MXBiGxxvkT6HP1WXo87kNlYktQt6LYvQZfQ19hjoB76PP1K2ESdfzUc7FWiRKHkNVoE5A4uJYJCCeRd8H05EH9JaoMW1BHk1f6GEL8lIuQR275+MmEWI+7/HExf4mKgAigbX2rJL8/HxatmxJXl4eLVq0qOvhGIZhGHuRSCRCX2QMXItm4H4NfOb+Pw15FB5GguAtFOawCoU//ADFPn+BEjR/iAyTB1Gs9CvEFxXJ7gFKDH3N/b0R11wrzs+1FxW3Ut7QMHFh1Hf856MzKgH7Q1T+9V2UaH0OMvC/Q+FO7wMnoUmFFYQiIwcJicVu/R2EfWyGoQ7YW1Ao4zDksXgSiYV0wm7aW1CY1CQkJnyidgfk4VhPfFER75z8RMK+EBU1sYMtx8IwDMMwHF5U3IbirXcig+IvqPnVGBRz/ShK5jwEeStWoaTNEjQzejeavXwa+B1KBi90xxhLKCpK0KxltKh4ARk4uSiR++8o5yMSiU4HLy8qQPkYP3Hb9ejRw3IujHqL/3z8GHn/GqA+FH8H+qEqTpORp+JtZKSfiERFd5SLsQz4L/JuJCBjvgPycESAU1CoVB/UgfuHSLTsRB6O36GGduehJO6tlBUVQRAwf/581iIPR1WiAvf6POQB3Z88FR4LhTIMwzAMyoqKG9yybchI2I28Fn9E4Qo9UXJ1PjJWrkM/qB8DZ6HKMXcTlo9NQuES21Fc9VVIjExFM6TXueO9gDwOy1Ec94So8T3gxuhDImJFhSe6kpSFRRn1kcTERNoj70EKEuqtkfcgB4mDnihJ+gtgAwpRTEGiYgYKdTwSeR2ao4IKn6Jys4egggoDUN5ELzRJcJo7/stIeFzu/r8fhWGdjsIqo0Mb+/Tpw/z588nMzCSoZonYIAhITExkflbWfiUqwISFYRiGYZQx1G+IWt4MiQtfPeYeZPS3QaFRbyLPg/cqDEGznxEUMrEJGTP3o664U1ADrQdRY6xhbj8JSHw8jOK3D6WsqPDiwYuLikSFx8SFUZ/Jysqib9++vI1CmgqRgD8DeSHeQaFQxyIv5EkoTLE1yot41r12CeqI/Rz6TB/h/h6IvBs9UWgV6DPsxUUCYfnoy5Fn4RXUvyKH8l6JPn361LjvRF31qagKC4UyDMMw6jVNmzalPTII4hnqzVDYk2+G9z+UpL0RdetdhUTBQOBcVFLyP8CfUH35/0M/tqe79Tu4/R2FjJtLkIj4LRIbv0fhVBfGjONG5E05HomKq1BIVgkKs4glXpnaSCTC7NmzadWqFTk5OdW5PIZxwNGnTx/++Mc/sgB5B45FoqIH+lyNRCFJ65B3cTPyTExGuRFeVHRG3wujUX7FBuRdL
EHexy7oO8GXk05A1eKGAYejSYb7UZjVNuKLioMNExaGYRhGvSYrK4t16Md/LGUb24FyLCIoRnsM8jYsRAbD0ShM6hcoVGIe0BaJhiS3XTLydjRFoVSzgbtQ+cuFKKxiIBIbs1CC+C3I0IknLl5DoqKz298hqOTl6zHrPk5YpragoKC0JG1mZiYN8/Lo1q2biQvjoOTVV1/ld7/7Hb3RZ+hs5JHwacdnIHExHzXMOx7lUryP+lNciD5fHi8uVqDP6UXo++JNNOHgG2A2QIb1ZPRZboY8HJ3QZzRertTBRp0Ii/z8fH77299y1lln0aNHD1q2bEliYiIdOnTg3HPPZcqUKd9730VFRfztb38jMzOTZs2a0bp1a0455RReffXVWjwDwzAM42AhNTWV7Oxs5qDQoWhxsQsZDdGioilqXLUMGSp3oOpPA5F3YDWqNvNTFEbxJ6ArcDOqa38j8iYMREmhq5Eo+QX6UT4SCYeKxEVLZPS0Q8miHYBvUJiVFxePo7ApH8vtw6cuRomrP0DeFBMXxsHGq6++ykUXXUQ68iwOQ56KZsjTELjn05FXYiEqFduIsJN2W8IGl55h6PP2HkrC7uf+/pbw+yJAn8PXkOD4HOVfPI2+E3zzzYNZXNRJudklS5aQmppKcnIy/fr1o3379iQkJLBkyRKyXBvym2++mYcffrhG+92+fTvDhg3j888/p1WrVgwePJjCwkI++OADdu3axV133cV9991X4/FauVnDMIyDn8WLF9OrVy8yUGWlKwm9FU8TioqPkHHQAlWbuQIJggTkvchDFaJAsdqPIWFSgkTFNe41/+Ob5/YVQQmj/3LLp7v/H0ax3K+iRHIvKm5FM7GTkOhJcuNq5MYYKypuQo253kTCowTFni8kbMRlGHvK6tWrOeqoo1i/fn21t0lOTmbbtm01ChNq1qwZWVlZ9OzZs3RZVlYW/fv3pwMSCaehxOt0JOJLCMOVvHfhTZRsPQQVXmiN8qN2oc+Sb4bppcADKEejORIhI6LGNBl9ThOBL1G1qCdQmVncPmKrQh0I1MgODuqAgoKC4IsvvgiKi4vLvfbBBx8ETZs2DYDgrbfeqtF+b7/99gAIMjIygk2bNpUunzZtWpCcnBwAweTJk2s83ry8vAAI8vLyarytYRiGceCQnZ2t3xEInoKgCIJHIUiHYKD7/xkI2kPwEwgCCIrdcv/Y5R67ISiB4DEIjnfPgXuURL1eErP8JxB0gOC/btkTEGRCcC4EP3BjewKCtRAUuP3cD8FREAyCIFV2UBAEQYAb+0Nu/Xy3z4fd+RwFQTe3/vLly+v24hsHHHPmzAkSEhJK/1+1alUABO2i3oOxJCQkBHPmzCn9HwgOg6B5JdvEgvsMAsHixYuDIAiCWbNmlX520yFIg+AfECyH4GMIVrjP53fusSvq87cSgqVunXy37DsICiHYGbXeFAjOg2Cw+wx+F/Xa6xCMhuBad/wLIciN2m/g/l8Gwb8h6FuD861ramIH10koVHJyMoMGDaJhw/JFqU499VQuueQSAN55551q73PLli08+uijADz66KMceuihpa8deeSR/OIXvwDgL3/5y54M3TAMwziIiQ6L+jfwc+SpWAb8CM1ejkbegXfQzL//IS2J2VcEeQRGoPCqEbhGd27d3e45OijiCbffQ1F4FMD1wKkoX6ItoaeiOfKgTEQVp45Fs6ytUFJprKci2W3zOor/Ps4d+xC3fm2FRS1fvrzG3v0WLVqwfPnyPT62se+YO3cuGRkZHFZSQiQSYfXq1XTu3JkMNJNfUe+VtiUlZGRkMHfuXCKRCL3QZyMTeRWqChPy7+tLUL+W1NRUlixZQv/+/WmPChZkoc/CWNRXogtKnF6HPsOgz1+APIa7UB5UMfIuFCKvQ4JbFiDPxhhUYvpSFNrk9+U9FUciD2AJoaeiIWHlqNbo8zkCVZ87GMOi9svkbS84kpKSqr3N1KlTKSoqokuXLhx//PHlXh81ahQAX
375JWvXrq2dgRqGYRgHHdHiYjwKWdiBhMZY9MO5FFV5eRBVkfFhFYF7JCDjxHfUTnXPhUhceBHiBQZIpDyIDJGPkEFTgsKwnkXhULcRioomqPrUWyje+zskLvzx+6BSl9Gi4jkUA94PVbXx4qItobh48803v/e1W758Od27d6eZSxavDpFIhOSCArp3727i4gDBiwovInpDqaj4CWoieRtlDWffJ+Z8FJqUkZFBL7f9r4GfufVjxUUkEiESidCsWbMypZb/SdgMMjU1lcWLF7MOCfObUYnXHSh0KVpcrCBMtM5H+RJNURno5ijccKt7LkHi4S1CUXEJ+gweQihMXkMJ2pehKlMN0WcvWlR4WrvjnY3yNtpB3In2A5X9Tlh88803vPTSS0QiEc4555xqbzdz5kwAjjrqqLivd+/enTZt2gAwa9asPR6nYRiGcfDixUVekyYEQcCKFSvKJHcnoGTrbEJx0ZBQXBQgEdGMsKN2Mqook4eMel+msoSyouJjtzwRCYe/Ig/JUGSMJCJR8Utk7JyIDK2RhOIiBTXYmw98ggymX6F8j0Fu/YvcODoig6k9cBhw5plnsmDBghpfMy8qKpuxjiV29tnExf5PtKj4Cerh0gWJVV+YAMLyyP590Be9R71nrRe671eh9+uIqPW9uIhEIjRGwrdo+3Z6oU7a8TrNe3ExD3kQelJeXByG+tCsRZ/RrcjIPwyJjYbukYiEfSOUoB0tKoajAgr+s5vszms18mpcjrybK1EOViz+8/82qkK1Hkrziw8G6lwi/e53v2PlypXs2LGD5cuX880335CYmMgDDzzAscceW+39+C+iLl1itWFIp06dyM3NtS8twzAMo0pSU1PZvn07oN+WFStW0LVrV/6FDJa3kfExD4kCUHhDvnt4L4FPzyxChkozZFgkuNfHIMOnEJWiBQmQF93yvigR9F1kLI1E5WqnoQRzPxN7htt2Agor2Y7K3r6HklPXoxnVHii0ayjqwXE/oWdlOBIiffv2Zf78+dXu6hstKn6CDD5fmcp3C48lttHfWCTcunfvzrJly+jWrVu1jm3sO2JFxRUoXLDQ/X8Kek/5Uq03IsP+STRT3x94BL33L3EP/xmJECZCP4A8eQXI2zcINZLsjTxxu5HHAco2g0xNTaUvStp+D31eliDhcb/bbiCaEOiOiib8wB17K/rMtCAUGO+gz1MBcAwKf2qFPvdeeJQgwZ+AwqFAXgtQn5nzCJtd5gG5SIA8ikIY586dS3p6egVX/MCjzoXFpEmTyii15ORk/vnPf3LNNddUslV5CgoKAFUJqIjkZM0b5efnV7qvnTt3snPnztL/q1rfMAzDOPiJFhebkKEeBAHz588nPT2dB5Hhch4SD40IjY+iqL+TkfAoRKFJDyPPQh/gBdTHwouKLshYaYhmcR9GRsoOVI72UjQzmuPGOBz4AoVS3YiExL9RKNUpKN9iNRIW65CR1A8JmhHIi9HFjaO64iKeqIDy3cKjxUW87uHRBqKJi/2PikTF5+geXow8ZlsJxcUq9L5KRMLxD0hEeE/FTpQP0RF5MLy42AX8DX3GLkbC4ghUJe2XqIzsTygrLj5CRv7lyJvohWpPt+0HbllryooKUC7ENuQJTHSP7Sh/qSfKd5oMfIXEBYTlaBPd89nu2YuLy9yzFxfPcPCLCvgewuLnP/85kyZNqvGBxowZwwknnFBuuQ9Lys/PZ9GiRfz73//mhhtuYPz48UycOJHmzZvX+Fh7yr333ssf/vCHfX5cwzAMY//Gi4sjjjiCYLP6Xfft25d58+aRnp7OC8hz0A4ZTUVIbOxGosJnDrZA9e9fQKICZPA/hvI3PqSsqADFhjdBxtQdSASUEMZwL0czrBuQgOiEjK3uSIR8g4yjc5ARNdctK3Kvt0MdiP1sa3XERUWiwhNPXMQTFR4TF3XPvHnzyMzMZNeuXSxdupTMzEy+/PLLMqLiavS+fQeJ2XORod0QeSDykacgAQnpy
93fLyBP2XnoM7EWFSpY447dC4mTHsjblo08dj2R1+Aw9L6e6tb34uLvSKhfi/I3thGWdf4HCi8cgErKdkN9Zbyo2Ep5UeEfI5GQT0UCZ6Lb5hzCkrQB8jBGqFxcjELlbA9mUQHfQ1isXbuWRYsW1fhAhYWFlb7eokULBg4cyAsvvECrVq145JFH+MMf/lDtvhNegGzbtq3KMVRVreJXv/oVd955Z+n/+fn5dO7cuZItDMMwjPpCly5d2OxEhSc9PZ2+qGKUN4O9iPD18GPLkXRz629FlWT+jsKdXkOhTNGiYjcy5rogQ24wYfgSaHbYx2xfhgy9RcBiFD4yAs0KT0RG1CAkZBai2d/RqEnYCmTInYkMp9epWFxUJSo8seKiIlHhMXFRd8ybN49+/frRjjA/ph1KtPYVl65GgnYr8rK9h8TExSgXYpfbl+8239gtG4FEdwkKH0x023cgTKoOkCB/A70/b0TG+FtITNyA3s+vEIqLEuRJOA2J6E6En4tLUBjh58hjd407H+9l2Io+D80JE7obEoZmneWeX0ei4Ww3tjXoM9cNfSZ9dbeA+OJisft/IfIWHqyiAr6HsHj++ed5/vnn98ZYSrn66qt55JFHmDBhQrWFhW/ss3LlygrXWb16dZl1KyIpKalGFakMwzCM+ktFxnIEiYlGhCEbsdzo1nsAVcbZjEKZBlJWVPwShTwNR+EhnwI+BqAZMnamoZnho9x+mqOqT63demcgA+9VNNu8Chk9R6OO4GuQkbfBvZaBjLh/EV9cdO/enXaExmZl+OvyHBIxFYkKz9Uoj2UTkJmZaSHJ+wAvKjLQzP5b6H10KRKt3kPxGHA4sBG9TxchcQF6T25A7+kOSCDvQl6I5ui9ugJ9JtoiAfAtMvTbIm/dlyg0bzSqIPWGe4xEgrcEeetAItyL7u5IPPeOOqf/IvFwIxIB2whFxS4kzpsgD+I2t8yLCygrLsYjIXSkuza93Xnhlnsx4sO5lrjrttldn41IrB/MogL2gxyLePg8iY0bN1Z7myOOOAKAadOmxX192bJl5ObmAjBgwIA9HKFhGIZhVD0D72dBK+MG9/wACgvZHPP6YhQffioKcXobGXjFyADcgeK2Mwh7ZRQgQy45aj+B20cxMtTOQ56N2e4YPwDaoDCNIiRSoKyBX1RUVHre7ZDIeRslalclFm50xzysivVw+3vb7T8wUbHXiRYVd6CKYl2B55HhfRt6f85DoUU/RJWRvgJ+i94fryBDfSAy8Neh90435LVoiozt9ShcqRESFY3ca4F7fQryPoxAwuAd9L7xfV0S3D4vQp+vj90+ByER7d/7z7t9neMey5A474IETkP0+diGxEcjQm8LlDWQT0T5ERNRWNaphNXbEtBn3OeIRNDnMQsJlrFITNUHUQH7YblZgPfffx+AXr16VXubM888k8TERFauXMlnn31W7vVx48YBMGjQIDp06FA7AzUMwzDqLdUJ66lyH+75BmS8tUXiIprebvmbwCRUo7+H+/srJAjOQoJgKjKq/Gxw9HF8CdthwD3I0zAPhWc0QiEisaIi2sCPFhV9kWHXD81kP+DWrYpYUbEbiC1s66tJzYe41aSSkpJYuHBhNY5mVId4ogKUXP1jZECvRO/PPui98Boy+C9BnrC+6H33LvJY9AHS3N9L0cz+5+het3frFqL73xSJlG9QblGaW+8+5Bk4Bzg9arwlSISkoNCmy1Euxgz0Pt6GjPm30efiTCQKNrnj7CL83LVyf69Exv9cN+adhPkTvvfMULe/Bci719M9l6D3ajahqBiPxMvnyJNyFfp8HX744RXdhoOGOhEW48aNY/r06eWWB0HAa6+9xm9+8xsAbrjhhnLrDBkyhN69ezNhwoQyy1u3bs1NN90EwM0338y3335b+tqMGTP429/+BsCvf/3rWjsPwzAMo35SU1FRmf+9KnHxODJ65gNPoRniw5FAeAcZUOei2O6JSFxE/7hHd5JIco9maAZ3H
jKMehOKiouQYRXPwPfnfQvKq7iJmosLz25Uvecy1GPDn2tloiISiXBIURF9+vQxcVELVCQqDkFG91XAdUhc5KD3elP0njoJvQ+nIZG7Ec3yv4k8BR2RSNgETEeCpB3yXmxEnrnVqATrFPc4C+Vq9EdC4TwkXHwBBC8qQMb+d0h0HIHCA0Hv3ffdPs5Fn50FKPeiExIZWwlLzL6Nqlu9gLx5a5G4KCIsHR1xxxrq9vsRKoTQEHkNWxF+PqNFRYo7p484+PpVVESdhEK98847XHbZZXTq1InDDz+cVq1a8e2337Jw4UJWrFgBwC233BJXWCxdupQVK1aQl5dX7rV77rmHr7/+mi+++ILU1FQGDx7Mtm3beP/99ykuLubOO+/k7LPPLredYRiGYVSXJk2a0B6Fa1RHVDxO1bkFPvHzBuRBGI96YyRS1tD2CbUdkdHXHhl1CYRlMCe65zMoKyo8SWgWeQEyntqgHIsAVa4pQb01HiS+qPBiKo+wd4avdPOA+7+q6+JFxZfIMPyf29dKKhcV6YQ9Cvr06cOCBQvo3bt3uXWNqqlKVIDeCyPd32OQwdwJvZcHIYN5IcoLao8SpF9AIUxHo/dpMQpxSiYMOSpGnoDD3GvPIYF6JXoPBMgL0pewEd1u93eEMD+iIXrvLHDjbOPGPhx5T152/yegsKSOyMuQiwTIFFSKeQH6TCQgIbDOve4rRX3nXvvIHW+1O/404CX02XvaHS8FhUGluH09iELC5syZQ79+/eLciYOLOvFYXH/99dx22220a9eOGTNm8Morr/DJJ5+QlJTElVdeySeffMJDDz1UZcfOWJo2bcpHH33EvffeS8eOHZk6dSpffPEFxx57LC+//DL/+Mc/9tIZGYZhGPWFrKws1iHjdmwV6/oZ+M+QgVHZjH7E7c8nei6gvKhIRwnZn6NZ2fNRbPdENIN7DqHn4s0KjpOLvBVJaPZ5pzv2Ce75KeAhVL62dGxxPDQtkSF3BjXzXESLipuBe1FPhOqIiluBfxJ2W+7Tp0+5CIb9gdmzZ9fYholEIsyePbvqFWuJzMzM0kpPFxJ2kW7lXi9B4UnvIwP/OrfsVhR+9CZ6r0xFosKXOG6PREcRek/+AOVUfIf6RcxDuRXdCcXD0cAs5AVoj0rDNkbvhxWEgsILDF/W9l1UjOBc9N4P3LqXIVFwP8ohykTv+TVIXDRyx3oWFSr4M/KuvILExndIMH3rziNAXpunUT7Syehz2hJ9rv/PjWMTEhEp1E9RARAJ4n2CjTLk5+fTsmVL8vLyqixVaxiGYRz8ZGdnk5aWVmmp1diwnqrCp3xDrznuf58cHb3tTUhMfIQMvg5unZGE3gNQecyphJV0PAVRjw0o6dRX8dmJZmKfRUbd5cATbvyVjTteN+HjkZG2HeWDeGJFxSg0+7wazXKPQQZdVc30QNf3IXe81157jfPOOy/O6PY9s2fPJjMzs8z9i2bFihUMGDCgtKAM6Bzbo5nyrKyscrH4bdq0YebMmXTt2rVWxxp9bYchA7oNYc6DD19ajwz+HihB+gXkVWuPvF3nImHp33O9kQegDcq32ISqRzVBIVBtUPhSE/TeW+f29ynyxI1En4Mc9F5MRcnku924E9D77WV3rEuQUAnc41nU4dtXgboGCe7Vbr0v3OvZyEPzO5R0fgX6PJyHSjonIrHkvXGLUAjYc+7hP6/t0Gfq/5AomsvBJSpqYgfvl8nbhmEYhrE/06tXLxYtWsQcZFzEei7i5QoEQcA84nsuokXFokWLWLp0KYXJyaWioj0y/M5Hxs6lKJzkA2QwRYuKPGTcdEaeCx84vBsZP4nIy5HiliWjWdkk1PF7PcrdGIFmqY+n8lySaM/FGjQDfRnwH5Qz4ovGxxMVIMNyI/KYXOTG5mf7KxIVX6Fwl9uQ5+L888/fLzwXXlRkoHvUF8p4LlasWEFKSgpJW7aUO8dL0LlkZmaW8VxEIhEab9lCSkpKabh4bRH9nnwX3Ytc4
BPC6k3HION9HQpbmojeH6cjQXE1eh8+iwTBaShPqCt6vy1AXoFuSCAcTlgBars7Zid0vU5A75unkLegHwphWobe7w3c+lNRCOBF6L220q3jx/+8O8ZcVAXqCSTGO6HP2r+RgE1DYuJRd9wn3blOQaKmMxIv97nz6EJ5UTF37lw2RCJsQ2LlFQ4uUVFT9stys4ZhGIaxv+PFRVpaGv9yy66m8gRkLxQedP/fSHlR4SsiFhQUAJrBzszM5D1kqJ4N/BTNCF9HmFsBEhFbUJhGNgpxSUYhJAkofMR7LDqjGd15hD03BiPD6BwUKnI+Mharak/XEhlp7VEloXwkII5Ds9AlSDzEigrfo+MdZFTfjgxDqFhU3IGMxFNQ6dMNbj/nn39+nXou3nrrLc4444xSL9aV6BqPQeeSk5NDSkoKGSj86G3CClv+HB9GfSIyMzNL73t0XklKSgo5OTm16rmIfk/6aks7kKjo6dZJI/QELEYJy2lIPGxB76tByDtR7Lbpit4Ha5CgaIHei+3Rey0bvYdTUe5GR/S+2I3yFi5w6xYiT0mSG8PbSFScg8Svx4fudUfetkfReyQViaSH0Ht9CkrQ9jke65DIWIsE0evIY/GIW/4WEkaFKEzs5+heeFGRnp5OSUkJoPv5rTtefRQVYKFQ1cJCoQzDMIyKiA6L8gZjRbkCnthE5FhREYufCe+HZl3zUKL3lVHreFHxIUowPQd5OZLQLKKvse/LZyYgA/JbZJQlI2PwORSW0hEZ8NWZgWyPvBbRouIqNKP9CpoJXo6Ezr1uGy8qPkMz1R8jo3QnCrcpRknCP446zh2oQ/hJSLBsR96WRGQgroe4XcKjiUQizJw5k/79+1fjzMSCBQvK9PGIxYuKPoTVvd5E92QemuWfD2VC59LQ/bgKeXLeRWFCi1BY2Dw0e34H5QVoPHFR2XnNnTuXjIwMEhMT2blzZ9xziEQi9EQ9KkYjgdHAPQJkcP8XiYoO7rEAGd7nIlGxEonQzuh+ryMsL9vc7acZEprrkEhpjbwYDdA9n4OE5kzksbgMiQXc9fwpEgu/iBl/jjtmF7f+E0jgb0DviyNRSNY6oGHDhsyaNau0y/h6t49oIXsUEhvr3Gvz5s0jPT29dP2KelI0aNCArKysg0pUWCiUYRiGYewjosOiXqRqUQFhCMqLVC0qQPXvs7KymItmT69EBleJez1WVJyPZnxLKFuTP0ACogkyykuQgZiGElZzkVF5CZppPqUa51+ZqACFq1zvjvkm8AxhWNSHyIj7I4p134XEVpEb10BCD8YdKETneuCvbn3fJLDY7bMP6hK+YEFsdwzhw8oGDBjArFmzqnF2EhV9+/blkOLiuAnZ0Z6KW1AY2kvonrQhLHvqez1cjWb3k4AfIYP3v+jadUPi40x0T3zyMG47n7QeGxYVfV6RSISlS5eWvuZFRTsgUlRU7hwaNGjA2LEK5itCZV7nu7+L0D3xOTuXIw9WaySAJiBRcAbKM+hMWN51HRKqPVEC9/+QKJiKBFU7JF62ovd0ERIHecgT4YsB7EBiuASJkpGEIVnRpKD3xAok0EagnKS2SCTvdGMKgoDi4mLS09OZO3cumxo0IAiCMmFhd7rxrANeeeUVgiCgb9++BEHApgYNKm10t3v37oNKVNQU81hUA/NYGIZhGFWRnZ1NZmYmO3bsqPY2TZo0ISsrq1oNYSORCF2QoX4LCstIRkbXVsqKihGEM7gdkRHnPRe7kJG1kzDnoiPyBKxAhno2MnYnI6PQ9wiIpSpREc0rwB/QjGZHNFudggzmWSih9lrU1diXAU0B7kIx82+h2ehrUOz8J0gEDUOC5Uk0670DGbaxnot4XqKqPBdeVPRDIusDygrHeOFPU5Fo60pYcWs9KkH6NjJ6m7trdgPy1GxEuSWHoypG2SgM5350zxLQ7DmU91ykpKSQ7q7Ds+g+rweWLFnCjh07yMjIoB/ypr2H7nEBlMnfWYfeI5cSen/+jqqGb
QH+hITRj914troxvoWM/ItRGF0EiYQ1KHypIxIEbyNxkoYEyVluvI3c/9+i99kGJASy0HtgCHov70Tv9WTCJnQT3X5Guuuyyo3lMCR8mrv1H6d86eTKiO4qP2vWLDIzM6vc5mCnJnawCYtqYMLCMAzDqEsikQh9gGPRTOoZ7u+mSFh8iIx/LyqWIQOyOTL4OyHD0QuLQpQX8R0yMrciIXIIas431722DcX9t6S8uLgBeAMZle2pXFTg9v8sMopboBnpIcA4ZCzeiWLst7qHFxeFyLDshYzR8e4cbkNem0I0y/0Wyk8odOe8tVGjMt3Co8Ncoo3zisRFtKi4GBnF85FAmg+sWbOGjh070sWN+y8odGwrCmHqggTDKiQMNqC+B58gAXIDMqDXu3VbEvZoKADuRoby8agCWD6qakTM+Pu6a/ExElQDkUib69bth3JXRqME5QcIxUW6uxcrkHDp6JbdgnJ5it3jHWTMj0DelC1uvKtRhaiFyMs1FHmOiqLOZwrybJyF3p8vIfF1ASoDu4KwY3Z7QlEx2K3T1J37TsI8jcCNx5eaPZLyoqK5O/+AMCyqJuLCREVITexgS942DMMwjP2Ypk2b0h5VQboP1dL3PSqOQUbX/1AIzghkPM1HQiIDJch6g7QtmrV+AyWonoMMymzkeYgWFaOQByAbVcZ5B832gzwhWWhm+0233ZlULiqWEMbU90FJ3KuRmBmMhBKEfRTOQkb2o2gW/iZkqLcGTkQGd8T9n+CuT4CSbpcCS1w4VLwkcF8e+F8ofChWXMSKin4oNMsbTa8AHTt2JAMJnnkod6QPMvRT3dgPc+uvdtfoCJQfciahqOiJcgK2ukcTdI+XEybnpyLR1Mnt62pkKHsvwsfoPh2FDPTjkKdggzvv0e4aXu/G48XFfEIBgDv2QMJk7UbutdPd65OQZ2sDErfnInHny8/uRveyGaGomOjuzelIkJzkXnsGhYSdikROa5RX8QYSkBe4MRcjQeFFMEhc9HHX5Qn0XrjILU8iFBW4Y92IPCO5QFJSUoV5Jh6bc//+mLAwDMMwjP0YHyr1HprBv8YtfxMZcGeg2eX3UJnO5sio3YiMqa7ox3418jp8gQzHN5CB6A3zjpQXFWPRbHknZADi9p2AZqi3IIO2EIUJ9UIGXjQ5yINShGb0e6IZ8ZVum1tRCFCOWz8FGbsgAznPjftjt80NqKGa75TcndD7cpob94NAz56qaVRRD46KxEU8UdEPGbEL3TY+xCkD3ZNjkPi6xa3nq2xFkLgI3DVojwz/j914vagoQkZ8K+A3qIfEdWiGvxglVEMoLnq4bW5CHpBsJCx3IvEyCxnbM1FoVbSxd517/gW65z1QbsfnSBSchTwfoPdJovv7dDeuDwnDz0Di4ofIGzLeHWs4oajwgnMrClNriQTANuRlKkHvmbnIA5GKxM1uwmZ4iVHjKEDv5VXuXLchYZ2B3qO+OEG0uBiLPh/rgQVZWRh7DxMWhmEYhrEfk5qaSnZ2Nr169Sota+vFxX+RQToSGen/RsboD5ERuBAZuGnIqB+LjMe7UOjR/cjou5z4ouLfaBb7E3cckOHQwh0zAYVKLXavPeaevbjIQQZ1OzSz3AZ1LW6JPCDHIEM2ErU+SFxsQUbjlUgcvIrETHQx2WXuuQ3Ka2jq1l+AchkaU3kPjlhxMW7cOEaNGhVXVIBCnKIrV32O8hJw5/INMtbPcsu8MdwWXdOVSDwUUFZUFKPr+kcULnWN228LlH+wk7AC2GMoBOqP7vjRouISJAAmA68hj8B0lP/xL8KKPVlu/L1QXkt3JEC/dGPsSiiivLh4A1VV8uuvRuJiF7o//d05TkDvhzmEORA7Ce/PJ0hwXIM8M++5c5+PPFHtUbjUAhRaN4Sw8lcyeo9kI+EQQWFQG1A52eMI845w60SHjS1YsIDevXtj7D1MWBiGYRjGfk5l4sJ3LP4fmsX/GM30+jj/BUhUfIpmeFei+P2PkEfjEzSj3
oeqRYWntXv2fQTiiYuBlBUV36Dysj1RCNUgZERPQGKoi9suB81u+ypJXd1rQwg9GQGauQcZmZvdOu1REvd7yFi/m4pFhedqZHRuAkaNGkU75BGJFRWeDDe2Rki0fYbCgfqia/s6Mnh9x/NEdE07I0Gxxe2/JbpPxW7936F7dzUShi3c+j5xuQWhEHrGPaCsqBju9nk2EhGvIsM7WlzcjkRdHyQSjnJjucqd01jkCelAKC4WIe/C2Ugs+NC6E935znJ/X+2ONRUJHJ+nUeLOYYo79oUoxKs5ErfvIzF6vhvzRPQe2OWOMwTd8+XoPd7AnWdXFGI3G4U5ve7OCSQuxhM2qzNRsW8wYWEYhmEYBwAViYsvUIz7SrcsC4WJ/AYZ739EM8DHIQHwI1RlaR2a0Z/j1v8F1RMVnsrExb+RYXkxMg4/Qkavb4z3CZotBxl/UF5cpCDDEWR4e1Hh/w9QmNEGFPaVjkTFw2iW/X3CHIfKeBxVLVoPrFy5ki5duvAV8kDEigrP8UhQgHJFuiJD2HeGnuhe8+KiAcpp+NadY4uo5Y2Q+HsDhXL9EF1b38jQiwpfKvgW9/wcEmleVPhjBciQ916TV1HFpY9RrsU0dO1vRSLJ5yQ0RwIJlNdyk3vtBZSHcwJhwnZXZMgf5sY8Cd2v05C4O96tuxMJnEboffCOG/sAd5wz3HhfQp6cT1FX9TOQkJmHGgyCvCSL3DgPR2GAH7jrcD7y3jzmrv81KA/mAeSJM1Gx7zBhYRiGYRgHCLHiYg4SFitRwqnvWbCVMPzDN+zzlZc+QUbwP1Hi71hkXD+MZv/fp6yo2IwSj+NRkbgocNu1RR6DIcij8gwKvermxpPgnmPFRXvCMCKPFxOeYjSDvRkZtS+64/dExnR1iO2Sfthhh9Heje9T5G0ZUcG2Xlysd+c5wO0HN66JyBhvge7PCndundx5+PCvJBTKVYIEWH+UuBwrKjwNkHdhO0qkjhYV/vVocbEOzdpvJRQVt6MKTvko3GwbMtS7EoqL+914d7l1u7jxLEdCrhAl3vdGouAN9D45B4kK34G7EaGxOcyt9zwKv+voxh5BAmIbEk7D3D5eQO/Jf7nr3Q+951oj4fEc8rj8zJ33uUgULURCqrE73z59+lhC9j7CGuQZhmEYxgGEFxe+Id8Cwio2/fr1IwgC5s+fX65h3zxkqK1A4SK+o/XVKLa+JUpAjhYVTyPD9ZlKxtMaGZYjkFfiEGQUZyPDbyPwD1T16UFkVLZARrdvzjcQiYvX3D5jRYXHG+O+mVoOMso/YM9FBcDGjRtZh4znJm5fkyrZx/Fo9nwTEg990XkvQh4bLypyUChUp6hz8GZuAbrWv0XJzf9GCeHxRIXfdj0SlJmEs/6eEsIu69lI9FyKxFIK8ioMc6+3QILCCwyQuDgD9e0oIhQVHZGHw/e5+AHyWPgCAv1QzsVmwvCnRCQqCtwxfHL4LPQ+WOOOmY68a02RkJmKEs+vRiFWOUgoFCAP1Z/ReylaVIBCoToQ9l/53B2vHcRtbmjUPuaxMAzDMIwDDC8uMjMzCbZvL/d6nz59mD9/vl53vRwaNmzIrl27SEPhJuMIQ5NAM/TvoMo6jZDh/RBhHwDQTHk8WiMj/AMU7gMyFkcThiP1RDH2/0HhLFchQfEmMshbUNZzAaEhHU1A2PyvK/KEPIpyF2ZVML5Y4omK0v27xnEg4eCFSkWei1R0vXLQ9ZuFwpmOQTPnG5Go6OjOx4dLRZChXIhExGXuHP7gxtcUhfREe2q8p6MDEnHvIiP6HMLO6n6dJe74vhnfjcjQfgMZ7t5T4MOy8t2zDy0bge7DRjf2ruiaFyGR4sOnmrp9ZqGciuaEoiLB7XcLEp+JhKVrp7jnM90xzkHvgzFuDJejcKxthKWKJyHh+hbywFxAKCo8r6P31FAkXnw1qJkzZ2LsfcxjYRiGYRgHIKmpqWyPIyo8ffr0KW0QBypbm4sqP7VAM/zjYrY5jVBU+Ph03
PpPULHnYiyaQZ7n/o8t8ZqDQrKucscY4/b1NTJUU1HuRxdkyOcRGsmxrEJx/r5M7kvIOE9CnpGqqExUeLyH5zOq57lIQWFGb6NwnIuRwFqAZsvjiYrdbtyJyBjfgc7/cuQRuB95jKCswPIj9l3HX0XiIlpULHbHbotCq3yy+TnI+J+IxIXft/dcLCLsgdKK8qJinTvXQ936ie6cJyMRdhJhUnuJO7+lbixb3DbJSFychbwSr7vjtUClbzu47c9D97c5CjM7mjB/JduN7WF0P2M5A4kKHw5YVYd1o/YwYWEYhmEY9YC+ffsyb9485qPwnIrERazhPQ8Z8hWJi+hynhC/b0RnlGcxHc3kn4xEwCQUo5+GxMZKVDHIz6LHiosVSKR0d9skoXKmzVH4zRxCr0c8NqLwrOp0YI4WF/koTCuvgnUfQ96AS5Dh7kOjWiJDPFZU4P5PRsKqAAmYVGSgX0GYG1ETcREtKg5DCfuHumMVIDETKy78vlYjz8AOt/4yJEw6EYqK7shbkYyu91RU8ngIYQ+JnW4fO93+Vrtzy3b79OLiVOR1WeDGvAm9F6cjT0S7qPPNQWF80911nTJlSun9eZDy4iL6PWmiYt9ioVCGYRiGUU/w4iI9PR3QDPnD7rVRxJ/Njw4N6kTZsKjqiAqQoXqz+/s5ZABvcdu1Q6LibcI+FfGM6GJkYLZAxq0XHWe757Fodvwdt/4lcc7/MGSMb0Ex99URF/7cb6VsZSrPYyjc5lJkEG9BRnUqCsFZTSguIkikeOHkm7j5vgt93LYlbqz/RvkWI5BAiBcWNdr9/190jRoj4+44t3wzEhQtCMWND+ua6J67IW9TEsqjWIXE4GFubGvc3ylu/RJ0v3yX7FPccXeh8sG+R0lnN5YfuP0scdv/wK17PBKLL7gxTHTr5brr1oVQVLyARMjy5ctJSdFI/P150O33RkxU1DUmLAzDMAyjHlGRuJiBYtOryjvw4mIuMiCrEhUeLy4mo5CbfiiJ/BXKi4roBGdvTDdChm0OmkH3VYpA4UcNkECZ6dY/CYXVxOLH9wBVi4tIJEI6Ei8d0Ex8k6jXZ7ixH0coKrYhI749MrZXovCQFJRX8Lobry8HGy0uCpGASEQGenvU36FV9JioWFxMQsZ6W+RB6eXG3MI9ogWbFxePoFCj/u71EiQKmiCRAxJ/G925NEcCYBLKm+mKBE0Ht2wKyn94BoW39UW5HqluDPPdefVE77cZ6N6+joQhqOhABHmjJhNfVHiixcUiJPJMVNQdJiwMwzAMo54RKy6aImNuLZXnHXhxEb0+yFA8jaqb0f0/FPbSGxmdG6lYVEDZsqwBYZ+L5cgATkFGdwQZ/yWol4XPE6iI6ogLLyouRNWXUikrKnJQeNFwwkZ/AwmN+BI0Yx9Bs/IfImM/ldBTEC0udiGDvClKcF+CvEIXUT5BOVZcgHIzFqFqUbciEbA9akzxEuGPQyWI84CfIgHgSxP3RB4M3HmAxGQzJCiPQpW91rptPkYFALqjkKc0d01y3Bg2IE/FDnT/ZqHEey+ClgBLly6le/fuRCIRxrlz+JyKRYXHvze/JUzUNlFRN1iOhWEYhmHUQ2JzLioTFR4f177KrT9lyhRWrVrFOjRTPLaSbeehsJ7OSFRsoHJRsRPN/hcRhhB5cdHNjWGle60VMvrPQl6RnSiBujJuBG5DM+qxpUhjRUUfJIY8Oe7Y3YDrUfiQzwHwYU7+fDohQ/spFE70cxQ6NJGwMtJONGvfCrgH9RK5EoVX7UYCJpp4d+l5N6bz3Zh7Iw/IIuKLCpDR3tuNZw4SAO3QvfJhS0nI81KCQpTWofyIZUgYdkD9Nx5wY12DRN4At5+XkbHfHt1z78H4N0pw/xIJBy8qQO+zRe76VCUqSq9JEJio2A8wj4VhGIZh1FO8uMjMzCQoLq56A2TANWrUiHlZWfTt2xeAVatW0blz59KO4FfH2e4naGb+RDRL/R8UclORqNjplu1EYTITk
KF/HmU7dIN6WWxHs/0/Qkbxa2j2/thKziWe56IqUbEDGfCt3TjWopCjXcgQbkrYJwKUh/AhClHahDqcLyfMKdiFkp8TgXuRsX8FCm9qgYz5Qrffhsh4j50Vfh6Vnr0QiasSN6YEZMR/jaoqRTPJje0mVI1pgTv2ccA3hBW+eqLrvMQd+zt37oNQ2dckd+wtbrzXuHP6F2E3+Ifc9RiFEs1fRt6PTUhsRIsKTxAEtGrViuWzZlUpKqK3MeoWExaGYRiGUY/p27cvxdUUFZ7Y9Tt16lSpuBiOZrJ/jEJoNiBRMR0Z4z4B2wuJQjRL3hgZouPQTHpsh26QwbvR/d8OeUXeQx6OmVQuLEDiYhEy4CORCO3d2OKJCtyYOiOPybOomeCRKGckGeWMgMTFZJRUPRR5BR5FIinDHTPVvR4gY/5dJCouR0b6Nnc9mhFWdvLJ3ztRT4doUXE2Mv4jSFT0cmOJFRdeVJyNKnQ1csvjiQsfXtQZCYalKHSqjTvnpwhzI9aia+7zHL7++msGDhwYNwdi0aJFHHnkkSzNyionKjxbt26Nu9zYfzFhYRiGYRjGHlORuLgbGZIXALej2fQvorb7j3s+GxnLOSgnoQeKw38ZGfkDUMnSaHHRDs16b0QhNs+imfLlyAh/BBnkleV+jCVsojZu3DhGjRrF18i4jhUVIKM9BYUrPY2M92nutR7u+RU0Iz8LNYC7GCW8T0Zem3HuumS5bZ5BeQpnIlHRComKfCQqWiCxlY88G2ORIOmDPDUXomTsAAmKEvfw4mIrEkK9UF6DFxWD3XiLkQcFyosLX52pBbqPRSj8KQFd/5Fuu/no/uS7a+lFBZTPgVi0aBG9evWioCA2yMs40DFhYRiGYRhGrRArLuagyj9rURz+WCQ2jqW8uNiFxMFyFPbzHPIGnA/cgoxdz3g0M98flVPtigztaFERrxRpLLGlSU877TTaEnb07oCarcUy2b2ejMTB8ZQVF8Wou/lZqAv3WFSWdjMST3932/iu0oe58/nUXa8RxBcVye7cH0Oz/9+iRG1f4cl7KgIkLEC5EOvdfj4m9BANdusmuvHGExcZKAl7g7tG+ShXwnfP3oTCm7oQlimOFRWeIAho0qQJi7Ky6NWrF8bBiQkLwzAMwzBqjWhxsYlwhjotLa2MJyNaXHyH4vBPQUbyl6jqVCGhoXIcobjYiYzrk1DVpG8oLyqgcnERr9/B9OnT6dKlCzko9Md7R6LFxSQUntUEzeYvQKLoZCQuCpFH40YkHsYioxsUujQPeTquRE0CL3LXYaQb/7+QAX8p8UXFA4QdvQehfJVFSABA2UpaS9y6c1GifBN3LtGioqF77CAUF3ko/+Nxd41bU1ZU+ByPtlHXtTScrH37cqLCs2PHjrjLjYMHqwplGIZhGEat4sVF0LYtQRDQq1cvFi1axBxkOI9FBsixaLZ+BzK4x6GwoAdQWM1aynZWPg4ZsztR3sB4ZKTHExWeeB2aK2qi1rlzZ1auXMk85C1p6o7xptsuWlR8ioz2NWvWMBd5A/JQ0vgCNJv/IKGoKHbn+Mknn5DtzvVOZJDPQ4nQh7rzeNwdK56omI88AuuRV2SeO94i5KXwwmK5Wz7DjQnCBoIfEYoKT2MUNrYEeWQ+QWFPD6Du47GiIprocLJvvvkmzhpGfcE8FoZhGIZh1DqdOnVi/fr1pf97cRHruZiDjOgN7rEVGaheIMR6HBah2fR1yPB+GRn08USFpyZN1Ly46NJF6eEnIqN+AcqZ8KIiupHgmjVr6NixI/ko72EJmr3/ABnrXlSsXLmSzp07l8s58NvnovCm3cA/3fmNoqyo8Mf8+uuvOfrooxlP2GU8QHkUS93xZqF8j40oKfxZ5L14BFVzGkrZnhgrUTL4K+48Frp9bUcekYpEhRdpq1evpmPHjnHvgVE/MI+FYRiGYRj7hFjPxZ1oVn8hMpiXLFlCQbNmZQRCtMfhTrfdfOCll16iABm9lYmK2P28SNWdm
eN5LmYSX1QAdOjQgTVr1rAShUc1QfkUfSkvKqLH07BTJ9asWUOHDh0IgoCNCQk899xzbHXX5BHgD5QXFQADBw7k66+/Zi4SHvPcNr6a0ywkEBYikTPeXavV7vz/jQSW32M2Eh7jkZiY5rbdgfI0HiL0+HhMVBixRAIr+lsl+fn5tGzZkry8PFq0aFH1BoZhGIZhVEh2djZpaWm0o6x3ojIikUjp+r6q0KxZsxgwYECN+hdEIpFqN1FbtWoVXbp0IR11mf6E8gZ+vP379T8lvqioCn9eQJXX6JtvvuHoo4+mH/JcpCGvxCtRY507dy4ZGRkkJiayc+dOIpEInYDDgTtQ8vWzSHTluG0aNGhASUkJCxYsoHfv3qXndSvyHJmoqD/UxA42YVENTFgYhmEYRu2SnZ1NZmZmjRJ6mzRpQtY+rirkxcX3FUE1FRXRLF26lMzMTAoLCytdL1pcnIJCsKojgDqhKlbdURO/nGpskw6cRhhOZqLi4MeERS1jwsIwDMMw6i+rVq3iyCOPZOPGjdXe5rDDDmP69OnfW1TUFC8uaiqAWqE8iy012MYfw0RF/aAmdrDlWBiGYRiGYVRC586dayQqADZu3LjPRAWEORfVFRW49bYC25OSarRNpH17ExVGXKwqlGEYhmEYxkHAwIEDa5RvAtUXIdGsXbu2xtsY9QPzWBiGYRiGYRiGsceYsDAMwzAMwzAMY48xYWEYhmEYhmEYxh5jwsIwDMMwDMMwjD3GhIVhGIZhGIZhGHuMCQvDMAzDMAzDMPYYExaGYRiGYRiGYewx1seiGvgaz/n5+XU8EsMwDMMwDMPYd3j7tzo9T0xYVIOCggKAfdpB0zAMwzAMwzD2FwoKCmjZsmWl60SC79NysZ5RUlLC2rVrad68OZFIpK6Hs9fJz8+nc+fOrFq1ihYtWtT1cIw9xO7nwYXdz4MPu6cHF3Y/Dy7sfspTUVBQQIcOHUhIqDyLwjwW1SAhIYFOnTrV9TD2OS1atKi3H6KDEbufBxd2Pw8+7J4eXNj9PLio7/ezKk+Fx5K3DcMwDMMwDMPYY0xYGIZhGIZhGIaxx5iwMMqRlJTE73//e5KSkup6KEYtYPfz4MLu58GH3dODC7ufBxd2P2uGJW8bhmEYhmEYhrHHmMfCMAzDMAzDMIw9xoSFYRiGYRiGYRh7jAkLwzAMwzAMwzD2GBMW9ZipU6dy9913c84559ChQwcikQiRSITVq1fv0X6Lior429/+RmZmJs2aNaN169accsopvPrqq7U0cqMiCgoK+H//7/+RlpZGkyZNOPTQQznrrLP44IMPvtf+TjnllNL3RbxHu3btavkM6h+vvPIKp5xyCq1bt6ZZs2ZkZmbyf//3fxQXF3+v/U2fPp2LLrqItm3b0rhxY7p168att97Kxo0ba3nkRjxq634+88wzlX72IpEIb7311l46C2PRokU8+OCDXHXVVWRkZNCwYUMikQh//vOf92i/7733HmeeeSaHHnooTZo0oXfv3vz617+msLCwlkZuVERt39O77767ys/owoULa/ks9n+sQV49ZtSoUeTl5dXqPrdv386wYcP4/PPPadWqFcOHD6ewsJAPPviAjz/+mLvuuov77ruvVo9piI0bN3LiiSeSnZ1N+/btOeecc9iwYQNvvvkmb775Jvfffz+33nrr99r36aefHldEVLdhjhGfO+64g/vvv5+GDRsyePBgkpOT+eCDD/jFL37B5MmTeeedd2jSpEm19/fqq69y6aWXsmvXLgYOHEi3bt2YNm0aDz30EK+88gqffvopPXv23ItnVL+p7fsJ0KNHD0444YS4r3Xs2LE2hm3E4dFHH+X++++v1X3+61//4s477yQSiXDiiSfStm1bPvnkE+655x7++9//8umnn3LooYfW6jGNkL1xTwEyMzPp379/3Nfq5W9kYNRbrr766uCee+4J3nrrrWDjxo0BEADBqlWrvvc+b7/99gAIMjIygk2bNpUunzZtW
pCcnBwAweTJk2tj+EYM5557bgAEQ4YMCbZt21a6fMqUKUGDBg2ChISEICsrq0b7PPnkkwMg+PDDD2t5tMaECRMCIEhOTg6mT59eunzTpk1BRkZGAAR33XVXtfe3Zs2aoGnTpgEQPP7446XLd+3aFVx++eUBEAwcODAoKSmp1fMwRG3fz7FjxwZAcOWVV+6F0RpV8eSTTwY//elPgxdeeCFYsGBBMHr06AAI/vSnP32v/c2YMSOIRCJBgwYNgqlTp5Yu37ZtWzBkyJAACC644ILaGr4Rh9q+p7///e8DIPj9739fuwM9wDFhYZSyp8IiNzc3SExMDIDg008/Lff6n/70pwAIBg0atKdDNWKYN29eAAQNGjQIcnJyyr1+7bXXBkBwySWX1Gi/Jiz2HgMHDgyA4M9//nO51z755JMACJKSkoKtW7dWa38/+9nPAiAYOnRoudcKCgqCli1bBkDw1ltv7fHYjfLU9v00YbF/ceWVV+6REXrRRRcFQHDdddeVey0nJydISEgIgGDBggV7OlSjmuzpPTVhER/LsTBqjalTp1JUVESXLl04/vjjy70+atQoAL788kvWrl27r4d3UDNhwgQAjj/+eLp27VrudX/tJ0+e/L1j943aY82aNXzzzTdAeG+iOeGEE+jcuTM7d+5k6tSp1dqnfw/E219ycjIjRowA4LXXXvu+wzYqYG/cT+PgoaioiClTpgDx3x9du3Yt/c30n2PDOFCxHAuj1pg5cyYARx11VNzXu3fvTps2bcjNzWXWrFl06NBhXw7voKaqa++Xb9u2jcWLF9O3b98a7X/ChAlMnDiRHTt20LZtW4477jhOO+00EhJsbuL74O9XmzZt6NatW9x1jjrqKFatWsXMmTO59NJLK91fQUEBS5YsKd2uov0999xzpcc2ao/avp/RLFmyhN/85jds3LiR5ORk+vXrx4gRIywW/wAiOzub7du3A5V/Pj/55BP7fB6AzJgxg1/+8pfk5ubSsmVLBgwYwDnnnEPz5s3remh1ggkLo9ZYvnw5AF26dKlwnU6dOpGbm1u6rlE7VHXtW7RoQYsWLcjPz2f58uU1FhYPPPBAuWW9evXi+eefZ+DAgTUfcD2nOp+Vzp07l1m3MnJyckr/rmifNdmfUTNq+35G89lnn/HZZ5+VWda4cWPuvvtufvGLX9RwpEZd4O95q1atKjQ27fN54DJ58mQmT55cZlnLli154IEHuOKKK+poVHWHTTcatUZBQQEAzZo1q3Cd5ORkAPLz8/fJmOoLe+van3jiiTz55JMsWrSIbdu2sXr1aiZMmEB6ejrZ2dkMHTqUBQsW7Nng6yG1fb/8/irbp3329h574/PXrl07fv3rX/PVV1+xadMm8vPz+eabb7jiiivYuXMnv/zlL7nnnnv2fPDGXsd+Gw9OevTowT333MPMmTPJzc0lNzeXTz/9lLPPPpu8vDyuvPJKXnjhhboe5j7HPBYHID//+c+ZNGlSjbcbM2ZMhWULjbpjf76ff/rTn8r837RpUzp27MgZZ5zBiSeeyDfffMOvfvUrJk6cuFfHYRj1jeHDhzN8+PAyy4466iieffZZMjMzueuuu/jjH//ItddeS9u2betolIZRfxk9enS5ZccffzyTJ0/mtttu48EHH+QnP/kJF110EYmJiXUwwrrBhMUByNq1a1m0aFGNt9vbDXi8i3fbtm1VjqFFixZ7dSwHErVxP/f1tU9KSuLXv/41I0eO5K233qK4uJhGjRrt8X7rC7V9v6LDK7Zt2xa3drp99vYe+/rzd/vtt3PvvfeyefNm3nnnnbgGjrH/YL+N9Y+7776bRx55hE2bNvHVV19x4okn1vWQ9hkWCnUA8vzzzxOoVHCNHrGzX7VNSkoKACtXrqxwHd/V269r1M79rOra5+fnl7rYa+va9+nTB4CdO3eyefPmWtlnfcHfg1WrVlW4jn+tOvcruhJYRe+BmuzPqBm1fT+rokGDBqSmpgLhd6qx/+Lv+
datW8uELUZjn8+DizZt2nDYYYcB9e8zasLCqDWOOOIIAKZNmxb39WXLlpGbmwvAgAED9tm46gNVXXu/vFmzZvTq1atWjvntt9+W/l1fq198X/z7/9tvv60wWdPfM39vK6NFixalHbWreg9UZ39Gzajt+1kd/OfPPnv7P2lpaTRt2hSwz2d9Yffu3eTl5QH17zNqwsKoNc4880wSExNZuXJluSomAOPGjQNg0KBBVmq2lhk5ciSgCjLxZqz9tT/nnHNqLWRp/PjxgDwXPvHQqB6dOnUqrabl7000n376KatWrSIpKYkzzzyzWvs877zzKtxfYWFhadWS888///sO26iAvXE/K2PGjBlkZ2cDcPTRR+/x/oy9S2JiImeddRYQ//2xYsUKPv/8cyD8HBsHNpMmTWL79u1EIpEKSwwftOyrTnzG/g/V7Lw9ePDgIC0tLXjttdfKvXb77bcHQHD44YcHmzdvLl0+ffr0IDk5OQCCyZMn1/rYjSA499xzSzsvb9++vXT51KlTgwYNGgQJCQlBVlZWue1Gjx4dpKWlBQ8++GCZ5R988EHw4YcfBiUlJWWW79y5M7j33nuDSCQSAMGYMWP2zgkd5EyYMCEAguTk5GD69Omlyzdv3hxkZGQEQHDXXXeV2ea1114L0tLSgsGDB5fb35o1a4KmTZsGQPDEE0+ULt+1a1cwevToAAgGDhxY7n4atUNt3s9t27YFDz30UJCfn1/uOB9//HGQkpISAMEJJ5ywd07GKEd1ujQ/+OCDQVpaWjB69Ohyr02fPj2IRCJBgwYNgjfffLN0+bZt24IhQ4YEQHDBBRfslbEb8dmTe7pixYrgueeeC3bs2FFumwkTJgRt2rQJgODyyy+v9XHv75iwqMf88Y9/DI455pjShxcWAwYMKF120003lduua9euARCMHTu23Gvbtm0Ljj322AAIWrduHVxwwQXB8OHDg0aNGgVAcOedd+6DM6ufbNiwIUhNTQ2AoH379sHFF18cnHLKKaUC4P7774+73cknnxwAwe9///syy//1r38FQNC2bdvg9NNPD0aNGhUMGzYsaNu2bel75ac//ek+OLODl9tuuy0AgkaNGgXDhw8PLrjggqBVq1YBEBx//PFlBGIQBMHYsWMDIOjatWvc/b388stBgwYNAiA45phjgh/+8IdB9+7dS+/j4sWL98FZ1V9q635u2bIlAIKkpKRg0KBBwcUXXxycf/75Qb9+/Uo/exkZGcHatWv34dnVL6ZPn17m9/HQQw8NgKBTp05llkffg9///vcBEJx88slx9/nPf/4zAIJIJBKccsopwcUXXxy0b98+AIK0tLRg06ZN++js6ie1eU9nzpxZOpFw4oknBpdccklw7rnnlv4GA8Gpp54aFBQU7OOzrHtMWNRjvFqv7BHvC7IyYREE4Yx2v379giZNmgQtW7YMTjrppODll1/euydkBHl5ecEvf/nLIDU1NUhKSgratGkTDB8+PHjvvfcq3KYiYTFjxozgpptuCo4++uigffv2QVJSUtCkSZOgR48ewRVXXBF89tlne/ls6gcvvfRScNJJJwUtWrQImjRpEvTr1y/461//GuzcubPculUJiyAIgmnTpgXnn39+8IMf/CBITEwMunbtGtxyyy3B+vXr9+JZGJ7auJ87d+4Mfvvb3wZnnHFG0K1bt6B58+ZBw4YNgx/84AfB0KFDg8cffzzu/oza48MPP6zy9xEIli9fXrpNVcIiCILg3XffDYYPHx60adMmSEpKClJTU4Nf/epXcb1TRu1Sm/d08+bNwS9+8Ytg8ODBQZcuXYJmzZoFjRo1Ctq3bx+cffbZwbhx44Ldu3fv2xPcT4gEQRDUPIDKMAzDMAzDMAwjxJK3DcMwDMMwDMPYY0xYGIZhGIZhGIaxx5iwMAzDMAzDMAxjjzFhYRiGYRiGYRjGHmPCwjAMwzAMwzCMPcaEhWEYhmEYhmEYe4wJC8MwDMMwDMMw9hgTFoZhGIZhG
IZh7DEmLAzDMAzDMAzD2GNMWBiGYRiGYRiGsceYsDAMwzAMwzAMY48xYWEYhmEYhmEYxh5jwsIwDMMwDMMwjD3m/wOpFIo/RhllQgAAAABJRU5ErkJggg==", "text/plain": [ "
" ] @@ -336,7 +336,7 @@ "$$\n", "\\min_{T:\\mathbb{R}^d \\rightarrow \\mathbb{R}^d} \\Delta(T\\sharp \\mu, \\nu) + \\lambda_\\mathrm{MG} \\mathcal{M}_\\mu^c(T)\n", "$$\n", - "For all fittings, we use $\\Delta = S_{\\varepsilon, \\ell_2^2}$, the {func}`~ott.tools.sinkhorn_divergence.sinkhorn_divergence` with the {class}`squared-Euclidean cost `\n", + "For all fittings, we use $\\Delta = S_{\\varepsilon, \\ell_2^2}$, the {func}`~ott.tools.sinkhorn_divergence.sinkhorn_divergence` with the {class}`squared Euclidean cost `\n", "The function considers a ground cost function `cost_fn` (corresponding to $c$), as well as the `epsilon` regularization parameters to compute approximated Wasserstein distances, both for fitting and regularizer." ] }, @@ -355,7 +355,7 @@ "):\n", " dim_data = 2\n", " # define the neural map\n", - " model = models.MLP(\n", + " model = potentials.PotentialMLP(\n", " dim_hidden=[32, 64, 32], is_potential=False, act_fn=nn.gelu\n", " )\n", "\n", @@ -388,7 +388,7 @@ " print(\"Selected `epsilon_regularizer`:\", epsilon_regularizer)\n", "\n", " def regularizer(x, y):\n", - " gap, out = losses.monge_gap_from_samples(\n", + " gap, out = monge_gap.monge_gap_from_samples(\n", " x,\n", " y,\n", " cost_fn=cost_fn,\n", @@ -398,7 +398,7 @@ " return gap, out.n_iters\n", "\n", " # define solver\n", - " solver = map_estimator.MapEstimator(\n", + " solver = monge_gap.MongeGapEstimator(\n", " dim_data=dim_data,\n", " fitting_loss=fitting_loss,\n", " regularizer=regularizer,\n", From b34b886ff45196b31809de3cd7439b78c3fc64fc Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Tue, 2 Apr 2024 11:20:33 +0200 Subject: [PATCH 176/186] Update MetaOT and NeuralDual --- docs/tutorials/MetaOT.ipynb | 30 +++++++++--------- docs/tutorials/neural_dual.ipynb | 53 +++++++++++++------------------- 2 files changed, 37 insertions(+), 46 deletions(-) diff --git a/docs/tutorials/MetaOT.ipynb b/docs/tutorials/MetaOT.ipynb index 
172024733..ad7426a0f 100644 --- a/docs/tutorials/MetaOT.ipynb +++ b/docs/tutorials/MetaOT.ipynb @@ -23,8 +23,7 @@ "\n", "We will cover:\n", "\n", - "- {class}`~ott.neural.models.MetaInitializer`: The main class for the Meta OT initializer\n", - "- {class}`~ott.neural.models.MLP`: A Meta MLP to predict the dual potentials from the weights of the measures\n", + "- {class}`~ott.initializers.neural.meta_initializer.MetaInitializer`: The main class for the Meta OT initializer\n", "- {class}`~ott.initializers.linear.initializers.GaussianInitializer`: The main initialization class for the Gaussian initializer" ] }, @@ -46,8 +45,8 @@ "import sys\n", "\n", "if \"google.colab\" in sys.modules:\n", - " !pip install -q git+https://github.com/ott-jax/ott@main\n", - " !pip install -q torch torchvision" + " %pip install -q git+https://github.com/ott-jax/ott@main\n", + " %pip install -q torch torchvision" ] }, { @@ -71,7 +70,7 @@ "\n", "from ott.geometry import pointcloud\n", "from ott.initializers.linear import initializers\n", - "from ott.neural import models\n", + "from ott.initializers.neural import meta_initializer\n", "from ott.problems.linear import linear_problem\n", "from ott.solvers.linear import sinkhorn" ] @@ -216,7 +215,7 @@ "This tutorial shows how to train a meta OT model to predict\n", "the optimal Sinkhorn potentials from the image pairs.\n", "We will reproduce their results using \n", - "{class}`~ott.neural.models.MetaInitializer`,\n", + "{class}`~ott.neural.initializers.meta_initializer.MetaInitializer`,\n", "which provides an easy-to-use interface\n", "for training and using Meta OT models.\n", "\n", @@ -239,7 +238,7 @@ "outputs": [ { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAhUAAACLCAYAAADWF2tkAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAAxOAAAMTgF/d4wjAACHNElEQVR4nO29d5Bc53Xm/XTOOafpmenJAwzCIAMkAIIgJSYFkkqUqJW0kssu21t2OZVd1tbu6tutXZd37S2v6bhaywq2JJIixSAQBAgQOcwAmMHk1DPdPd3TOefw/QG/L7uRw8x0D3B/VSqRnHRv33vfe95znvMcVqVSqYCBgYGBgYGB4QFh1/sAGBgYGBgYGB4OmKCCgYGBgYGBYVlgggoGBgYGBgaGZYEJKhgYGBgYGBiWBSaoYGBgYGBgYFgWmKCCgYGBgYGBYVlgggoGBgYGBgaGZYFbrz8sEAig0+nq9efrRiAQYM77EYI570cL5rwfLR7l887lcjf9Wt2CCp1OB7fbXa8/XzesVitz3o8QzHk/WjDn/WjxKJ/3rWDKHwwMDAwMDAzLAhNUMDAwMDAwMCwLTFDBwMDAwMDAsCzUTVPRCJBZan6/H16vF+VyGaVSCSqVCjabDVwuF2w2GywWq85HysDAwMDA0Pg80kEFAJRKJQwMDOBf/uVfUCgUkMlksHnzZvz2b/82RCIReDweE1QwMDAwMDDcBY9cUFEul1EoFFAul5FMJpHJZDA2Nga3241cLodMJgOz2YxQKASlUgmpVMpkLBgYGBqKSqWCSqWCYrGIbDaLYrGIRCKBYrGIQqFAs7AAIBaLIZPJwOVywefzwWazweVywWKxmDWNYdl55IKKUqmEUCiEeDyOo0ePYmZmBmfPnsWlS5dQqVRQLpfB4XBw/PhxNDc3Y8OGDZBIJODxeOBwOPU+fAYGBgaUy2UUi0VEo1FcvnwZLpcLR44cwdLSErxeL9LpNACAxWJh586deOKJJ2Cz2dDb21sTZJDvYWBYLh76oKJcLlOtRKFQQD6fh9frRSQSwdzcHBYWFuDz+ZDNZunPRCIRzM/Po1QqwWazoVwuQy6XN0xQQRYUcl4AIBQKG+b4lhuyKyuVSiiVSvT8SRBYvSu7GSTLRP5H/p3D4dAdG5u9NjXL5LMhnwWbzX5o7wOGT9azbDaLZDKJpaUlTE5OwufzYXp6GktLSwgGg8hkMgCuBQw6nQ52ux2ZTAZKpRIymQwcDgd8Ph98Pp8GF2sRct+Xy2XkcjkUi0WIRCIIBIJ6H9ojy9q9m+6STCaDSCSCpaUlfPzxxwiFQhgeHkYoFILb7UYikaAPIGFxcRH/7//9P2g0GkxNTcFqteKll16C2Wyu01nUkkqlsLCwQHcpAoEAL774IlQqVb0PbUUgKd1QKITx8XH4/X5cunQJ8Xgcfr8fiUTilj9LnFsFAgFkMhn4fD5MJhO0Wi3a29vR3t4OHo8HkUi0JgOLfD6PYrGIXC6HZDIJqVQKpVK5Js+F4fZUKhWk02lkMhmcO3cOb7zxBsLhMMbGxpDNZhEOh+mzAnySgRgfH8fS0hL4fD6USiWMRiOee+45OBwO9Pf3Q61W1/O0HohSqYR4PI5EIoGf/OQnGB0dxW/91m9h69at9T60R5aHLqggu1aye8vlcohEInC73RgZGUEoFMLFixdpMEF2+tUpwFwuB5fLhUgkgpaWFmSzWaRSKZTL5brXISuVCgqFAvx+PxYXFzE6OgqhUIh4PA6JRAIOh/PQ6T9I3TgajWJiYgI+nw8DAwPw+/2Yn5+nqd6bwePxYDabIRaLodVqIRaLEQqFYDAYIBAIYDAYIJFIIBAI6n5t7wZyf5OMTT6fp/dnNBpFuVyGWCy+abbi+vviVv/M0DiQ601245lMBrFYDFNTUzh37hwSiQQCgQDK5TK4XC7NvgGfXNNKpYJwOIxyuQyn0wmtVouenh6w2WysW7eubue2HFQqFfqZjIyM4MSJE3j55ZcbYq2
+E+QdBVy7vtd/rfqfr39WqzOujcZDFVSQF26pVEI4HEY0GsWFCxdw9OjRmog+Go1SsSZwTcgkEonoCzmXyyEWiyGXy+HKlSvw+XwYHBwEh8OBVquFXC6vy/kVCgUUCgXMz8/jjTfeQDAYxNDQEDgcDrLZLEwmE1588UW0tLSAy+Wu6bRmNU6nE4cPH8b8/DzOnTuHeDyOpaUl5PN55PP52/5sqVRCMBgEl8tFMBgEm83G1NQUpFIpBgYG8MEHH6CzsxMvv/wyZDIZFeY2IiTFm8vlcPLkSczNzSEejyObzSIejyMajUKpVKKlpQU8Ho/+HFmUtmzZApvNBjabTbuaSBmIBKQMjQMJGguFAiYmJuD3+3H+/Hlato1GoxAKhdi8eTNUKhU6Ojogk8mgUqloOZTNZiORSCAajcLj8eD9998Hh8OB3+8Hj8e75fyGtUKhUMDY2BimpqYwOzuLcDiMiYkJbNiwAQqFAnK5vCFfvADoZiAWi2FhYYFuEAqFAqLRKF3fisUixGIxpFIpRCIRVCoVFAoFurq6IBAIaCNBo9CYq+cDQLQTgUAA8/PzOH/+PN5++216wW4GSY2TxTaVSiEej6NUKsHv9yOTyWBubg4WiwVisbhuQQU5t1AohKGhIXi9XrjdbvriVCqV2LBhA2w2G31ZNOoDdbdUKhX4fD6cPn0ac3NzuHTpEs0uAbgrPQQpb1XvDFgsFiYnJyESibBv3z7s378fwLUAs1EhnUuJRAIff/wxhoaGEIvF6E4tHo9DLpejra3thsCI3AtCoRB8Pp9mZthsNvh8PoRCYUPsfK7fld3v7wDWfvaFdHdkMhkMDQ1hcnISx48fx9DQEHg8HrhcLhQKBex2O7RaLXbu3AmtVgudTgexWEyvbyKRoGXf06dP05dZJBKhpZK1SqlUwuzsLGZmZhAIBJDJZBAIBOD3+8Hlcuu2Vt8N+XwesVgMHo8Hp06dojqZQqGAYDCIdDqNVCqFTCYDjUYDtVoNqVQKvV4Pq9UKu91e05nYKPf7QxVUFAoFDA4OYmZmBqOjo/B4PBgfH6cCnushL96DBw9ix44dNHU4OzuLH//4x4jH47RV6/Lly0gkEvjc5z4HrVYLNpu96tEheVG0tbXhG9/4Br0ZA4EAFhcXEQqF8OGHHyKRSECtVkOpVEKpVMJkMoHH49EXx1qCxWLBYDBgx44dUKlUWFpaQqFQAIvFgkQiweOPPw69Xn/H30MyPMFgEHNzc1haWgJwbbewsLCAkydPoq2tDSqVCnw+f6VP664gQVA6ncbS0hLi8ThGR0fh8/lw6dIlzMzMIJ/Po1Qq0QxGpVLB9PR0zXUmL2qBQIDJyUl6n5OAQiKRYNOmTVAoFGhvb6cB9kreK5lMBgsLC2CxWDCbzfS583q9sNvtMBqN4HK5NPgh50EEyteLd/P5PJLJJPL5PJaWllAul6HT6WoykOQZIHA4nFuWihoBcs8GAgGcOXMG09PTiMViUKlU2LdvH/bs2UOfb6FQCIPBQANEIkAGrpUAC4UCZDIZFTaSz3atrQe3ovqFKhAIIBaLa7J1jUKlUkE2m0U+n8eVK1dw9OhRBAIB+izncjmUy2W6VpdKJbDZbKTTabrJnZ6ehkqlQigUglwuh1gsBp/Px7p162A2myEUCmlQWQ8euqDinXfewalTpzA5OQm/33/b72exWOByudi+fTu+9KUvgcPhgMPhYGxsDB999BGKxSIikQhNN4+OjmLdunVYv349ANQlqOByuTCZTHjhhRfg9/tRKBTgcrloUPGrX/0KV69ehV6vh06nQ2dnJz71qU9RkeJaXETILozH42FkZASZTAZcLhdGoxFf+cpXYLfb7/g7CoUCTpw4AafTCQ6HQ8sGmUwGHo8HZ86cgd/vx/bt21fhjO4O8uKMxWI4ffo0FhYWcPToUfh8PiwsLNS0DRJyuRzi8fgNvwcA5ufna/47h8OBQCCAXC7H448/DpPJhG9961sQiUQrnlJ
Np9M4evQouFwuPv3pT0MikeDQoUM4ffo0nnvuOTzxxBOQy+U3qPiLxSKKxSINJjKZDDKZDOLxOObm5hAIBHD+/Hnkcjn09/fDarVCKBRCIBBAoVBArVbT8yLdD40aVOTzeQwNDWF6ehofffQR5ubmqMh43759+OpXv1rTwXQrWCwWMpkMZDIZ1eJUe1Y8TLBYrJrAqtGoVCq05HH06FG89tpryGQySKfT9Dnl8/loaWmBQqGg76RUKkVL+ouLiwCAI0eOgMfj0Q6eb3/72/jUpz4Fo9FY14xr433q9wFJEUajUQQCgRtaRG8Gh8OB2WyGWq2G1WqFRCKhKSSVSoXu7m4IhUIMDQ0hlUohnU6DzWZjcXERkUgEEokEUqm0LtFg9ctg3bp1kMlkuHTpEhKJBLLZLO1TJ/9OOh42b95MRYlk99YIKe87IRKJYDKZsGXLFqqZYbFYkMvlsNlsUCqVd/wdhUIBHR0dEIlE8Hg8mJ6epostUFsaqTf5fB7lchmJRAKRSAQzMzO4ePEiQqEQFhcXkUgkakpAACCXy6FSqcDhcMDj8cDj8eiiBFw7v7m5Ofj9/htac0l5L5/PIxQK0d3OSizKuVwO6XQabrcbk5OTKJVK9FkaGRmB0+nE4OAg2Gw2VCoVdDodJBIJzUYtLi4imUwimUzSNH4ikUAymaRfm52dRT6fh0AggNvtpsG4XC6HUqmEXC6H2WyGRCKheipSTmik9uJisYiFhQW4XC5ks1mw2Wz09/eju7sbXV1ddxRlk4xOJBLB4OAgxsbGUKlUIJfLsX79erS1tUEqla7yWa0sRJyfSqUgkUjqfTiUUqlEmwPGx8cxMzODyclJuv4Q75C+vj4olUq0t7dDLpfT61ssFmm5xO12I51Ow+PxIJvNIhKJIJvNYmxsDAKBALt27aJrQT1K4A9FUFEoFOB0OuHxeDA2Ngan03nDons9fD4fW7duRVNTEzo6OmoeLp1Ohx07dsBoNMLpdCKZTNKa9eTkJMbHx2Gz2WggstpUq7x37dqFzs5ODA4OIhKJIB6PIxAI0Bckl8vFL3/5SzgcDvyH//Af0NTUhKamJkilUrpTW4469koilUohkUhgsViwefPmmq/d7UNDTM2ampqwuLgIp9NJO3waCaJmT6VSuHz5Mo4dO4bZ2Vl8+OGHVLRFUtjVWCwW9Pf3g8/n0xel3W6HSCQCcG1R++CDD/Dhhx9S59hKpULFYIODg5iamsJTTz0Fk8kEnU5XUypYLuLxOEZGRnD58mW89957iMfjeO+998BmsxEMBpFKpTA7O4tf/OIX4PF4kEqlaG9vxxe+8AXw+Xx88MEH8Hg88Hg8CIfDVGdULBaRTCZpSQQArly5QoME8rwIBAK0tLTgqaeegk6nQ6lUglwuh8lkosFFowQV2WwWQ0NDGBkZQSwWA5fLxYEDB/CZz3wGKpXqjhkKEjxOT0/j7//+7xGLxVAsFtHa2opnn30WTU1NK3KN60F11188HkcoFIJMJqvzUX1CoVDA6OgonE4n3nzzTZw+fRqpVIoGP2q1GuvXr8d3vvMdmM1mOBwOCIXCmtJfuVymwUQwGMSbb75J9Wbk3w8dOoRvf/vb6OjogEAgoM//arKmgwrSk01e9nNzc3ShudWuk8PhQK1WQ6FQwOFwoKOjA0qlsubh5PF4UKvViEQidLdW3cpH6rr1hOhBxGIxisUi1q1bBx6Ph+npaXg8HsTjccRiMfpZ+P1+jI2N0bS/TCaDRqOh9XPy+0hKtJFEntUipHtNVZOOiUKhAK/Xi1AoBJ/PRy2NyUtYp9NBo9HU/YVCNBThcBhOpxNutxsejweZTIbqglgsFn0B2mw2GAwGtLa2oqOjg3puCIVCOhSP/F6v1wsAdMbN9c+IRCKhL5qVKglkMhlMTU3B4/FQm/xisQg2m41KpUKD3Fwuh3w+j0wmA6FQiLGxMfB4PCwuLsLtdiMYDCKZTNJsS6lUqslOslismnZxUurM5/O
IRCIIh8NUf6JSqWh5sJFKIURDks/n6UslGo0iFApBIBBAIpHcNFNBMhQk0zU1NQW/3w82m42uri40NzdDKpU2VAB1vxCNAtHNEe1Qo2kqKpUKotEofD4fQqEQ0uk0zbYajUasX78edrsdFosFGo0GAoGgRttVbXKnVqvB5XLR29sLpVKJkZERmp0non2fzwelUkmz0qvJmg4qMpkMnE4nJicn8T/+x//AwsICXSxuhVwux5NPPgmr1YpvfOMbaGpquuHm43K5aG1tBYC6RHp3C4fDoW2Qv/Vbv4VCoYCjR4/i9OnTuHz5Mj7++GP6UnW5XHjttddompfH42Hv3r1Yt24d3cFZLBasX78eIpEIcrm8IWuS90o+n8fExAS8Xi9+9KMfYWBggGZ0SJ29q6sLBw4cgMViqbsTX7lcxszMDM6fP4+zZ8/i8OHDNENB4HA4sNls0Ov1+M3f/E3s3r2b6gaAT16i179sNm3adNuAGwAte6zUy2ZxcRE/+9nP4PV6adscCWh1Oh2kUiltoySGTtFolOpBMpkMbQcnQcOdSlfk6+T3BoNBjI6Ogs/n49SpUxCLxfjt3/5trF+/vqGCCkL1nI8jR47A7Xbjc5/7HPbt20fLXdU7WpK5OXr0KN566y3Mzs5ienoaGzduxDe/+U3YbDZotdoaEexapVwuw+v1wuv1IpfLgcvlwmq1orW1taHW7lKphKtXr1KhdalUgtFohMVioZo+lUoFg8FAyxbXP4OVSoWWgg0GA1paWhCPxzE9PY2JiQla0rxw4QL+4R/+AZs3b8aLL77IBBV3C3l4QqEQXC4XFhYWqKL/ZhANgsFggM1mg91uh1qtvqmghYh9Gl0dTRZj4FqJoFwuw2630wWb1Jaz2SzK5TLi8ThtSwRAW2RJUJFOp6HT6WjakNTmSeZiLegvgNpFOJfLYXFxkS6sTqeTvnDFYjGsVit9SOvpREnS9tXGZj6fD/F4HBwOB0KhkJYDhEIh2traoNFo0NzcDI1GU2N61MiUSiXaJkd21OR6iEQiqNVqxONxJJNJ+v3E/Ox2997d6mGu7xgJBoMQCARIpVJ3DLhWGzabTQN8Pp+PXC6HQCAAkUiE+fl5JJNJ8Pn8GqOn6u6ChYUFTE5OUk8XsnHQ6XR3LJ2sFUi5kFgAkLZpskNvlHMk97dYLIbJZEKlUkFzczPMZjNaW1upCd/tZkxVm14BoJkMspkg9240GoXX64Xf76dr4Wp+Do2/Ct0Esij4fD68/fbbcLvdSKVSt/2ZdevW4Rvf+AZsNhv6+voglUobqua2HLBYLHR3d6OlpQWf+tSn8M1vfhMulwtnz55FLBbD5OQkotEoXC4XkskkLl++jNnZWXqjisVivPnmm+Dz+RCJRBCJRHjyySfR09ODpqYm6HS6hp8tQZwHU6kUZmZm4PF48KMf/Qhzc3OYnp5GoVCA3W5HU1MT+vv78alPfQparRYOh4MGUfWgWCxifn4eoVAI77//Pj766CNEo1EA17pfent70dzcjJdeeonOb+DxeDAajTU71UZHrVZj+/bttITBYrGgVCohEomwYcMG6PV6uN1u2mK3EshkMvT09IDD4WBubo6+lKsN8RoBsViMxx57DFarFblcDvPz81haWqJdbfPz89BqtVSALBKJUKlU4HK5EA6HaRdcpVKBWCyG0WiEw+GAXC5vqNLAg1AsFuF2uzE+Po5UKgUWiwWZTNZwtvtklALp/Ein09BoNJDL5VSovFxt3ESsbLPZUCgUaKCyWmvEfQcV8Xgc/+f//B+88cYbmJiYQKFQQG9vL373d38XX/nKVwAAg4ODtK1renp62VLLpL6YSCRoluJ6H4rqzgYOhwOr1Yr+/n7odDoYjUbw+fw1sxDfLcS7gdykOp0OOp0O4XCYmqmIxWJqepNKpai4Dbj2uY6OjtLfJxQKodVqweVyoVQqodFoANy7rmE1ITvbZDIJp9OJ2dlZDA8P0/Qon8+HVquF1WpFW1s
bOjs7qRC0notQuVxGKBTC3Nwc5ufnsbCwgEqlQoM9i8UCh8OB9evX09IU0Qnc6bhvt/te7WdAIBDAbDbTtuBisUhr4BKJhL4Qbpb+XS54PB4dECgUCqkmo9GCCg6Hg+bmZuq5EQwG6QtpbGyMptDz+TyEQiEUCgUqlQqcTicikQhmZ2cRj8chFAohk8noZ/ywiDOBa887EdETT4fVfoneDRwOhxpYGQwGOvhsJUo0RGdIMtSrnX27r6Digw8+wNe+9rUbfCAGBgbwyiuvoFKp4JVXXsF/+S//BQDwx3/8x8taqyZBBbEzvb7Fjs1mY8eOHejo6EBTUxPa29ths9nQ2tp6gzHMwwqXy4VIJILZbMbzzz+PXC6HUCiEXC5HVfakB56468ViMczPz9MArVAo4NixY5iamkI+n4dEIoFcLm9I61vSajoxMYHDhw8jEAhgYmIC6XQaSqUSWq0WGzduhE6nQ09PD3UhJKWDep9PoVDA8ePHMTIygpmZGVQqFdryuGPHDnz5y1+mJZpqi+27CShSqRQKhQKt1ZKUKJvNpkHJaj0TBoMBL730EoaHh3Hq1Clq6pVIJHD69GnIZDJ6LxIzr+UmmUxiZmYGLBYLU1NTyOVy+OCDDzAxMYFnn30Wvb29y/437weBQIC+vj60tbXBZDLB5XLh0KFDOHHiBLLZLEZHR2kGjpRrASAcDiOTydBysN1ux65du9Df39/QG4L7pdrOmsvlIhqNIp1O07JuI0A2AOS5q1QqK3YtuFwuNcSqR8n6noOKX/ziF3jppZdQKpWwf/9+/H//3/+Hrq4uDA0N4dVXX8XCwgL+5//8n1i/fj3eeust2O12fOtb31rWg66umZN+9WrYbDY6Ojqwbds29Pf3o7Ozk4oT6+GEWQ/IC4TP50MqlaJSqaCpqYkK3EqlEkwmEwQCAWKxGFUMezweGqCVSiWMjIxgdHQUnZ2d2LJlCxWH1vslXA2pk+fzeVy9ehU/+MEPaF1RIpFg69atMBqNePzxx+FwOGC1WqFQKBqqlFMqlTA+Po5Tp04hFAqhUqlQIanNZsP69eshlUrvWc1Nas6JRIJadJN7gMfjQSwW0wVuNa4pyaSVSiVotVrEYjEEAgHk83la7qk+9pUgm83C5XKhXC7D5/Mhn8/j4sWLcLvd6O/vb5iggsfj0bZXqVSK3t5ezMzMUE+aYDAIAJibm6M/c/1nxmKxYDKZ0NfXh87Ozody7SOGaOS9QAzRRCJRQ2WkV2utYbPZtHukHvbd9xRUeL1evPrqqyiVSti9ezcOHTpEa3N79+7F9773Pbz66qu4evUqvve976FSqeBP/uRPlt32mFyc5uZmfP3rX6cPGNlhczgc7NmzB62trdBoNDRiu1uPdFJaCYfDa94bv5pqwysOh0MHEJEhax6PB01NTQiFQjhz5kzNIk9MV4RCIYxGY12Ov9qTIJFIoFAoIJ1OI5fL0fbLwcFBxONxWrtXqVTYs2cPDAYD2traqK1tI2Qnqql22iNaAtJ2NjU1hY8++ggqlYqWo+6WQqGAS5cuwe/3Qy6XQyqV0lZskUgEo9FI0+MCgQBNTU2QSCTUSI4MqCKGa8s1Z4BoKDQaDc6fP0+da68vY1bP8bhTkFGpVKjOhMPhIBAI3HSCLemGAkDtj+/ka1NPiGCTy+XihRdegN1ux8DAAA4fPgyBQAC9Xo90Oo2ZmRlqplSN0+mkXgZ6vR4KhQI6na6hXrj3C4/HQ29vL4rFIj1/kuF72DPSt+p6UqlU6Onpgc1mo+tcwwo1v/e97yGRSIDD4dD2xGq2bNkC4Fo66uc//zlaWlrwjW98Y/mO9t8gu3Cz2Ywvf/nL1K6X1EOrxV/345BXLpcRCAQeqqCiulOEYLPZYLPZqA4hHo+jubmZur2RoIKMTp6YmIBara6bQr5YLFKn0MnJSYTDYUxPTyMajeLkyZO4evUqDZy6urrw/PPPo7m5Gf39/TVzTxpxoSmXywiHwwiHwwA
+sVbOZDI4e/Ys0uk0pFIpLdfcLblcjrYhEmFYoVCg5SyHwwGxWAy1Wg25XI5XXnkFDoeDlpHa29uxc+dOatBDjNceFLFYjK1bt1JhZqlUQjQarQkqrl8079T9QXwu2traIBKJcPHixZsGFZlMhgYVJC1dPVOk0SCdSpVKBTt37sS2bdtgt9sxPT0NjUaDtrY2xGIxeL3emwYV8/PzcLvd6OnpgVKphMPhwL59+2qGUa1VeDwe1q9fD6FQSDdGZOPwsFPtXVGN0WhEb28vOjo66jLB9K6DinQ6jR/+8IcAgFdeeYXOv6im2i65UqngT//0T1e0zY0MRCJuidUfLolS7+eByefzdLwwGQ1MRsyS+nwjWcDeL9WfTblcRiaTwfT0NObn52kLH1l0dToduru7oVarV30RIkZI4XAYU1NTCIVCmJiYQDKZxNLSEtLpNPx+P/L5PBUztra2oqWl5QbNRKMuoGw2m/atRyKRmpcDsV4XCoVIJpP3HFSQdslMJkNNoYgocXFxEQKBAMlkElKpFCdPnoTf76eD+YgeQ6fToaOjg1pmP2itmsfjobOzExKJBOfPn6d18Hw+D6vViubmZjruOZvNYnZ2lgptSYBMsicCgQBCoZC+TBwOB7hcLtRqNXw+H8bHx+HxeG55LCRlTpxzs9ks9X9oJKo3SHa7HQcPHqTtgvl8nt7nJKNEMlMk6xqNRjE+Po5EIoHW1lbodDoolco1LdysvhcaedOw3JDnmdyv1ZBntG6Oz3f7jSdOnKCDil5++eWbfk/1S72trQ1f+9rXHvDwbk/1rulmtcT7/UCTySROnjyJyclJxGIx6nwmlUrhcDjQ0tLyUKQOCcSN0Ov14v3334fT6aQ7ZlKb6+npwa5du+py3tFolKZw/+Ef/gGJRII6YpLdJXlJ7tmzB9/5zncgl8uh0WiojoYEno0Kl8tFT08PSqUSTp8+DbfbTb8Wi8WQSCTue1dJsm3JZLJmcFEymUQoFKopaRw5cgQcDofWqYma3mg04qmnnkJTUxNeffXVBw4qxGIxNmzYgJaWFnz88ceIx+M023jw4EF89rOfhVarpUHWm2++iVgsBolEQkXIfD6fBvlqtRo2m41OIi2Xy1hYWEAoFMI//uM/4ic/+ckNI9GrP0viPutyuRCNRhs2fV6difvd3/1d+P1+fPTRRygUCrS0qVAoIJFIsHHjRlitVkxMTOD48ePw+Xz48Y9/DK1Wi3w+j5aWFjz33HN1K2cuF+Q90GjXaiUhc2G8Xi/V1hCIW7ROp2vsoOLkyZMArr1knnzyyZt+T/WL/bvf/e6qmPEs5w6UlAFIN0QqlaI97AqFAiaTiXruP0yCJ6JRiEQitG2temgXme5IpuGt9o1KRnsT7UwqlUI2m71pqjqdTiMYDCKTydCdLfF0IP/fiClfNpuNlpYW5HI5eL1eahBF7LSvb3UkO1SZTEZnBLDZbDogjNThrz/P2513uVymO5/rv4/L5cLj8YDFYsHv99e82O8HkgHj8/mwWq1IJBK03bm1tZU+a6TTqLOzE7FYjNovk2CX+M3IZDI6gInL5aJcLkMmk6FQKNQ4jVZ/dtUQUTNpR2zk55tcazIaOxgM0vEEXC4XDocDFosFra2tsNlsKBaLcDqdiMfj8Pv9iMfjcLvdNSO1G/GZuBfW8rHfD6VSCUtLS5idnUUikUClUqHvJZFIRE3yGjqoGBsbAwA66ORmDAwMALjm9EW8KtYSqVQKHo+H+hqQATw8Hg87duxAb28venp6aH3+YbmRo9EoTp06hZGREbhcLsRiMRQKBfD5fOzatQsOhwPr1q2r201KXBjT6TTS6fQtAwoAeP/993HhwgX6kpBIJGhra4NWq8W/+3f/Dna7nXZBNBJCoRDPP/88Dh48CIPBgKamJly5cgXDw8M3fG91S+jmzZvR1dVFjbsikQjGx8chFArR1dVFDZGAOy+8hUIBv/rVrzA5OXnD90ciEXz44YeQy+XI5XJ
oamrCF77wBbS0tDzQeQsEAnz+859HMpmkQk0y/4AIigUCAZ555pmaYKD6/8lLtlqUVi6XIRQK6VTe28FisaBWq2GxWGC326FSqeqimr8XstksAoEALl26hH/6p39CMBhENBqFQqHAb/zGb2Dnzp30xeJyudDX14exsTF8//vfRzwexzvvvAOVSoW9e/dCr9fT4YIMjQ159lOpFH76059iaGgIMzMzAED9iaxWKx05UI/g+K6DCp/PB+CaI97NKBaL+IM/+AMAuK3VaCNCUuikt9vr9SKbzdIBNWQImdVqhVQqXVPndj2kFZecMxmNvLCwAL/fj2w2Sz0NBAIBdDod7HY7HTxWD0iLFBEUkhT+zXbwxJ6Y+OALhUKkUilYrVY6ZAeo3e01wsuDzWbTF2Brays8Hg9tiyXdGsAnU1lJWaKpqQl2u53u3qVSKTKZDPh8Pux2OyQSyV2LHXO5HOx2O0KhEHUmJTM4iMlQPp+n+oY7udje7XkrlUqIxWJqky2Xy2s2Lvejb6iutd/p+pKsCZnq2Mh25+RaEo3R4uIivF4vEokE9SdoamqCwWCggSbRwxAjLJKJJZbwqVSK6tPWIsSnopE7eJYLsn6TcrXT6aT6K6KlUCgUdR2Od9dPD3kwY7HYTb/+l3/5l5iYmADQ2I6LN4MItM6ePYt/+qd/opPkyuUyNBoNlEolNm/ejB07dkCr1db7cB+IYrGI8+fPY2RkBHNzc/B6vYhEIpibm0MikUA6nQafz4fD4YDJZMJnP/tZbNq0qWY0/Gqj0WjQ398Ph8OBvXv3IpPJ0BbEUChUY+fsdDoxPj5Op3yWy2XMz8/D6/Xie9/7HgwGAx577DH09/fDbDbDbDY31FRWDoeDrVu3orOzE9FoFJFIBC6XC8PDw2Cz2VCpVBAKhWhtbaUTDknan81mI5fL0XkhxHnzbjMV5XIZ+/fvp/eD2+3G2NgY3nrrLfoZ5/N5XLp0CbOzs/j617/+wOdLHEOrMyprbf1YTYjJ09WrV/HDH/4QLpcLuVyOalTsdjsV0pLrLZfLsXHjRohEIhw6dAhzc3NYXFykQ8eCwSCeeuopdHV11fns7p1CoYChoSFcuXKF6sAeZsgsI6fTifn5eUSjUboJfPrpp/Hkk0+ir6+vbsZXwD0EFa2trTh58iRGRkYwOztLp3gCwNDQEP70T/+U/jvZ5a9UxP+gbV/VHzQZwBMKhTA2Nobjx4/TnRkxByKpUeJ5sVYhJlEjIyM4deoULly4QAPBaogXhdlsRnNzM/R6fR2OtvZ4hEIhvRak/ZUMhCIdOuR70+k0BAIB/H4/VfWXSiU6/pnP51NnVb1eT3e0jQDR78hkMhiNRhQKBTrCnM1mw2QyQSQS0e4IogMgP1sul6HX68Fise55JkilUoFSqUShUIDZbKZuq9UBV7lcrtG1LMf5NkKXBcn+NEJgeSuqTd6cTifOnj2LSCSCUqkEiUQCm80Gi8VCbc4JpLSh1+uh1+uRSCTg9/uRy+UwOzuLcrmM/v7+Op7Z/VMsFuH3++FyuZblfmx0CoUCAoEA3G53TecHmVy8adMmaLXaum6S7vqt/+yzz+IHP/gByuUyXnzxRfzd3/0dHA4Hjhw5gt/8zd9EJpPBc889h3feeQf5fB5/9md/ht/5nd+hEdNyQKyYiQPkvUBq7EKhEDqdDjwej6bQ3W43PvroI4yPjyOfz9OUukgkwrZt22CxWGA2m294WNcS+XwebrcbgUAA586dw4ULF26wWSdmSAaDAY8//jiampqgUCjqdMQ3QkoyJK1bLpehUqlq0p5WqxW7du2iQ3WWlpZw5MgRLC4uYnFxEalUCgMDA4jFYnA6nUgmkzAYDHSgWCO8WKq7MdhsNgwGA/bs2UPbBYlIsjoYqtYYLEcwT9xqV8oqu5HgcDhob29HR0fHLcu7jUClUoHH48H09DQGBwfh8/kgEAjQ29uLtrY2vPLKK7BarVCpVDU/R4zDBAIBbDYbgGs
unJlMBj6fDxwOB4lEoh6ndN+Uy2UqSo7FYnTj0MgC2weBlHojkQjeeecdLCwsIB6Pg81mw2KxQKvVorOzEwaDoe4twne9+rz44ovYv38/PvroI1y+fBnbtm2r+fpTTz2Fn/3sZ9i4cSMmJibwx3/8x/jjP/5jjI6Ooru7+4EPlNSSCoUCPB4PBgYG7mmxEwqFUKlU0Ol0UKvVdHdEWs/OnTuHqakpavcKXBORORwOdHR0QKVSNYyP/P1QKBRw9epVTE1NYWBggGYoql+gIpEILS0tsFgsNJiqZ9njeu6m7kucMwuFArZs2QK/3w+/309dIlOpFCYmJjA5OQmv14tCoYD169ejqampYSzcr5/rQTpY7vZnlyPwJV1QD4v52+0gHiEdHR2Qy+X1PpxbQiaQHjt2DKOjo4hEIjAajejq6kJHRwd27dpF7eevh8Vigc/nw2Kx0MmVRE9Fur9We0T2g0CCCtIVRjxY1nIm+XYQjVMoFMKRI0fg8XiojqapqQk2m61G+1bP63jXQQWHw8F7772HP//zP8e//uu/YnZ2FtlsFmazGV/5ylfwve99D1wuF6+//jq+8Y1vYGBgAEKhEJ2dnQ90gET5T0b/+nw+jIyMYHx8/J5+DxExyeVyLC4uQi6XU3vejz76CFNTUwgEAgCumXitW7cONpsNu3btgtVqXXNmVySyJe2Y4XAYQ0NDcDqdN8xYkMvldITygQMHYDKZaM2+kUVrt4MEIGq1Gs8//zxcLheMRiMWFhYwPT0Nn8+HcDiM0dFRFAoFukttamq6afBIDKOIQHI1HlpiWkSuITFf4/F4kEgky1I2INk68izMzMzA7XZjeHgY4+PjmJ2dpS6XpFTR3NwMk8nUUFms6ymVSnA6nZicnITH47lhA1KtM2GxWFCpVGhqaqr7Lu9mkOtDxnzPz88jGAzSa3HgwAE4HA6qo8jlciiXyzVTmkmbbTKZpAMYSdeLzWZryCGBt6NYLCISidASJ+mGqvcLdaXIZDIIh8OYm5tDKBSidgcCgQAbN27EunXrYDQaG+L87+mNIRQK8Sd/8if4kz/5k1t+T29vL86fP//AB0YolUo0bf/6669jcHAQLpcLi4uL9/R7yMNFhGHVplnEE4D8u9FoxAsvvIC2tjbs3r2bzrpfS5CAIpPJwOv1wuVy4dSpUxgfH7+hdKTT6bBt2zb09fXhq1/9Ku1CaIQb9H7hcDgQiUQQCoU4cOAACoUCurq6MDY2hg8++AAffvghAoEAPvjgA4yNjaFSqcDhcNzS2IlYhBOjndUog3k8Hhw9ehTxeBxLS0tQKpXYtWsX1Go1bSN9EKo7aPL5PLLZLN5++218+OGHVMRLumiAT+yid+/ejebm5oYWLZdKJXz88cc4duwYrl69WvO1m9l/m81m2Gy2hsrMVZPP55HL5TA9PY2zZ88im82Cx+Ohq6sLL774Yo1nSCaToYJr0tVCrMhDoRBtlWez2bDb7Whtba1xQ14LEF0JmURcLpfB4/EeOg8hQiQSwfnz53Hx4kX4fD7aeSUQCHDgwAFs374dCoWiIcrzDbsNJbuzTCaD8fFxzM3NweVywefz0foZ8EkNmdQRSavUrX4nqS9Wvyzz+XzNIkPqybFYDJlMhtbxG+GC3QnSKppOp2n3wNWrV7GwsACfz0eDJy6XC6PRCL1ej+7ubvT19VFfg4dhJgCh2mpcr9ejUqnA6XQiEAhgYWGBtmT5/X7ajkmCzurFqVwuI5VKgc/nr/pcAeKfEolEIBAIoNVqqWMouVakpfReKJVKdCCb2+1GJBLBzMwMNd+qfo4EAgEMBgMMBgO6u7vR0dHRsC9gQrUd+Z1o5F0uMSWLxWIIh8N0l0pabblcLjWlI+6yuVyOCphVKhVEIhHdZBDbdg6HA71eT9Pmaw1SEiBZCp1OB41GsybP5VaQoD4UCuHq1at0Tg6Hw6kxZKzeKJMNJXmnVSoVqhUkujHy38n3kHISALqhvN85Pw0bVOR
yOdr69Dd/8zcYHh6mwrHqnRNps3vsscfAYrHw8ccf3yBAJJAPsVolTB7EapLJJKanp5FOp9Hd3Y1cLge9Xg+RSLRyJ7xMkCmeU1NTOHz4MJxOJ95//32kUim6mBAnwmeeeQaPPfYYHA4H2trawOfzIRKJGnZxfRA4HA6am5tpqre9vR0nTpzAP/3TPyEej+P8+fNIJBJU/CaRSGrqs+TFK5fLbxDCreQxi8VixGIxHD16lBqxKRQKzMzMoKWlBTabjabuTSbTPV23bDaL4eFhzM/P4x//8R/pPJXq4XwElUqFT33qU7Db7fjCF74AnU63JkpjNzOxqv73au1Ko97zxWIRly5dwvDwMIaGhhCNRqnonAQVRO1Pgl+SkYhGo+jt7aXuooFAAIuLi7QNsa+vD08++eSat+rm8/nYuXMnmpub1/y5ECqVCg0OL1y4gO9///v0HSiTybB161bY7Xbqn0Tu33w+T718yuUyzXCRgZEKhQKFQoEG3CTYdDqd4HA46O3thVwuh1qtvq93XsOuCuTlH4vF4Pf7EQqF6NcUCgUtSfB4PJhMJrS0tIDFYmFubg4sFguJRII6L1YHDXcj7iTGMnw+Hy6Xi7aXkkivUcVAxMArFothYWEBCwsLcLvd8Pv9KBaLEAqFEIlEdFplU1MTWlpaoNPpanbnjbq4Pihk8VUqlWhpacHExAQ1h0qn01RFnslkbniYyA5wNYWLQqEQer0ecrkcPB4P+Xye9qW73W6Uy2Vks1k6NK3am+BuIFNe5+fnMTs7C6/XC6DWxpq8vLRaLW0zlkqla0K0fH3wQFhLgkTg2vFGo1GEw+GaLG11RrH6fEgGI5vNIhgM0vbrVCqFRCKBVCpFO6g0Gg0UCkVDtPU+CCwWC0KhcM2bExKqnTMjkQgWFxcRDodvmlUns4GAa59DMplEJBKhmZxsNovJyUkkEgmqoyGmdiRDkUwmMTc3RzMgKpUKEonk4QsqcrkcTc+Wy2XqvPfpT38a+/fvh1gshkwmg1Qqhc1mQ7lcRl9fH2ZmZvDRRx9hYGCApgLvhXg8jjNnzkAoFGJ4eBhKpRIbN26EzWZraJOYcrmMQ4cO4a233oLb7cbMzAx1yJRKpXTU9J49e+BwONDe3k59Goj4cC0ttvcDi8WCXq+HSqVCMpnE8ePHEQgE4PF44HK5MDQ0hFKphA0bNtRdtGcwGKBSqaDRaOByueB2u3H+/HmkUin86le/ohM6SWblVnXx6qxDdUknk8kgFAohm81iaWkJQG1KlMViwWAwYPv27bDb7Xj66aeh0Wjq/rk8alQqFYTDYQQCASSTSQCg2cbrrwWbzYZarYZEIoHL5aIWzgAwPDyM0dFRxONxavzW3d0Nk8n0ULyIiX5krZ8L6XQk5mQffPABRkZGasYTpNNpXL16FXNzc0gmk9BoNPTniWEeEV8TG4ZSqQSxWAyBQEDLI6R8RLIVbDYb77zzDjQaDb773e/el39JQwcVJNIiJ05eflqtFi0tLdTbXyQSQalUolwuw2w2I5VKQS6X02mL1VSb3FRPsKxWwJPoLx6PI5FIUOOldDqN7du31+kTuTnkJUBuwunpaZw5cwbxeJxOWCW+BmazGUajET09PbBardBoNKuuD2gEiGGUUqmEXC6nu79cLodkMol4PN4Qlr/EtEin08FkMqFUKkEoFFInUbLjvl0geH2QcKegkTwbpKZKjN+sVivVcjyMQrhGJ5fLUVNB4Nq9cb2dOVkzyfXL5XKIxWI0aCZ23vl8nrbXy+Xyhs283i3k/iZZtYfh/iQmZ/Pz87h06RKWlpZqNgflcplO9r1y5QokEgl9dgOBAJaWlm7I0gOfrAE3+xoAOmQuEAggnU7f17E3bFBxPZVKBfF4HOl0GufPn0c2m4VYLKY+51KpFNlsFidPnsTCwgLm5uaooAn4pPtDIBBQccvu3bthNpuxuLhIXcquXr1K5xqQbAnp5ybDlBqFQqGARCKBeDyO119/HdP
T0zh37hyCwSD129Bqtdi8eTOam5vx9a9/ne7Sq50YHzWq64iRSIS22PF4PBgMBhiNxob6bLRaLb7whS/A5XKBxWLB4/Hg8uXLiEajdzXX4mb/fDM4HA66urqoe6xSqURPTw+eeOIJSCQSqNVqKgpkWD1IWZNoogBg06ZNOHjwIPr7+8HhcFAqlZBMJpFKpXDixAlMTU1hZGQEV69exejoKC5fvoylpSVkMhkoFAo8+eST6OzsbGizr7uF+FN0d3dj165dq6Z5WikKhQLGx8exsLCAS5cuYX5+/ga30HK5jEwmg0KhQDdFcrkcYrEYzc3N6OzshEAggEwmo8aPXC6Xli6J8Pd6yDwhtVoNu91+X8ffsKvDzRZAIlqZnJxEPp+HTCaDUqmk35vNZnHixImbCjWrhwaZTCYYjUbs3LkTbW1tmJycxNzcHLhcLpxOJ43Wqm1xySjtu1GSrxYkWvV6vXjjjTdw+vTpG3ajEokELS0t6OzsRE9Pz0OljL5fSPaLBI8kUGSz2dQiu5FSqGTWh1Qqpc6fk5OTiMfjAG58Vm6lG7pTgEGMdIgPhcViwbp169Da2vrQtuqtFYjRExlyaDAYsGXLFjq7hnhQRCIRnD17FhcuXIDH40EwGASXy8XY2BjNwhL/oJ6enjUhPr8bOBwOzcSu9fJcuVzG7OwshoeHMTc3d4OvEIGUMIiZGXAtW6NSqaDVaiESiah7tFgsBo/Hg16vh1Qqhdfrvaktg0AgQF9fHxQKxX370DRsUEFaHsvlMpqamqhIiaT05ubm6IdFyOfztOZIUkFkZ6XX67Ft2zZotVrs2bMHBoMBzc3NkMlkUKlU6O3txdatW7F582Y4nU784he/oL3eYrEYzzzzDDZs2PDAo56Xg+oulitXrmB6ehrhcLjmRWGxWOBwONDb24uXXnoJOp1uzac5l4NKpYJkMolgMEg9O8rlMiwWCzo7O+lApkbKVHA4HAiFQhgMBnzuc59DLBZDb28vHaBUfd2j0ShmZ2fpxN3qYWtElGc0GmGxWG4InDgcDjZt2gSTyUSHfJHe97WmtWGz2Whubqafk9vtrvch3TeVSoXO6yDX0+l04qOPPsK2bdtgt9trxMZLS0twOp1IpVJU2EdakQ8ePAiLxYK+vj4YjcY1vyaw2Wxa6iaau7Ue/LJYLOh0OthsNvT399e4vJJJxAKBAGq1mk6lVSgU1KuEzEricrkQCoU103qJZ0lzc/NNZ6WQz5Note6Hhg0qOBwOtFotyuUyjEYjdDodFV3G43G6S7sVpCZMPuSOjg488cQTcDgc2LFjR00wQkQubW1t2LBhAyYnJzEyMgK/3w+FQgG1Wo2DBw+ir6+vIVTvJIOSSqVw5swZOJ1ORCKRmvkPNpsN27Ztw9atW7Ft27a6jsJtNCKRCPU+CYVCkEql6OzspCOjb2V1XC/IoiAUCrFhwwaUSiV0dHQgnU7f8LL3+Xw4evQootEo5ufna+qiNpsNer0e69atQ39//w0/y2az10zr9J0gz0A8HsfU1FS9D+eBIIFwOBymWTWi1BcKhXjiiSdqOgU8Hg/dhRJ9BTH127dvHxwOB5qammraENcqxGWWiJUfBqEmsUlobm5GLperMZmTSqV0jWptbYVEIoHJZIJQKGyYa9mwQQWxWVYoFFT7MDc3B5/PR7+HOA3m83nEYjFwOBy0tbVBo9HQEdFqtRoajQYWiwWbNm2CQqG4ZU2YPKRmsxkvvPACUqkUbcO0WCw1xiH1pFQq0XZbj8cDp9OJXC5H06JkVPju3btht9sfGjMrYmZUKpVoCxVpISP9+reCiFlLpRI8Hg8uXrwIl8uFSqVCxb/k3mj0z4nFYt3go0Fgs9nYs2cP0uk0vF5vTaZCp9NRgZ5CobhpULHWF2QCi8WCRqNBe3s7HA4Henp6EAqFarpcyPcRXxeDwYANGzY0XImQxWJBLpdDp9NRvxnSXjw3N4fLly+jVCrh3Llz8Pl8tDWYOMq2tbW
hr6+P7nxVKlVDZeLuB9JW7fP5wOVyIZPJ6K68EdboB4HNZkOj0dCsBMm+A9c0DyRTQeZRNVomsWGDCmKzzOfz8YUvfIE6a87OztLvmZ+fx8DAAJaWljA+Pg6hUIhnnnkG7e3t1BTIYrHAYDDUdH3c6qYjLyaxWIxvf/vbNxxPo7RcFgoFhEIhzM/P49y5c5ifn6cT+jZt2oSuri4888wz2LFjR0276FqHCFOj0SguXLiAYrGI/v5+KJVKKiK8HUSTc/r0afzLv/wLbbMio95JOrjRFyU2m31LN0ulUgmr1XpL5Tf5/1ud48NwnwDXnlciyCYbjpGRERw+fJh2exHy+TyOHTuGubk5yGQyNDU11fHIb4QMPGtvb0c4HEY4HEY6nYbf78eJEyfgcrmQyWRw9epV2oIPXLsXDAYDnn76aXzta1+DTCajniaNspbdL+l0GleuXEEwGKQD98jLdi2fF3Dt3iXutTezlK/+50a8jg0bVACfLH4kAr1elU+Mi+LxOJqamsDn89Hb24vm5maoVCpIpVLIZLJ7qhtePyGyESkUCggGg1hcXKxpMyMeDCRbU+209zBAREmpVIoOtBOJRNBqtTCZTLedMFkul+mUUrfbjXg8jlwuBw6HA4lEArPZfFOdQaNyq2v6sFzr5YAIs81mM3p7e5HNZjE6OlpjDkRIp9O0u4zY+TfKGsBisWAymWC1WjE5OYmFhQXamZZKpeDz+VAul6kHAam12+12aDQadHV1QalUPjQ7eeATW+lKpQK5XE4zlg/L/b+Wr1FDBxUAaFAhEAhoKpOwdetWPP/887Q/GwDdad4pK7GWiUQiePfddzE/P08HywDXFtFt27bhs5/9LKRS6ZpPcV4PsSBeXFzEL37xC3g8HmpVbLPZbjvgqlwuw+/3I5FIwOv1IhQK0WmfHR0d+PSnPw2dTtcQmhmGB4e0GfL5fKxbtw5dXV3o6ekBj8eDy+XC+++/X6NJIXqEUCiETCZDA5JGeElxuVzs2LEDDocDfr8f09PTKBQKiEajSKfTSKfTUCgU2Lt3LzQaDfbt2wer1QqVSkXLAsR+/2FaD4nVuN1uh1qtXvNdHw8LDR9UAJ9EbWtlF7nSkKmShULhhhS3XC6nY7EbYUFcTojXCJ/Pp7XEZDJJjdGqAyzSgldtbBaNRmlqWCgU0nSw0WikC/CtFl02mw2BQLDm1fKPEuRakuBCqVRCqVQiHA7XaIyI3ob4lkSjUYhEIjqois/n1/VZYrFYkMlkqFQqsFqtaG1tpWZYpFyr1Wqp0Nhut8NgMNByABGtP2yQwE+hUECpVD6U57gWYa7CGkQmk2H79u2QSqX48MMP6304qwZJZSsUCvyn//SfsLS0hKtXr9JRztXp7FAohMHBwRonQhKEbdq0CZ2dnejs7MSePXvoXIvbTeUTCARobW1dM9NqGT6BBAREiH19YFipVBAKhaiJ3MTEBB3S5nA4sHv37rrugqv9U37zN38TX//61wGAumeSYJt4EYhEItox9DAItG+GQCCAxWIBi8WivgqPojtwI8IEFWsQPp8PvV4Pv98PmUxGd+ukFPCwUt1v3dnZCYvFgnK5jKWlJaRSqZq+62KxWDO5EfhkAJPZbEZzczP6+vrQ3d0NkUhEd3S3guwIH9ZF+lGAZB2ImyxptQRqjfVisRjMZjMV8u7cubOux01KOQCooPhRh8PhQK1W004J4lXBUH+YoGINIhKJ0NXVBavVio6ODprm53K56OnpeWj8728Fl8ulNtIKhQL5fB6lUqkmU5HJZPDNb36TugiSr1UvQjKZjM6yuJuZGCQNzgQVaxOlUoldu3bBaDQiEAjA7/djcnISqVQKmUyGzn+JRqOw2+2w2+2w2WwP9bO0VpHJZDh48CBtr+ZyuQ/1hmotwQQVa5DqGrHVaq334aw6JNULYNU8BR42kdujiFAohM1mA5fLRUdHBxQKBfx+f43/SaFQoEJNMieHofEQCoWwWCz1PgyGm8A
EFQwMDI8ExCSJzWbjueeeQyAQAAAsLi5icnISkUgEu3btQl9fH8xmMxwOBzQaDRNMMjDcA0xQwcDA8EjA4/FoG7Fer0c6nUYkEsHk5CQAYHZ2Fo8//jheeuklWuYi3g4MDAx3BxNUMDAwPFIQTQyXy6XGUCqVCps2bcKWLVsgk8lqvofR0DAw3D1MUMHAwPBIwufzsWnTJlQqFTz11FPU76Q6M8EIcxkY7g0mqGBgYHgkYbFYjGESA8Myw6pcb8m4SggEAuh0unr86boSCASY836EYM770YI570eLR/W8PR7PDW7OhLqF6TqdDm63u15/vm5YrVbmvB8hmPN+tGDO+9HiUT7vW8H0SjEwMDAwMDAsC0xBkYHhIaBSqdyQjmT8FRgYGFYbJqhgYFjDkGBifn4eJ0+eRLlcRqlUgslkwp49e5h5JQwMDKsKE1QwMKxhKpUKyuUyxsfH8Vd/9VfI5/MoFovYuHEjNmzYAIFAwLRFMjAwrBpMUMHAsEapVCqIx+OIxWIYHx/H0tISHZ62tLQEr9cLNpsNlUoFgUBQ78NlYGB4BGCCCgaGNQope1y6dAmDg4Nwu91UVyEUCjE4OIh0Ok0zFgwMDAwrDRNUMDDcBblcDslkkk5I5XA4ddMqVCoVWuZwuVwYHR3F4uJijViTzWZDqVRCKpU+UoLNUqmETCaDYrGISCSCbDZ7w/dIpVIYjUbGgnsNQO7pbDaLfD6PdDqNZDIJFot1g/MpmSQsk8kgEAjA4XDoOHTmOq8eTFDBwHAHKpUKAoEA3n77bej1euzduxdisbhuw6ZKpRKCwSAikQjeffdd/OxnP0Mmk6np/pBKpWhra0NTUxP4fP6qH2O9yOfzuHr1Kubn5/HTn/4UFy9eBFD7Unn66afx3//7f4dEImECiwanXC6jXC5jbm4OY2NjOHfuHI4cOQKBQACNRkOfPxaLBblcDqFQiP3792Pjxo1Qq9XQaDQ3BCAMK8sjEVRc32r3sCwi5XIZ+Xy+5vx4PN4jZT1Mzn2lrmmhUEChUEAsFsPS0hLYbDbK5fKK/K27pVKpIJ1OIxqNIhKJIBqN0s9BIBBAqVTCarVCIpGAz+c/EpmKSqWCUqmEfD4Pl8uF2dlZjI+PY2FhAUDt/TE/Pw+fzweNRgOpVAoOhwMul/tIfE5rCSJCLhaLCIVCmJ6exsLCAubm5sDn8xEKheh15XA4UKlUkEgkmJ6ehkqlAgDIZLK6ZhUfRR76t0+pVEK5XKY3KIvFAp/PfyhusEwmg4GBAcRiMQDXAor+/v5HyjaWXFuS+lxuPB4PBgcHsbCwALfbTTMUAoGgbi+hcrkMp9OJ4eFhLC4u1gQ57e3t+OIXv4iOjg4YjUbaUvqwk8vlEA6H4Xa78eMf/xgjIyPwer03fc4HBgbwe7/3ezCbzXjllVdgMplgsVggkUjqcOQMt4IEz6lUCu+//z7+5V/+BYlEgpY/EokE/V4WiwW32w0Oh4PJyUn87Gc/wzPPPIOvfvWrUKlUMBqNTLZilXhogwrysikWiygUCjSoIBErh8O558CCfH+jBCTFYhHz8/MIBAIArgUV3d3ddT6qlYXsyEulEr2mJKgg13U5XqLk78TjcUxOTiIajdKXNwlg6nEfkHRwMBhEIBBAMpms+bpSqURnZydsNhv4fP4js5CWy2XE43EEg0FMTk5ifn4epVLpptcoHo/j7NmzsNls2L17NwQCAQwGQx2O+v6pzk6Sf64OLokWgUxevdfsJcn8AJ+sd6t9z5NjqM4UVioVei7k+SdrAfkcyCaru7sbgUAAHA4HWq2WtlY3yvr9sPJQBhWVSgV+vx9LS0uYm5vD0NAQCoUCMpkM5HI5tm/fDqVSCZPJBLFYjEwmg1wud8vfVy3+If+rJ+RBSqVSmJycxOLiIrLZLDgcDvbu3VvXY1tJyuUyCoUC0uk0Ll26hFQqBa1WCz6fj1QqhUwmg66uLjQ1NT3QwlGpVFAoFFAsFjE2NobDhw+
ju7sbn/nMZ2CxWOqW6SKLayQSwenTpzE4OAiPxwMA4HK54PP5MBqNaGtrq6k3PwrE43H86le/gtPpRDgcpi+am0GeHb/fj0uXLiESicBms0GhUKzyUd8/hUIBuVwOpVIJ6XQauVwOLpcLqVSK/vvc3Bx8Ph9eeOEFHDhw4J6C7VQqhTNnziAej8NqtUIsFsNut0Mul6/gWdXCZrMhFovB4/Hw5JNPgsfjIZvN1ohvU6kULly4gHg8jlQqhWKxSL92/vx5lMtlOBwOvPTSS1Cr1bBYLHVfvx92HtqgIhwOY3x8HOfPn8eRI0eQyWSQSqWg0+lQLBZhMpmwZcsWqNVqRKPRmlTa9bDZbJhMJlqfq3f5hETo+Xwefr8fi4uLiEQi4HA4SKfTdTuulYYEU/F4HKdOnUIsFkNbWxvkcjm8Xi+i0SiUSiWampoe+G+RHZLH48HQ0BBaW1vR2dkJpVJ5X1mu5aBUKiEWi8Hv92Nubg7j4+P0enO5XAgEAkgkEqhUKshkskei7EFIp9MYHR2Fy+W6QbR6PaR7JpVKYXFxEWw2G9lsFpVKpeF3seS8isUiMpkMstksYrEYzb5EIhEkEglks1mcP38eo6OjUKvV2L9//z3dD9lsFidOnEA4HEZPTw8MBgN0Ot2qBhUsFgs8Hg8cDgcOhwPZbBbxeBzxeJx+D9FaFAoFZLPZmqDC4/EgFAph06ZN6Ovrg91uh9FoXLXjfxDWsg7woQkqyO5yfn4eoVAIR44cwejoKBVlFYtF2oZ36tQpKJVKjIyMQCKRIJ1OI5PJ3PJ3s1gsqFQqiMVi9PX10ZeLVqsFh8NZ9ZcM2bF6vV7Mzs5ibm4OAoEAMpls1Y6hHpB2wVAohKtXr8Lv9yOVSkEul0Mul0OtVkMkEt33768Who2OjmJiYgLT09NQq9V0URWJRHV7WReLRczNzWFqago+n4+2TgJAW1sbtm3bhi1btkCpVFInzYedVCqFSCRCr5XX60U+n7+rny0UCpidnUU6ncaxY8cQDodhNBqhUqlokFbvzUOxWKSBdCaTgc/nQyAQgN/vh9frRS6XQyKRQDqdhsfjQbFYRFtbG4xGIz7zmc/gs5/9LJ588sm7vmdJuZhsykirciAQwMaNG1f2hG8COW6bzQaZTEbFuOSz8fl8GBkZQTqdRiKRqMk4k3KoVqtFb28vdDpdQ4vYyfqTy+Vw4sQJzMzM0ECuvb0dTU1NtPRaLBYRj8eRy+WQy+VqgikCj8eDRCKh/79a5/5Af6VYLOJHP/oRfvSjH+HSpUuIRqMQi8Xo7u7GF7/4RfzWb/3WqpwIuRiFQgGnTp3ClStXcOzYMQwPD98waCmZTMLv9wP45Ia92TCm6yEX85lnnsGePXuwefNmyOVyGkmvJoVCAYFAAPPz85icnITP54PZbIZUKr3jeaxlSqUSEokElpaWcO7cOSwtLSGRSECv12Pr1q1oamqCUCh8oL9Bgs/jx4/jww8/RC6Xg8FggF6vh0KhoH3v9SCfz2NoaAizs7Pw+/01aeCuri48/fTTaGtrW9UFpN7E43GMj49jcHAQExMTiMfjdx1U5PN5qkmSy+UYHx+n7YgSiaTuafJqTZjT6YTP58PRo0epQNfr9dIyCMmykFZio9GIzZs3w2azQS6X33VwVCqV6Bo5MDAAn88H4NpL/XbZ3JWCBAZqtRpqtZr+d5K11Ov10Ol0WFxcvOGer/atsNvttNOnUSEaklQqhb/6q7/C+++/j+7ubjQ3N+OrX/0qzGYz1YwVi0V4PB6aubxeWwWAZm2lUimEQmHjBxWRSATPPfccTp8+XfPf4/E4zp07h3PnzuHEiRN44403HvggbwXZVZJ6YigUwqVLlzA7O4tQKHTHYIEImAQCAfh8PsRiMTQaDQ028vk8wuEwcrkc0uk0CoUC3G43hoeHUSgUIJVKoVarV90LoFAoIBgMwu/3UxHqckCCM2D1RVm3I5vNUnX/zMwMnE4n7HY7dDod9u7
dC5PJhN7eXhiNRiiVynv+/aSURK5vOBzG/Pw84vE4Ojo60Nraig0bNtS124MsNrOzs5iZmUEqlQIAKjyTyWSwWCxQKBQNc91WAiJUzeVyNNNw4sQJzM3N1WRuquFyueDxeDAajejt7UUkEsHIyAjVzaRSKfrzYrEYhUIBHR0dq/4SIp1qJLBNp9NYWFhAIpHA4OAgAoEApqenqYZKLBbTFmKhUAir1QqZTIbdu3ejtbUVBoMBMpnsrsq15G+HQiEMDAxgZGQEqVQKHA4HLS0taG5ubojumGKxiGKxSD8b0pUVDodpMEnW9ba2NmzZsgX9/f3g8XgN/1yUy2Vks9majItOp4PNZkM2m4XL5UKhUKCZqcuXLyORSCAWi91UEygSiWA0GqHT6bB//34oFAqIRKIV3xjdV1BRqVTw4osv4vTp0xAIBPj93/99vPzyy7DZbHA6nfijP/ojfPDBB3jzzTcxPDyM9evXL/dxAwC9uVwuF/7yL/8Sc3NzuHr1KiKRyG2FWsC1xZg8lHq9HlqtFna7HZ2dnfTlkU6nMTY2hmAwiJmZGYTDYYyNjWF4eBitra2YmZnBunXr8O///b9f1Zs2nU7j6tWrmJ2dvetd2d1ASkgA6q4bqSYajWJubg7T09M4e/YsisUi1q9fD5VKhVdeeQVGoxFCoZBeg3s97mKxiGg0inA4jO9///twOp1U8PfFL34RX/rSl+pa9iiVSsjlcgiFQjh69Cimp6epMp/sxoxGI1pbWx/qFtLqcgDZqf/yl7/ED37wA2QyGaTT6Zs+82KxGGKxGPv27cMrr7yCiYkJ/N3f/R38fj99GQ0MDIDFYtEs58svv4y2trZVDSrIBikWi8HtdsPpdOKNN96Ay+XC3NwckskkCoUCyuUytFotTCYTmpqasH79emg0GuzatQsKhQJWqxUikeiengXyt4eHh/Hd734XgUAAoVAICoUCmzZtwubNm2syBfUin88jFothcnISf/u3fwuPx0NF29UW9WKxGI8//ji+/e1vQ6PRUIfNRqZUKiEUCmFpaQnpdBpsNhutra3YvHkz4vE4Xn/9dbjdbhw/fhzJZBLxeJxqv27mncPhcCAQCGgnWGdnJ9ra2hozqPjZz36Gjz76CADw4x//GJ///Ofp11QqFf7xH/8RNpsNADA5ObnsQQVpI0yn03A6nZidncXi4iJ8Ph8KhQIVUxL1sFgsrqmXZzIZsNls9PT0QK/XQ6PRQKFQwGg0wuFw0EU5m82Cy+UiHo/DaDTSPniv10sXNpVKhXA4jEqlApFItKIpJvLQEIEmCZ44HA6USiX0ev19ZUyqa3kk3alSqcDj8SAQCOqa8geu+XGQzz0cDqNcLtPrm0wmkc1m78vdslrwSurUPp8Pi4uLUCqVMBgMMJvNK35d7wRJScdisRt246QbSSwW1/04V5pyuYx0Ok13qaTmn0wm6WfCZrNpK61arYZYLIbRaIRarUZnZycMBgNyuRw6OjogFouRTCZpFwVw7ZlPpVK0pLCS4s3qdYzsUOPxOKLRKKampuDxeOByuRAIBFAsFqlgXCqVoqWlBTabDVqtFt3d3VCr1dDpdLRb4l6fhWw2C7/fj/n5eYTDYWSzWUilUqodUyqVdX8GyuUyYrEYZmdnMTo6CrfbjaWlJboR0mg09HprNBq0trZCrVZDIpE0dKBdvf66XC5MT08jHo+jUqkgGo3Srp5EIgG/349AIIBCoQAWiwUul0uvC9EMkv+xWCwUCgVaGlmtTeJ93SWvvfYaAOBTn/pUTUBBqE6TrYR4sFAoIJ/PY3h4GH/xF38Bj8eDiYkJ5PN5KBQKyOVyOvegvb0dHR0dKBaLdMGYmJgAl8vFr/3ar6GnpwdSqZTuRKvT/pVKBZ/+9KdRLpcRjUaRTqdx9OhRHD16FG63G6dPn4bX64XdbkdbWxt27969omJJUnMLBoM4e/YsPB4PstksBAIBNm7ciObm5vtK/5O67NzcHP78z/8cpVI
J+/btg81mw9atW+u6Q6lUKvB4PDh06BBCoRAmJyfpHA65XA6LxYKenh5s27btnvUU5Ly9Xi/efPNNeDwenDhxAolEAt/5znewc+dObNiwAWKxuK797alUCsPDw7hy5UqNjoLFYkGj0UCv16OpqYkuno2SYVpuisUihoaGMD4+jsOHD+P06dO0fRK4FlAQzwmNRoOnnnoKzc3NcDgc0Ol01LZZp9MhlUphZmYGi4uLNVqBau8D8tJfic+TZAULhQIOHTqEs2fPwuv1wuVyIZFIIBQKIZ/PI5FIgMViwWAwQKFQ4Omnn8aGDRvQ3t4Ou90OHo8HHo8HNptNBeP3E1xPT0/jjTfewOTkJEKhEAQCAXp7e2G329Hb2wubzfbAeqX7pVKpIJfLIZvN4tixY/ibv/kbBAIBLCws0BKWQCDAgQMHYLfbsXHjRjgcDphMJhiNxhUzxlsuSJbI7Xbjf//v/43R0VEsLCygVCrhgw8+wIkTJ+j9SLpchEIhOjs7IZPJqNbL5XJRnRnZ6JIshkqlgkKhWJVszT0HFalUCqdOnQIAfOELX7jp91y6dAnAtUVvORXD1Tv1SCRCdyuRSATpdBocDgd6vR5qtRparRYikQjNzc1oa2ujpZJEIoF8Pg82m00jfaFQeNsdPjGQkclkaG9vpy1MV65cQSQSgcvlAofDwcaNGyEQCFbM8pekulKpFILBIGKxGDX4EYvFkMlk97WbIGllstDmcjm0t7eDx+Mta3nlfo6rUqnQ8w0Gg3RnSVJ/i4uLVBysUqnoPVJt300WW3JNyEuDtOMFAgF4PB4sLS0hlUqhXC5DrVbDbDY3RDkhn8/T9rjrNQMSiQRarRYSiaRura4rSXWGkdSV5+fnqQcDgc/nQy6XQywWo6OjA2q1Gi0tLWhra4PJZIJSqaS7eKFQCLVajVAodMPzcn1QQVx4l/tzLZfLSCaTSCaTmJ6extzcHBYWFuDxeOjfJ+sZn89Ha2srlEolHA4HWltbYTQaoVAoaDBxP1T7sfj9fiwsLMDr9aJcLkMgEKC5uRlWqxVyubyuDrIA6PUnuhLSsVc9QE8mk0Gv18NqtcJkMkEul6+J2S6kqy0ajdKsOwmUyT1CtH88Hg9SqRQymQwOh4Ou+zwej2oy8vk8vWf5fD4kEglEItGqXcN7fgNduHCBppv27dt30+8hmYzNmzdDr9ff/9FdR6lUQqlUwsWLF/GjH/0I8/PzcLlcKJVK4PP50Ov1+O3f/m2sX7+eDpcRCAQQiUT0BUUuIHAtXXY3sxFYLBYNPLZt24auri4cPXoUAwMDyOVyOHToEL2JSWbkfjIGdyIajcLpdOL8+fM0JUZcQiUSyX2nKEl7Enm5kjQci8W6rSnYSkPSeEQcm06naSBF/vfWW29BpVIhl8thz549yOfzyGQyNKKXyWTo7OysefhImWNqagrnz5+H3+/HyZMnkcvlIBaLIZfL0dvbSzUK9SYSieDo0aNwuVw1PiRsNhsdHR3o7e2FxWKp4xGuDCSYSKfTGBoagsvlwk9/+lNcvXoVoVCo5nubm5vxwgsvwGq1UlGaVCqls3Cqd/K3I5fLIRqNIhQKIRaLQSKRQCqVLvuLKZ1O48/+7M8wNDSEmZkZRKNR+iLo6enBrl27YDKZsHv37pqShlQqpRuX+y1HVAdqFy9exKVLl3DmzBkcPXoU5XIZYrEYmzdvxu/93u9R8TOXy62rJiGZTGJpaYl68lwvUGez2dBoNLBarbBYLNDr9WsmyE4kEjh9+jSuXr1as7GpxuFwoL+/n3b1KJVKmj0i+rdDhw7h+PHjmJiYwNLSEhQKBTZv3oy2tjbYbDZa0l5p7vmuJFkIjUaDlpaWG77+93//9/j5z38OAPjd3/3dBzy8TyABQbFYxMzMDD7++GPqokaGyZB+5I6OjtsK98gFu5cdCPGjkMlkkEgkcDgckMvliEajWFhYQCwWw9TUFNhsNqxW64oEFel0Gl6vF4FAoCbtS4K
e+60dkh1LJpOh7qLELKxew7PIwkeib2L0Q3rUyddJym9mZgYmk4nWHol2RqFQUM0MaREkwdnY2BguXrxIO2lYLBaampqg1+uhUqkgEokaQtxFdujEh4BAJjMS/4w7Ud0NdTNznUazoSfp3mQyifHxcczPz2N4eBjz8/P0e0hqW6fTobu7G62trXA4HHf8PMjzTEoH1ZuOfD5PnwE+n78ibdqFQgGDg4M4evQo/f1isZial7W1tcHhcKCrq2tF/DJIq+rExAQGBwcxMjKCpaUl2gGn0+nQ0tLSMC6j5HnO5/O3FOFX7+bXQoaCkMvlsLCwQNvEib4HAG0hNRqNaG5uRktLCzZv3gyJRAK1Wl0zloA4n5J7n5QCq7U2DZmpGBwcBABs2bIFwLUHPxKJ4OLFi/iHf/gHGlC8+OKL+PKXv7wsB0kedp/PB7/fj5GREdpOScoYr7zyCpqbm2G32+nL4FZBw4MsniSNbrPZ8K1vfQtzc3P4+c9/jmAwiHfffReXL19GZ2cnzGbzA5/39WQyGSwsLCASidS0fgqFQnR1dWH9+vWQSqUP/HdIkKbX6+sm0iQiJDabjS1btuDVV1+F1+vFpUuXkEgksLi4iGKxSEs3w8PD9IEslUqQSCQwGo0IBoP427/9W5TLZVgsFkilUrhcLpqtmJ+fh1gsxt69e6HT6fD000/DarWio6Nj1R7CO1Gd1qxeTFksFkwmEzZu3EinMt4MsggTESDJSpF7SCgUwmaz0ZcaWZTrLfqMRCI4c+YMnE4nDh8+TIW61WzYsAG7du1Cd3c39u/fT7MTt0MgEFCXyCtXrsBisWB4eBh+vx+5XI62Vvr9flQqFcjl8mUPLrlcLnp7e5FOpzE1NYVgMIh8Po9yuQy3242pqSmUy2V0dnZCIpHclxD5VuRyOQwMDMDpdOLIkSO4ePEi1Sjt2LEDn/nMZ2gw0yiQbMmmTZtw4MABLC4uYmRkhL6Ac7kcDh8+jLGxMYyMjKCvrw/t7e1oaWmpi0HhvSASidDV1YV8Pk+zDiTLfvDgQXR0dGDdunXo6+uDWCymwQSPx6OaM9IZduLECfp+4HK5kMvlNEO7WhukBw4qnn32WfzqV7+q+Z5f//Vfx//6X/9r2S4i2ZWSgMLj8SAej9N6uV6vx5NPPgmLxQKlUnnHDogHOS4SqCiVSjz++ONQKpV48803kUqlMDAwgNHRUfz6r//6ff/+W1GpVJDNZhEKhWo8Cog4zWq1wmAwLEsQwOFwIJFIoFAo6vpiIYuByWTC448/jtHRUTpYiCjigWsv3bm5OcRiMXpPkIxDPB7H22+/jVAoBKPRCJlMhng8jnQ6TV+2IpGICt+eeOIJ6qjYCFkK4JNM0vVZI5LyNRqNkEgkN72vq3fgJEND1PPk98lkMhw4cABqtRp6vZ7+rnoHFdFoFMeOHYPb7aYC2utpa2vD3r174XA4YDab72qHyufzodFoaDBeqVQwPz8Pv99PW1aJ2I2UTpcbktEkzrhEL0PGfHu9XtqdQua6LNf9WCwWcebMGYyMjODs2bM0sCbGWU8//TR9ETUC5CXL5XLhcDjQ2dkJgUCAiYkJGlSQzM/Q0BCCwSDcbjcOHjxI5wA1yrN8M3g8HnQ6HXQ6HQ1+iG/S+vXrceDAAVitVio6rT6XQqEAr9eLq1evYmRkBBMTE/RrHA6HtteuZlB1T6tGJpOhB02CiosXL97wfT//+c+xceNGfOc733ngAyRtZJlMhppqjY2NAbjm8rZr1y50dHSgqamJzmVYDUjrmlAoXJWLRdzyWlpaEAgEqC++Uqmkoq37bSlks9kQiUR0aFo6nYbb7abtbvVGqVSip6cHZrMZnZ2dCAQCGBoaQjQaxZUrVxAOh8Hj8VAsFmkLHEkXBgIBGtGnUina8ZHP56HVatHU1ISOjg4cPHgQBoOBGh41QobiVhBhLhFpElvuaqoNvchOZmBgABMTE4hGozUj08ViMSKRCKRSKQ3MOzo6YLV
aIZVK78mRcTlIpVKIx+OYmprC5OQkPB5PjWCYzWZjw4YNaGtrw+OPP46+vj4qWryX4+RwOGhtbUWxWMSFCxfof69UKkgmk1hcXIRUKl2REiCPx8OBAwfoTApi5DQzMwM+n4/R0VGEQiEUCgWo1Wp6jmazmQoQ7/alTwLLZDKJoaEhuN1uXLhwAbOzs9REa//+/ejv76d+FI3kUwOAruvNzc34/Oc/D5/Ph56eHvj9fhw5cgTRaJS2W8/Pz9Ndv1AohNlsRnd393372Kw0XC4XCoUCWq2WBsVyuZy2xTY1NdFZPtcfO8lAkpEF1ZDGgvb29lXdINzTX7py5QqNDElQMTMzg0QigdnZWRw9ehSvvfYalpaW8Gu/9mvgcrn45je/+UAHSNo5w+EwTpw4gddff53uHLq6uvDSSy/BbrfDZDKtarqOxWJBIBBAKBSu2guIiA7n5+fpDWYwGGC1WulskvuBw+FAJBJRO9dUKoXR0VFEIpEbbtR6QFJ4drsdGzZsQDabxc6dOxEIBPCTn/wEk5OTCIfDSKVSUCgUMJlMsNls6OjogEQioXVxMhKZoFar0d/fj3Xr1mHPnj1U9NTIAQXwidJdpVLRIOD6YLpcLlN9ya9+9SuMjo7i/PnzuHr16k133iTrpdVqIZPJ8Mwzz+DAgQNoa2uDTCZb1YU4EolgfHwcZ8+exblz55BOp28IKnbt2oW9e/eir68Pra2tNd09dwPZvdrtdgiFQhw+fLjm64lEAm63GwqFYkUyFQKBABs2bEC5XEZbWxt8Ph8+/vhjiMViLCws4NKlSyiVSnjvvfcgkUjw5JNPwmw246WXXkJ3dzetkd8N1eXjv/iLv8D09DRmZmbo1GaZTIbHHnsMX/3qV6mgudFevNVZS6PRiHw+j8ceewwejwfBYBDDw8Pw+XxUn+ByubC4uIjZ2Vls27YNra2tYLPZdc++3QwulwulUlmjkSATVR0OB81Q3CoTGQqF4PP5agatAdc2Y+vXr4fJZGrcoIKUPoxGI6xWKwDQYU4WiwWPPfYYfu3Xfg0bN27E0tISXnvttQcOKorFIlwuF513QLoduFwu1Go1mpuba6y1V5PVfvBIK5xarYZQKKRjj0lJoHoyJXkISUbldsdanXWpNrtqpDki1cdPzL64XC527tyJ5uZmpFIpZLNZagIkEomQSqUQjUZvudOMRqNwu93g8/kYHx+HRqOpaz/+vUBS4mSxqPZWIVM4XS4XgsEgZmdnsbCwQI3DCNW6lWrzp0qlQruMuFwu7HY71Vis1D1f3d7o8Xhw7tw5zM3N1QxL4vP5aG5upqJMh8NBbcnv97hu9bPEdEipVK6YWJn8bblcjkqlQoMMYkEfiUTgdDoBgM75IOPIjUYjDAYD3QwQa+qbnUsikcDCwgJGRkbgdrsRiUTAYrEgEonQ398Pm82G3t7eewpU6gU5Pw6HA7FYDK1Wi+3bt8NsNuPq1atUyB6LxZBKpeB2u6HX6zE+Pg6tVtuwo8+rrxsxveLz+QiHw0in0+Dz+RAIBLTdmKz9ZNJuLBajWWWSdTabzdBoNJBKpav6frynoIJ0fvT399/ye4xGI55++mn84Ac/QDQafaCDA6716L/55pu4dOkSLXuQNlGbzYaurq5lrTc2MiKRCBaLhfbhk7pvuVzGsWPHqHqbCHTILkStVt82NUxeGCqVCkqlEslksqE/Tx6PR4d82Ww2qrmpDoL8fj8+/PBDLCws3LItljjyaTQaTE9Pw+Fw4I/+6I8aPqggwQAR712/IBUKBaolmZmZwYkTJ+DxeGpU5QDofcJms6mqnpQeDh06RCd39vX1QSqVrkhrZfVxJxIJRKNRHDp0CH/3d39H3TOBa4GvXC6nIsL9+/fDbDavmLHR0tIS7cy42TyR5YLNZtPnTq/XY/PmzdSWfmZmhpqyjY+PI5vN4uzZsxAKhdi0aRPWrVtHBaqk9fVmz+3ExAT+63/9r/B4PBgbG6NCZrlcji996Ut
4/PHHodPp6PVttCzF9ZD7XyaTQSgU4lvf+haSySQOHTqEyclJfPzxxxgcHEQkEqGi5Gw2i9bWVvzO7/xOQwYV1ZCNdCAQwOXLl2GxWGAymaDT6WgwkUgkMDAwAI/HgytXrmBmZoZmKjQaDfr6+tDT0wOLxbLqTrv3lakgpY9b4Xa7AQDd3d33eVifQKK2akMQuVxOrVjrqey92cCylTwOItKRSCQwmUzgcDhYWlqiNtbVfhoqlQoajaZGKUxS+9drBqq7Ycg/k4FGxGSL/HcSMddz4blZ+2N1mynZWS4tLSEajUIgENA6NIfDQTKZRCaToda4yWQSbrcbcrmcWjY3sjMledGRbpfqe5AYwy0tLcHr9cLr9SKRSNBOKdIWrVarIZPJYLVaqd15LpejM27IUKt4PE7FgittBEY8RsiQJNLdQ8TIYrEYFosFra2tkEqlK5o54fP5UCgU1E11JSGfKRGYVyoVmEwmsNlsdHd3Q6fTYXx8HNFolLoCe71eKqRuaWmhayKfz6ddS8Sxc2FhAfPz84hEIqhUKhAIBGhvb4fZbEZLSwsVtzd62a8ack3IfcnhcNDW1oZyuQyPxwO32410Oo1kMkk1YmKxGLFY7L6tzFcKUooTCoXQ6/WIRqP0mSVdSAqFgmYpSPA9PT0Nn89H12hSIiTrP+laWu3zvOugolAo4OrVqwCATZs23fL7FhcXcezYMQDAM88882BHB1DXxLm5Obowbtu2DXv27MHOnTvrGlAQAxkCeXmv1EUkN4jRaMT+/fsxPz+Pt99+G8FgEP/6r/9KFwZyk/J4PLS1tVEjnV27dkEul0OlUt02Wi+XywgGg4hGo/jnf/5nnDx5EkKhEEKhENu2bcOWLVsa5oGsHl8PfGKY5PP58N577yGZTFLnOVIWOXPmDIaHh2l2I5VKUY+RQCAAqVTa0KlgUh9PJpMIBoPU55/L5SIQCOCv//qv4fF4cPz4cUQiEWr2JhQKIRKJ8MQTT+DZZ5+FzWajA/RI8PDaa6/h8OHDiMVidKzy0NAQmpubV7QbiGQqfD4fnT1Brimfz4fBYEBHRweeeOIJtLa2rvgAv6amJjzzzDPo7e1dtfuArB1CoZC6Z/b09FDzL6/XizNnzuDy5cvUm0UgEOCnP/0pdDodduzYAb1ej927d0Oj0eD8+fMYGBjA5OQkpqamAIC6DP/hH/4hNUVq9NkYt4OUcQQCAXbu3IktW7bAYrHAYDBgZGSEDt8aGhpCJBLBiRMn0NnZie7ubsjl8nofPoBPAiOj0YinnnoK7e3tOHz4MAKBAC5cuIB4PI6XX34ZJpMJ8XgcAwMDmJ2dxU9+8hMaPBHfEQAwGAzo6upCW1sbDbxX8x151yvEyMgIjYR+8IMf4IUXXrjhQEulEr71rW9Rm+OvfvWry3KQxPiIiLHkcjmamprqPuaZ7ASIlW91NmAlIOdKxhwTx0gyop2kt6t3rplMBjqdjr5cC4UC+Hx+jQEY+f5kMkmH0pDOgfn5eRQKBdqa1NTU1NA7+WKxiGQyScVL5XIZ69evh0KhgMFggFgsxuLiIhYXF2lNknQYxeNxJBIJZDKZhhikdjtIpoIo3okugozxdjqdCIVCSKfT9LkhGQrSRaPT6aDX68FmsyGVSpFOp2EymaDVaum0zEQiAa/XC6VSueIam2w2i0gkgmw2W6NjIL4pZPd1N0Zf98LNRJ5CoRAajQYqlWrV7vPq1kfS3cHhcCCXy6kImQz8IhqZTCYDv98Pv98PrVaLaDQKi8WCYrGIyclJjI2NYX5+Hrlcjp6T1WpFc3MzzGYzbdVcq5AXJimDCQQCWCwW2Gw2BAIBcDgcKlomGW+5XE5Hud+rwHelzoEEk1arFblcDhKJBOFwmA6x9Hg8yGQyiMVi1AjP7XbD7/ff8PuEQiF1lK1HOeuu7yZS+gCAN954Ay+88AL+6I/+CD09PYjH47h8+TL+23/7bzh37hwA4K//+q+XxYip5mD/rfZvNpu
xfv16qNXqur3YstksnE4nFhYWUCwWqZLcZDLVDFRbCdRqNT7zmc8gEAhAIpFgcXERhw8fhs/no0EBIRAI4MiRIxAKhTh//jyEQiHtQSemOvl8HtlsFsFgEJOTk8hkMjRAGRoawsTEBO10kcvl2L17N0QiUV3HgV9PLpdDoVDAhQsX8MMf/hButxuBQACtra341re+BZvNRmuLe/fuxezsLE6ePIkf/ehH9GeJL4LP58O+ffsaXltRKpUwNTWF4eFhyGQyyOVyXL16FaOjowgEAnQGgEgkglAoxEsvvYQDBw6gubmZ7vbJS4y8XJ588kmoVCocO3YMv/zlLzEzM4Nf/OIXcLvd2LZt24qVvsrlMiYmJvDhhx9icnKy5mvEE6alpWVZny2ymJMXOJfLpeUzcq+v9LN8p+Mjx9XW1kZnmSQSCQwPD+PMmTMIBoO4fPkyNbRis9m4evUqJBIJbSUulUqQyWTYsGEDvvGNb1Bnxodtqi25ni0tLdQE7dy5c7RcEIlEcOTIEYyMjEAoFNLpris5BPJeEIlEeOqpp+gcFuBaJ5Tf70cmk8HFixepczPJJF4Pi8WCSqWiE1rr8X6856CCy+WiWCzinXfewTvvvHPD90kkEvzVX/0VvvjFLy7fUf4b1TVhMiCoXkFFoVCA3+9HNBqlQQXpPFhpIRApRQiFQmzcuBEymQzDw8P0Jqu2YiYWzwAwNTVVs9tUKpWQyWTIZDJIJpPUBr0a4mAoEAjA5/MRCoWQyWSoULARIC1z2WwWIyMjeOutt5DP55HP5yGTyehQKZIKFIvFMBgMCAaDtIuGWDMvLCyAzWZj+/bt9T6tGm5W5qtUKggGg3A6ndBoNFRbEwqF6MA14Nq1k0gkaG9vx6ZNm+h1r/595LMxmUzo6+vD8PAwACAWi2FoaAh6vR7FYpF2X63Ec7e0tISFhYUb5noQr4GmpqZlfwlWd0lxOJyaQXQrNRjwbqnOXJANGvHN4PP5iMViWFhYwOLiIsLhMObm5pDJZGpszAHQuUBWq5VOHW7k8t79Qu5J4gpLpjYTW4JcLoeJiQkEAgHMzMxAq9XSFtpGgMvl0vcHsdeem5tDMBjEwMAApqamqAbsZms1yUoQDZlIJGrsoIJ0fvzBH/wB5HI5fvKTn2B6ehq5XA5yuRxdXV04ePAgvv3tb6/IcKNqV0FSDqmHipe08/h8Phw5cgQulwuZTAYcDgdWq5XuiFcDgUCA/v5+dHd3Y926dQiFQgiFQkgkElSoF4lEaPaBuPaRhbPa1rr6vxM4HA66u7tputRut2PLli10ME2jZCkqlQrcbjcmJiYwPj6OcrmMnp4ePPPMM2hra4NKpaqxbRcKhdBqtdi0aROeffZZuFwunDx5krZiVtfzGwG1Wo29e/fC5XLh3XffRTKZBHBtd3/hwgVEo1Fq+uV0OqnIC7imRyBtgxs3boRGo7mlHoGkgqszGGSgl8/nw/T0NEwmE/R6/Yo8exqNBhaLBUtLSzUvxnw+j0AgALlcfkMHy4NA7gWpVAqJRAKxWEyzdE6nE0ePHkUwGLythmy1IQGdyWTC008/jUAgAKvVioWFBfzzP/9zzeROco0PHDiAr33ta7BYLDCbzQ99txwJkNva2vD8889jbm4Ob7zxBjKZDB1GNjs7S8uBOp2u3ocMAPTZI105u3btwgcffICBgYEap1cyVPF6yPOr0+nQ1tZWN63MXQUV5XIZV65cAQBs3LgRL7/8Mv7wD/9wRQ/sZhCley6XQzabhUQiQaVSWdVorFoIeOTIEcTjcRQKBeqAtpomXHw+n84YaWlpoa55qVQK8/PzGBwchNvtRi6Xo7tXIuoDQJ0lq7l+99rd3Y2Ojg5s374dvb29UCgUt7SErheVSgUulwsff/wxZmdnUSqV4HA48JWvfIVmtKoXUYFAAIFAAJvNhq1bt0Imk+H8+fM0cCUdII2CRCLB5s2bIRKJcPTo0ZqgYmRkBJOTk/TeI3V2cvxcLhf
t7e00OLxTSfL67iAy1XdpaQljY2MoFAp3FPreD8T6XqvV3jDEqlAoIBKJ3HT0+4P+TT6fD5FIBLFYDKFQSLUqfr8fR48eBZvNXtGW0nul2ttCLpfDYDBALpdjenoab775Zs04eLI2rl+/Hi+88MJDVeq4HaRMbjAY8Pjjj0MsFuPdd9+lGqp8Pg+32w2JRIK+vr56Hy6lutzV19eHrq4uqvGKxWIIBALI5XI37TokP0/8m4g7Zz24q786MTFBnRXXr1+/ogd0N5C652rtJiuVCh2n7fV6MTc3h48//pi2uIpEIigUCrS1tWHjxo3LriW5G8hLXiAQ0IFnQqEQsVgMra2tiEQiGBoaQiwWQzAYRCqVoiI/DocDgUCAXC6HeDxOU9xisRgbN27E7t27YTKZoFKp7jhXZTWpNkyanZ2lbXd6vR46nQ5yufy2JTI+nw+9Xg+v1wsOhwM+n4/Ozk50dnbW5RreCpFIhJaWFiQSiRsWiupJnj6fj7bKEsGa2WzG1q1b0dfXd8uhY6VSibbZEk0GqemSZ4xY/loslmVdrMgmIZ/PY3Z2FnNzc1haWgIA+sJvaWnBvn370NLSsqyj6El5MJVK0dZDEmQbDAbs2bMH69ata+iXcTabxdDQEKampqgo9/p1MRAI0JZppVLZMBnGlaJUKtFBl4ODg5iZmaGZOwDUmbOrq6thOkCuh6xHW7ZsgUKhQCwWQzQapULNRCKBwcFBJJNJRKPRmvOrN3f1tJDSh1AoRHt7+4oe0N1AsgWrMZabtI6Gw2EEg0G8/fbbeOONN6jTGVHO6/V6bN++HV1dXXURd5GFgojyZDIZLBYLyuUydu7ciWw2i8uXL8Pr9WJiYgLhcBiRSASJRIK68iWTSUxMTKBQKFAzrKeeegobNmyoqaM3SpaCvJBSqRRGRkZw9OhRmM1mOguGjDu/1fGSF6/X6wWfz6d2xe3t7Q1TZwWuHWdrayvy+fwNGQJSvopEIohGozVjtHfs2AGbzYYnnngCVqv1lp9DsVikL/Nf/vKXuHjxIhYXF2teTgqFAu3t7dRIbbkgyvx4PI7R0VGcOnWKZmIEAgF0Oh16enrw6U9/etlnkJTLZSSTSfrZVQvfOjo68LnPfW5FdBzLSSwWw6FDh+ByuejnRgIL8lk5nU58+OGH6OrqwrZt2xpqY7ASkMFsLpcLH330UY3HEQDqaUHmnDQaJIMGAOvWrUNvby8KhUKNd5DP58P//b//F7Ozs5icnFwWo8nl4q6eFiLS7OrqWvVaHJklbzab6dAYr9eLgYEB+gLn8XjLMoOj2gKVjCEm6bLJyUksLCxgdnYW4XCYigCVSiV27NgBs9lMRTb1rldeb2ZFFhkyzVIkElGDlVQqRR1Kg8EgdWP0+Xx0ABdpmW20HQ7RQFTvMI1GI7q6utDU1HRHQSGp1cfjcSpukslkkEgkdb+G1ZBMkkwmg81mQyaTQTgcvmF3Uh0ElEolxGIxSCQSLC0t0XOqFiMWi0UkEgmk02lcuXIFXq8Xi4uLiEQidBEmglxipLPcAWV1toBkz8h5kXuOlGOW6/4jz3kul4PL5aKjxwHQv0PEjfUUg9+OTCaDeDyO+fl5OuWU6ELa29uhUqkwNTVFn+OxsTHweDxs2LABAFbc52O1qDa9y2QytL0+HA5jcHAQPp8PsViMDk3U6/XUiVculze8WPV6Hx4yVVkgENB7eDU21/fCPQUV9Sh9cLlcbNq0CXw+H8ePH4fT6cS7776LY8eOUWGowWBAa2vrA0fghUKBDmGamZlBIBDAxYsXEQqFcOnSJeptkM1moVQq0dLSgs2bN+P3f//3odVqaRtTIz2s1T3QDocDlUoFfX19N9TlWCwW/H4/xGIxXC4XPX9isnS9LqERIBkk0nIlFAqxefNmfOUrX6FjhG9HNBrFqVOn4Pf7oVKp0NzcDJPJBKVS2VDXkMvlQiqVwmg0YufOnTCZTDh27NhNe9QJ6XQag4ODmJ2dhUKhQEt
LCwwGA9RqNTXKITqUaDSKyclJJJNJJBKJGp8IqVQKq9UKvV5fI3ZdLkqlEoLBILxeL/x+P+Lx+IqXNUm5JxAI4Mc//jEuX75Myz2k5KJUKmkJrdGCaeBa9uH111/H9PQ0zp49S903rVYrfuM3fgNtbW34yU9+gjfeeANLS0v4/ve/jwMHDmDHjh3QaDRQKBQN/0K9G8jGIp/PY3BwEGNjYzh9+jROnz6NVCqFSCRCjQA1Gg0+//nPw263Y+PGjdSjZS1Q7YxJrACIBfn1urh6c1dBxeXLlwFcS8WsNmw2GxaLBfF4nA6RymazSKfTdJdRKBSg1WpptqJ6h052O2R3cjNIDY4YpEQiEVoiWFhYgN/vp05/AoGATsLs6elBS0sLNBoNzZg06k1KREC3g+x0iDkWMfe63gq6kSCdK9WtVGSIzq1efiSgymaz1CCKGCs14jWs7lppbm4GAFitVupCmc1mb/gZogNisVhYXFwEAMTjcahUKmpu5vV6MT09XROYkWeBZCg0Gg26u7vpnI3lhkyPXVxcRDKZrNl1kdkkQqFwWQIZct65XA5LS0vw+Xzw+Xzw+/30M5TJZHQEgEgkargR4KTsG4lEsLCwgKWlJeTzeYjFYthsNlgsFhiNRiiVStjtdjQ3NyOfz2NmZoaKtUUiUcNqCe5EtZ6OtJKTacqTk5OYm5vD7Ows3G43XfMFAgGam5thsVjQ0tKC1tZWmrlrpGt7O66f70NKv2R9BkANCus9RuGuggriVVAP+Hw+9u3bh02bNiEQCCAUCiEWiyEej2NoaAh/+Zd/CaPRiM2bN0OpVNLZ82QUOBm0k0gkqNV3NZVKBTMzM/B6vdS9LJVK0QiQ9ASz2WwolUps2rQJPT09WL9+PQ4cOACJRAKFQrGmbtC7gTyQZBFr1KAC+MTwpbm5Ge3t7XQuyq1egkTc6XK5cOrUKWi1Wmzbto2OwW5UpFIpvvSlLyGZTKKjowOzs7M4dOgQLl26dEPmibSfpVIpHDp0iDqE8ng8ej2JayYp95E6PJvNpuPjH3vsMbz66qtQKBQr0vdeKBTwwQcf0KFI1RDr6Z6engfKkpHPxev14sSJE3C73RgYGEAwGMTQ0BCSySRdF/bu3YuDBw9i/fr1MJvNt72PVptKpUK7YE6ePIkjR46gUChALBbD4XDgS1/6Es22CQQCbNu2DXq9Hu+88w6GhoYQDAZx4cIFtLe3Q6PRrEltBemEyOfzSKVSCAaD+OEPfwiPxwOn04l4PI5YLFazzvf09OC73/0ujEYjmpqa6DTmtbheVyoV6v4bCAQQiURQKpXAYrHgcDjQ3d2Ntra2uh5j4yqQ/g1iy83j8Wg9jLS5pdNpTE5OIhAIgMfjQalUIpVKQS6XQ6/X0xn1uVwOkUgEY2NjN7SHlctlTE1Nwel0wufzwe12o1gs1tSUiWhRIpHAarWitbUVXV1d1FCpUS2rH4TrMz2NSqlUojNhyCTNOy0YxCiL6ErIhEgyxKlR4XK5UCgUEAqF6OzsBIfDwfDwMG31vD5gJvqg682kbgbREhCPCo1Gg6amJrS0tMBqta7YTJtyuYxAIACn00lnlBBIKaJ65POdsk83C37Jf49GoxgbG4Pb7ca5c+douYf8XmJg197eDp1O13BDtsicGr/fj6WlJYTDYZrNIUGgwWCgxy2Xy2tKV7lcDoFAAAqFoqFapgnVwwDJdaz+/EkgTGbDxGIxzM3N4eLFi5ifn6d6HHIfkwGCVquVBlJksOBahQwTJBk38j5jsVhQKBTQarXLLmi+Vxr+0yVpbS6Xiy9/+cvYu3cvTp06hYGBASwsLGBsbIzahPN4PAwPD9OJhsRaWigUIpPJIBAI3PAwEbe1VCpF/S/IZES5XI4nn3wSer0eGzduhN1uh16vp+n1hzWgID3eMpkMZrOZnmujQIRZ+XweAwMDGBkZweLiYk1aFLixS4V8jdShp6en0dPTg+7ubjz77LNQKpUNPxaZlLHWrVu
H1tZWsFgsNDU14dKlSzhz5sx9ZZR4PB6sVisUCgV6enqg1Wqxfft2bNy4EUqlsqasuBLnQ9Lx1R4LwLVpxx9++CHcbjesViudunuzaxSJRHD16lU6Frr6OSf3y9TUFE6dOoV4PI5QKIRyuQyRSAQOh0MX4+bmZmg0mmVtXV0uKpUKjh8/jp///OfweDzU2OyVV16BzWZDa2srtVsnzrF8Ph92ux1yuRyFQgFDQ0NIJBJ4+umn6306FBL8xmIxTE5OIh6PY2ZmBiwWC5s3b4ZcLkcymUQ2m8XAwAAuXLiAQqFAd+zz8/PIZrMwGAyQSCTo6OiA0WiEwWCAw+FAc3MzneDaSEHicsJisWCxWNDZ2QmNRlPXY2mcN8UtIIYeZLYGMXsi7XPj4+PI5/M3LEgP8vfIwi2RSNDZ2YnW1lbs2bOH7gIa/cXzoJCAitToGlFnQOqp8/PzmJ6eRjqdBvCJPuZmL0HycnG73Th79izK5TL0ej2sViusVuuamYXA4XCgUCggk8nQ19eHYrGISCSCc+fO3XWpqvqz4fF4MBqN0Ov16OrqQktLC7Zv347W1taVPI2avy8UCm/IhMRiMYyOjiKXy+HSpUuwWCxob2+/qYfIwsICPvroIyQSCYTD4ZoWQnLdXS4XZmdn6efD4XAgFoshEono+RPr5kbTUgDXNkBjY2N49913IZFIaNv4nj176P1Q/Rny+Xzw+XyqDymVSpiZmaEmX40CyUJFo1FcuHABS0tLuHz5Mu1UstlsWFxcRCwWw4ULF/DLX/6yJqsBXNsIKZVKGI1GtLW1ob29HV1dXWhvb6fmZg/jBrAaiUSyKrOn7kTjr6D/RrUXf1dXF2QyGRYXF7Fx40aEw2EagVcLV4Br5jDZbBY8Hu+m7WEsFgtGoxFarbYm3apQKKBUKrF3717qUMbn89fES+dBIWlGkk68Xdq5npDjJCOzY7EYjhw5AoFAAKPRCIfDQa9XsVikqW+Spejv78dTTz0Fs9lMTcPWEiwWi46utlgs6OrqQiwWg8fjoUFGMpmk963FYqGGWHa7nZY8hEIhWlpaoFAooNfrIZPJoNVqV+Uc+Hw+nn/+eaxbtw7vvvsuzp8/TzVTJNXr9/tx7NgxyGQyaDSamwb14XAYo6OjNZNnqyFus5VKBXw+nxpBHTx4EDqdDp2dnTAYDLBardBqtQ3VGVEul5FIJKinBgBs2LAB+/btw8aNG6kp3a2eUYVCgW3bttGZFzfL2NaTUCgEp9OJoaEhHD9+nArkyZovk8mQTqeRTqcxMzNTEzjzeDyYzWZotVq8/PLL6OnpgcFggFKphFQqpa3QD2tAUT0wsKOjA5s2barroE1gDQUVAOiDbjabYTabkU6nsXHjRvh8Prz++uvUorg6Co/H4wgGg3SI1PUfNpvNRltbG+x2O7UJFolE0Ol0dEbEWhQ0PQjVVujEr6NcLjdcSylwLWiMx+NYWlpCIBDAu+++i/HxcWzevBmvvvoqncOSTCbxgx/8AKdPn0Y6nUYul8PBgwfx+OOP0/rrWlt0WCwW1Go11Go1rFYr+vv7EQqFcPHiRczNzWFychL5fB4SiQQCgQAbNmxAZ2cnNm3ahP7+ftqmxuVyacvwcvpB3A0khd/d3Q23241gMIjZ2VkaVJCW0/fee2/Z/ibRZ7W2tuK5555Dc3PzXVmY14tyuUzHm8diMWq9/fLLL9POs9s9m3K5HN3d3RAKhRgcHITf72+YoKJSqcDn8+H999/H+Pg43nvvvRptkNPpvO3PCwQCtLW1oampCZ/97GfR0tLSUOLalYbFYtGsVVdXF+x2+7K3fd8rayqouB4iXGOxWNi/fz/i8fgNZiDEgpfP50OhUNxws5FaFKkdi0Qi8Hi8Gs3Eo0a5XEY6nabWzUT81EiQXUxbWxsymQxSqRRCoRBtF5yZmcEHH3xAd7XZbBYLCwvIZDK0Q4SYua3lXQwxNiNeJCqVCr29vbTllBh7cblctLa2wmazwWg
0QiqV0vOuNpha7c+BXEeRSIQtW7aAw+FgcHAQuVyOTmS81+4jcg6kbEd8GSQSCVQqFTQaDTo6OqDX69HS0kJb1RuVanHe9e3dd3O9iDCZZB0bzSyJmLndzqOEBP5isRhSqRQKhQJ2ux0KhQLr16+HwWCg6/tafZYflJXwkbkfGvdJugv4fD6d6ka0FtdDREBk4bzVB369BXWjWVKvJuVyGfF4HPF4HJFIBNlstqEWXaLU5/P52LNnDxwOB4LBIGZmZmia1OVy4ciRIzdoB3g8Hvr7+/Hcc8+hqampIfUi9wqx9SUlPiLU2rVrV80iTc7zZh099brPq4999+7d6O/vxzvvvINIJAKfz4e5uTna8nq3kGedZGj6+/vhcDhgMploQNnT00M7uxphIb4dxFMlHo/f4ElyN8FWsVikgwYbqTWcCKfj8ThcLhe8Xu8tAx6xWAyxWIzm5ma0tLSgpaUFBw8ehFwuh91up1q3RsymrjTkeV7tLOOtaJw3xX1yp7bH6mDiUbzh7pXq1rzbtek1AqSNqlQqoaWlBevXr6dBBfEaAUBNjFpbW6HX69HW1ga9Xt9w01YflOtfjmvlfq/OLLBYLLS2tqK/v5+2P5IXaj6fh9/vrxFhXg8ZEieRSNDe3g65XI62tjaaoSGOqWT8dyMswneCpLhVKhUt50UiEczMzCCfz9OWexIgk/uAZCai0SiCwSAymQzMZjNaWloaapNAhLpkhACXy6UjDyQSCbhcLhWbGgwGWCwW2Gw2ep1JJm4tXMuVgM/nU0F9I9A4d9YK8Sinw+4HkmputBTpzSDTBvV6Pb72ta9h586d8Hg88Hg8mJ2dxVtvvQUWi4W2tjaYzWa88sor6O7uhs1mo9MamXujMajOWGzbtg2bNm1COBzG5OQkYrEY5ufnEQwG8a//+q+Yn5+/5e9RKpX47Gc/C5vNhueee67GCK263LOWNDQcDgcWiwVqtRp6vR4AcPz4cczNzaGvrw9f/OIXodFo4HA46GfIZrMRjUbh8/lw8uRJfPjhhzCbzfjc5z6HlpaWuncIAJ8EkzKZjA5uCwaDaGpqwgsvvACj0YiWlhZIpVIqNhaJRLQN+Prrulau53JCyntNTU004Kw3D31QATyaJYz7gQzlIeOzG53qdmO1Wg273Q6JRAI+n0+zF6RbSKPRoKWlhQ5Va5SonuETqu3IiTW31Wql3Q0KhQJdXV23/R1WqxXNzc1obW2ldfa1DrnPhUIhFAoFVCoVbY3WaDSYmppCKpWCQqGgLbIcDgfBYBButxuBQADFYhECgQA2mw12u72hslgSiQRGoxEA0N3dDYPBgI6ODmg0GpjN5hoRMY/He+SE87eDuAmTZ6QReCSCCoa7I51O4+LFi5ienkYikWiohed2kC4IhUIBh8OBzZs3I5/P41vf+haAWu0NEXwxND5k1km5XEZ7eztKpRIOHDhwW31F9XVuRAOr+4H45rDZbOzfv5+Oqh8YGMD09DRee+01CAQC6rGh1+shEongdruxsLAAPp+PrVu3oq+vD0899VRDmbyxWCy0tLTg1VdfRbFYpO3/xHOj2mDwUc1G3A6BQIAtW7Zg8+bNMBgM9T4cAExQwVAFcbWLx+Pg8Xh0XsRaeJBvtkiS3Q/D2oQIKatRq9V1Opr6QvQCer0ePT09yGazkMvliMVimJ2dpS2iHA4HZrMZcrkci4uLCIVCaGtrw5YtW2CxWKDT6Wgmo1EgJQ2Ge4fD4UCn08FqtTbM3CImqGCgGAwG/Of//J+RyWSQz+fB4XDQ1dW15v3yGRgeBlgsFrUR37x5M5577jnE43E6ZTUSiVAzuGw2i/7+fvB4PPT09GDXrl10yOJa0pMwrD2YNwUDRSKRYOvWrfU+DAYGhpvAYrEgl8vpwMS2tjYUCgUkEgnkcjn4/X6kUikMDw8jHA7DaDRCp9Nh/fr16OjoeGS7IxhWFyaoYGBgYFhjkPIFCTTIcLRCoQCdTodsNguxWAyhUFh
322aGRwsmqGBgYGBYY1SPqie1dLlcDgCw2Ww138sEFAyrCRNUMDAwMDwEMMHDww+ZUNzU1IQXX3wRiUQC/f39MBqNjFCTgYGBgYGB4e4hjrFarRb/8T/+RwCoyyDA28EEFQwMDAwMDGuA6rEUjdqRx6rUabCDQCCATqerx5+uKx6PBxaLpd6Hseow5/1owZz3owVz3o8WgUDgljN46hZUMDAwMDAwMDxcNEYRhoGBgYGBgWHNwwQVDAwMDAwMDMsCE1QwMDAwMDAwLAtMUMHAwMDAwMCwLDBBBQMDAwMDA8OywAQVDAwMDAwMDMvC/w/JI8gw+SgdQAAAAABJRU5ErkJggg==", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAhUAAACLCAYAAADWF2tkAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy88F64QAAAACXBIWXMAAAxOAAAMTgF/d4wjAACHNElEQVR4nO29d5Bc53Xm/XTOOafpmenJAwzCIAMkAIIgJSYFkkqUqJW0kssu21t2OZVd1tbu6tutXZd37S2v6bhaywq2JJIixSAQBAgQOcwAmMHk1DPdPd3TOefw/QG/L7uRw8x0D3B/VSqRnHRv33vfe95znvMcVqVSqYCBgYGBgYGB4QFh1/sAGBgYGBgYGB4OmKCCgYGBgYGBYVlgggoGBgYGBgaGZYEJKhgYGBgYGBiWBSaoYGBgYGBgYFgWmKCCgYGBgYGBYVlgggoGBgYGBgaGZYFbrz8sEAig0+nq9efrRiAQYM77EYI570cL5rwfLR7l887lcjf9Wt2CCp1OB7fbXa8/XzesVitz3o8QzHk/WjDn/WjxKJ/3rWDKHwwMDAwMDAzLAhNUMDAwMDAwMCwLTFDBwMDAwMDAsCzUTVPRCJBZan6/H16vF+VyGaVSCSqVCjabDVwuF2w2GywWq85HysDAwMDA0Pg80kEFAJRKJQwMDOBf/uVfUCgUkMlksHnzZvz2b/82RCIReDweE1QwMDAwMDDcBY9cUFEul1EoFFAul5FMJpHJZDA2Nga3241cLodMJgOz2YxQKASlUgmpVMpkLBgYGBqKSqWCSqWCYrGIbDaLYrGIRCKBYrGIQqFAs7AAIBaLIZPJwOVywefzwWazweVywWKxmDWNYdl55IKKUqmEUCiEeDyOo0ePYmZmBmfPnsWlS5dQqVRQLpfB4XBw/PhxNDc3Y8OGDZBIJODxeOBwOPU+fAYGBgaUy2UUi0VEo1FcvnwZLpcLR44cwdLSErxeL9LpNACAxWJh586deOKJJ2Cz2dDb21sTZJDvYWBYLh76oKJcLlOtRKFQQD6fh9frRSQSwdzcHBYWFuDz+ZDNZunPRCIRzM/Po1QqwWazoVwuQy6XN0xQQRYUcl4AIBQKG+b4lhuyKyuVSiiVSvT8SRBYvSu7GSTLRP5H/p3D4dAdG5u9NjXL5LMhnwWbzX5o7wOGT9azbDaLZDKJpaUlTE5OwufzYXp6GktLSwgGg8hkMgCuBQw6nQ52ux2ZTAZKpRIymQwcDgd8Ph98Pp8GF2sRct+Xy2XkcjkUi0WIRCIIBIJ6H9ojy9q9m+6STCaDSCSCpaUlfPzxxwiFQhgeHkYoFILb7UYikaAPIGFxcRH/7//9P2g0GkxNTcFqteKll16C2Wyu01nUkkqlsLCwQHcpAoEAL774IlQqVb0PbUUgKd1QKITx8XH4/X5cunQJ8Xgcfr8fiUTilj9LnFsFAgFkMhn4fD5MJhO0Wi3a29vR3t4OHo8HkUi0JgOLfD6PYrGIXC6HZDIJqVQKpVK5Js+F4fZUKhWk02lkMhmcO3cOb7zxBsLhMMbGxpDNZhEOh+mzAnySgRgfH8fS0hL4fD6USiWMRiOee+45OBwO9Pf3Q61W1/O0HohSqYR4PI5EIoGf/OQnGB0dxW/91m9h
69at9T60R5aHLqggu1aye8vlcohEInC73RgZGUEoFMLFixdpMEF2+tUpwFwuB5fLhUgkgpaWFmSzWaRSKZTL5brXISuVCgqFAvx+PxYXFzE6OgqhUIh4PA6JRAIOh/PQ6T9I3TgajWJiYgI+nw8DAwPw+/2Yn5+nqd6bwePxYDabIRaLodVqIRaLEQqFYDAYIBAIYDAYIJFIIBAI6n5t7wZyf5OMTT6fp/dnNBpFuVyGWCy+abbi+vviVv/M0DiQ601245lMBrFYDFNTUzh37hwSiQQCgQDK5TK4XC7NvgGfXNNKpYJwOIxyuQyn0wmtVouenh6w2WysW7eubue2HFQqFfqZjIyM4MSJE3j55ZcbYq2+E+QdBVy7vtd/rfqfr39WqzOujcZDFVSQF26pVEI4HEY0GsWFCxdw9OjRmog+Go1SsSZwTcgkEonoCzmXyyEWiyGXy+HKlSvw+XwYHBwEh8OBVquFXC6vy/kVCgUUCgXMz8/jjTfeQDAYxNDQEDgcDrLZLEwmE1588UW0tLSAy+Wu6bRmNU6nE4cPH8b8/DzOnTuHeDyOpaUl5PN55PP52/5sqVRCMBgEl8tFMBgEm83G1NQUpFIpBgYG8MEHH6CzsxMvv/wyZDIZFeY2IiTFm8vlcPLkSczNzSEejyObzSIejyMajUKpVKKlpQU8Ho/+HFmUtmzZApvNBjabTbuaSBmIBKQMjQMJGguFAiYmJuD3+3H+/Hlato1GoxAKhdi8eTNUKhU6Ojogk8mgUqloOZTNZiORSCAajcLj8eD9998Hh8OB3+8Hj8e75fyGtUKhUMDY2BimpqYwOzuLcDiMiYkJbNiwAQqFAnK5vCFfvADoZiAWi2FhYYFuEAqFAqLRKF3fisUixGIxpFIpRCIRVCoVFAoFurq6IBAIaCNBo9CYq+cDQLQTgUAA8/PzOH/+PN5++216wW4GSY2TxTaVSiEej6NUKsHv9yOTyWBubg4WiwVisbhuQQU5t1AohKGhIXi9XrjdbvriVCqV2LBhA2w2G31ZNOoDdbdUKhX4fD6cPn0ac3NzuHTpEs0uAbgrPQQpb1XvDFgsFiYnJyESibBv3z7s378fwLUAs1EhnUuJRAIff/wxhoaGEIvF6E4tHo9DLpejra3thsCI3AtCoRB8Pp9mZthsNvh8PoRCYUPsfK7fld3v7wDWfvaFdHdkMhkMDQ1hcnISx48fx9DQEHg8HrhcLhQKBex2O7RaLXbu3AmtVgudTgexWEyvbyKRoGXf06dP05dZJBKhpZK1SqlUwuzsLGZmZhAIBJDJZBAIBOD3+8Hlcuu2Vt8N+XwesVgMHo8Hp06dojqZQqGAYDCIdDqNVCqFTCYDjUYDtVoNqVQKvV4Pq9UKu91e05nYKPf7QxVUFAoFDA4OYmZmBqOjo/B4PBgfH6cCnushL96DBw9ix44dNHU4OzuLH//4x4jH47RV6/Lly0gkEvjc5z4HrVYLNpu96tEheVG0tbXhG9/4Br0ZA4EAFhcXEQqF8OGHHyKRSECtVkOpVEKpVMJkMoHH49EXx1qCxWLBYDBgx44dUKlUWFpaQqFQAIvFgkQiweOPPw69Xn/H30MyPMFgEHNzc1haWgJwbbewsLCAkydPoq2tDSqVCnw+f6VP664gQVA6ncbS0hLi8ThGR0fh8/lw6dIlzMzMIJ/Po1Qq0QxGpVLB9PR0zXUmL2qBQIDJyUl6n5OAQiKRYNOmTVAoFGhvb6cB9kreK5lMBgsLC2CxWDCbzfS583q9sNvtMBqN4HK5NPgh50EEyteLd/P5PJLJJPL5PJaWllAul6HT6WoykOQZIHA4nFuWihoBcs8GAgGcOXMG09PTiMViUKlU2LdvH/bs2UOfb6FQCIPBQANEIkAGrpUAC4UCZDIZFTaSz3atrQe3ovqFKhAIIBaLa7J1jUKlUkE2m0U+n8eVK1dw9OhRBAIB+izncjmUy2W6VpdKJbDZbKTTabrJnZ6ehkqlQigU
glwuh1gsBp/Px7p162A2myEUCmlQWQ8euqDinXfewalTpzA5OQm/33/b72exWOByudi+fTu+9KUvgcPhgMPhYGxsDB999BGKxSIikQhNN4+OjmLdunVYv349ANQlqOByuTCZTHjhhRfg9/tRKBTgcrloUPGrX/0KV69ehV6vh06nQ2dnJz71qU9RkeJaXETILozH42FkZASZTAZcLhdGoxFf+cpXYLfb7/g7CoUCTpw4AafTCQ6HQ8sGmUwGHo8HZ86cgd/vx/bt21fhjO4O8uKMxWI4ffo0FhYWcPToUfh8PiwsLNS0DRJyuRzi8fgNvwcA5ufna/47h8OBQCCAXC7H448/DpPJhG9961sQiUQrnlJNp9M4evQouFwuPv3pT0MikeDQoUM4ffo0nnvuOTzxxBOQy+U3qPiLxSKKxSINJjKZDDKZDOLxOObm5hAIBHD+/Hnkcjn09/fDarVCKBRCIBBAoVBArVbT8yLdD40aVOTzeQwNDWF6ehofffQR5ubmqMh43759+OpXv1rTwXQrWCwWMpkMZDIZ1eJUe1Y8TLBYrJrAqtGoVCq05HH06FG89tpryGQySKfT9Dnl8/loaWmBQqGg76RUKkVL+ouLiwCAI0eOgMfj0Q6eb3/72/jUpz4Fo9FY14xr433q9wFJEUajUQQCgRtaRG8Gh8OB2WyGWq2G1WqFRCKhKSSVSoXu7m4IhUIMDQ0hlUohnU6DzWZjcXERkUgEEokEUqm0LtFg9ctg3bp1kMlkuHTpEhKJBLLZLO1TJ/9OOh42b95MRYlk99YIKe87IRKJYDKZsGXLFqqZYbFYkMvlsNlsUCqVd/wdhUIBHR0dEIlE8Hg8mJ6epostUFsaqTf5fB7lchmJRAKRSAQzMzO4ePEiQqEQFhcXkUgkakpAACCXy6FSqcDhcMDj8cDj8eiiBFw7v7m5Ofj9/htac0l5L5/PIxQK0d3OSizKuVwO6XQabrcbk5OTKJVK9FkaGRmB0+nE4OAg2Gw2VCoVdDodJBIJzUYtLi4imUwimUzSNH4ikUAymaRfm52dRT6fh0AggNvtpsG4XC6HUqmEXC6H2WyGRCKheipSTmik9uJisYiFhQW4XC5ks1mw2Wz09/eju7sbXV1ddxRlk4xOJBLB4OAgxsbGUKlUIJfLsX79erS1tUEqla7yWa0sRJyfSqUgkUjqfTiUUqlEmwPGx8cxMzODyclJuv4Q75C+vj4olUq0t7dDLpfT61ssFmm5xO12I51Ow+PxIJvNIhKJIJvNYmxsDAKBALt27aJrQT1K4A9FUFEoFOB0OuHxeDA2Ngan03nDons9fD4fW7duRVNTEzo6OmoeLp1Ohx07dsBoNMLpdCKZTNKa9eTkJMbHx2Gz2WggstpUq7x37dqFzs5ODA4OIhKJIB6PIxAI0Bckl8vFL3/5SzgcDvyH//Af0NTUhKamJkilUrpTW4469koilUohkUhgsViwefPmmq/d7UNDTM2ampqwuLgIp9NJO3waCaJmT6VSuHz5Mo4dO4bZ2Vl8+OGHVLRFUtjVWCwW9Pf3g8/n0xel3W6HSCQCcG1R++CDD/Dhhx9S59hKpULFYIODg5iamsJTTz0Fk8kEnU5XUypYLuLxOEZGRnD58mW89957iMfjeO+998BmsxEMBpFKpTA7O4tf/OIX4PF4kEqlaG9vxxe+8AXw+Xx88MEH8Hg88Hg8CIfDVGdULBaRTCZpSQQArly5QoME8rwIBAK0tLTgqaeegk6nQ6lUglwuh8lkosFFowQV2WwWQ0NDGBkZQSwWA5fLxYEDB/CZz3wGKpXqjhkKEjxOT0/j7//+7xGLxVAsFtHa2opnn30WTU1NK3KN60F11188HkcoFIJMJqvzUX1CoVDA6OgonE4n3nzzTZw+fRqpVIoGP2q1GuvXr8d3vvMdmM1mOBwOCIXCmtJfuVymwUQwGMSbb75J9Wbk3w8dOoRvf/vb6OjogEAgoM//arKmgwrSk01e
9nNzc3ShudWuk8PhQK1WQ6FQwOFwoKOjA0qlsubh5PF4UKvViEQidLdW3cpH6rr1hOhBxGIxisUi1q1bBx6Ph+npaXg8HsTjccRiMfpZ+P1+jI2N0bS/TCaDRqOh9XPy+0hKtJFEntUipHtNVZOOiUKhAK/Xi1AoBJ/PRy2NyUtYp9NBo9HU/YVCNBThcBhOpxNutxsejweZTIbqglgsFn0B2mw2GAwGtLa2oqOjg3puCIVCOhSP/F6v1wsAdMbN9c+IRCKhL5qVKglkMhlMTU3B4/FQm/xisQg2m41KpUKD3Fwuh3w+j0wmA6FQiLGxMfB4PCwuLsLtdiMYDCKZTNJsS6lUqslOslismnZxUurM5/OIRCIIh8NUf6JSqWh5sJFKIURDks/n6UslGo0iFApBIBBAIpHcNFNBMhQk0zU1NQW/3w82m42uri40NzdDKpU2VAB1vxCNAtHNEe1Qo2kqKpUKotEofD4fQqEQ0uk0zbYajUasX78edrsdFosFGo0GAoGgRttVbXKnVqvB5XLR29sLpVKJkZERmp0non2fzwelUkmz0qvJmg4qMpkMnE4nJicn8T/+x//AwsICXSxuhVwux5NPPgmr1YpvfOMbaGpquuHm43K5aG1tBYC6RHp3C4fDoW2Qv/Vbv4VCoYCjR4/i9OnTuHz5Mj7++GP6UnW5XHjttddompfH42Hv3r1Yt24d3cFZLBasX78eIpEIcrm8IWuS90o+n8fExAS8Xi9+9KMfYWBggGZ0SJ29q6sLBw4cgMViqbsTX7lcxszMDM6fP4+zZ8/i8OHDNENB4HA4sNls0Ov1+M3f/E3s3r2b6gaAT16i179sNm3adNuAGwAte6zUy2ZxcRE/+9nP4PV6adscCWh1Oh2kUiltoySGTtFolOpBMpkMbQcnQcOdSlfk6+T3BoNBjI6Ogs/n49SpUxCLxfjt3/5trF+/vqGCCkL1nI8jR47A7Xbjc5/7HPbt20fLXdU7WpK5OXr0KN566y3Mzs5ienoaGzduxDe/+U3YbDZotdoaEexapVwuw+v1wuv1IpfLgcvlwmq1orW1taHW7lKphKtXr1KhdalUgtFohMVioZo+lUoFg8FAyxbXP4OVSoWWgg0GA1paWhCPxzE9PY2JiQla0rxw4QL+4R/+AZs3b8aLL77IBBV3C3l4QqEQXC4XFhYWqKL/ZhANgsFggM1mg91uh1qtvqmghYh9Gl0dTRZj4FqJoFwuw2630wWb1Jaz2SzK5TLi8ThtSwRAW2RJUJFOp6HT6WjakNTmSeZiLegvgNpFOJfLYXFxkS6sTqeTvnDFYjGsVit9SOvpREnS9tXGZj6fD/F4HBwOB0KhkJYDhEIh2traoNFo0NzcDI1GU2N61MiUSiXaJkd21OR6iEQiqNVqxONxJJNJ+v3E/Ox2997d6mGu7xgJBoMQCARIpVJ3DLhWGzabTQN8Pp+PXC6HQCAAkUiE+fl5JJNJ8Pn8GqOn6u6ChYUFTE5OUk8XsnHQ6XR3LJ2sFUi5kFgAkLZpskNvlHMk97dYLIbJZEKlUkFzczPMZjNaW1upCd/tZkxVm14BoJkMspkg9240GoXX64Xf76dr4Wp+Do2/Ct0Esij4fD68/fbbcLvdSKVSt/2ZdevW4Rvf+AZsNhv6+voglUobqua2HLBYLHR3d6OlpQWf+tSn8M1vfhMulwtnz55FLBbD5OQkotEoXC4XkskkLl++jNnZWXqjisVivPnmm+Dz+RCJRBCJRHjyySfR09ODpqYm6HS6hp8tQZwHU6kUZmZm4PF48KMf/Qhzc3OYnp5GoVCA3W5HU1MT+vv78alPfQparRYOh4MGUfWgWCxifn4eoVAI77//Pj766CNEo1EA17pfent70dzcjJdeeonOb+DxeDAajTU71UZHrVZj+/bttITBYrGgVCohEomwYcMG6PV6uN1u2mK3EshkMvT09IDD4WBubo6+lKsN8RoBsViMxx57DFarFblcDvPz
81haWqJdbfPz89BqtVSALBKJUKlU4HK5EA6HaRdcpVKBWCyG0WiEw+GAXC5vqNLAg1AsFuF2uzE+Po5UKgUWiwWZTNZwtvtklALp/Ein09BoNJDL5VSovFxt3ESsbLPZUCgUaKCyWmvEfQcV8Xgc/+f//B+88cYbmJiYQKFQQG9vL373d38XX/nKVwAAg4ODtK1renp62VLLpL6YSCRoluJ6H4rqzgYOhwOr1Yr+/n7odDoYjUbw+fw1sxDfLcS7gdykOp0OOp0O4XCYmqmIxWJqepNKpai4Dbj2uY6OjtLfJxQKodVqweVyoVQqodFoANy7rmE1ITvbZDIJp9OJ2dlZDA8P0/Qon8+HVquF1WpFW1sbOjs7qRC0notQuVxGKBTC3Nwc5ufnsbCwgEqlQoM9i8UCh8OB9evX09IU0Qnc6bhvt/te7WdAIBDAbDbTtuBisUhr4BKJhL4Qbpb+XS54PB4dECgUCqkmo9GCCg6Hg+bmZuq5EQwG6QtpbGyMptDz+TyEQiEUCgUqlQqcTicikQhmZ2cRj8chFAohk8noZ/ywiDOBa887EdETT4fVfoneDRwOhxpYGQwGOvhsJUo0RGdIMtSrnX27r6Digw8+wNe+9rUbfCAGBgbwyiuvoFKp4JVXXsF/+S//BQDwx3/8x8taqyZBBbEzvb7Fjs1mY8eOHejo6EBTUxPa29ths9nQ2tp6gzHMwwqXy4VIJILZbMbzzz+PXC6HUCiEXC5HVfakB56468ViMczPz9MArVAo4NixY5iamkI+n4dEIoFcLm9I61vSajoxMYHDhw8jEAhgYmIC6XQaSqUSWq0WGzduhE6nQ09PD3UhJKWDep9PoVDA8ePHMTIygpmZGVQqFdryuGPHDnz5y1+mJZpqi+27CShSqRQKhQKt1ZKUKJvNpkHJaj0TBoMBL730EoaHh3Hq1Clq6pVIJHD69GnIZDJ6LxIzr+UmmUxiZmYGLBYLU1NTyOVy+OCDDzAxMYFnn30Wvb29y/437weBQIC+vj60tbXBZDLB5XLh0KFDOHHiBLLZLEZHR2kGjpRrASAcDiOTydBysN1ux65du9Df39/QG4L7pdrOmsvlIhqNIp1O07JuI0A2AOS5q1QqK3YtuFwuNcSqR8n6noOKX/ziF3jppZdQKpWwf/9+/H//3/+Hrq4uDA0N4dVXX8XCwgL+5//8n1i/fj3eeust2O12fOtb31rWg66umZN+9WrYbDY6Ojqwbds29Pf3o7Ozk4oT6+GEWQ/IC4TP50MqlaJSqaCpqYkK3EqlEkwmEwQCAWKxGFUMezweGqCVSiWMjIxgdHQUnZ2d2LJlCxWH1vslXA2pk+fzeVy9ehU/+MEPaF1RIpFg69atMBqNePzxx+FwOGC1WqFQKBqqlFMqlTA+Po5Tp04hFAqhUqlQIanNZsP69eshlUrvWc1Nas6JRIJadJN7gMfjQSwW0wVuNa4pyaSVSiVotVrEYjEEAgHk83la7qk+9pUgm83C5XKhXC7D5/Mhn8/j4sWLcLvd6O/vb5iggsfj0bZXqVSK3t5ezMzMUE+aYDAIAJibm6M/c/1nxmKxYDKZ0NfXh87Ozody7SOGaOS9QAzRRCJRQ2WkV2utYbPZtHukHvbd9xRUeL1evPrqqyiVSti9ezcOHTpEa3N79+7F9773Pbz66qu4evUqvve976FSqeBP/uRPlt32mFyc5uZmfP3rX6cPGNlhczgc7NmzB62trdBoNDRiu1uPdFJaCYfDa94bv5pqwysOh0MHEJEhax6PB01NTQiFQjhz5kzNIk9MV4RCIYxGY12Ov9qTIJFIoFAoIJ1OI5fL0fbLwcFBxONxWrtXqVTYs2cPDAYD2traqK1tI2Qnqql22iNaAtJ2NjU1hY8++ggqlYqWo+6WQqGAS5cuwe/3Qy6XQyqV0lZskUgEo9FI0+MCgQBNTU2QSCTUSI4MqCKGa8s1Z4BoKDQaDc6fP0+da68vY1bP8bhTkFGp
VKjOhMPhIBAI3HSCLemGAkDtj+/ka1NPiGCTy+XihRdegN1ux8DAAA4fPgyBQAC9Xo90Oo2ZmRlqplSN0+mkXgZ6vR4KhQI6na6hXrj3C4/HQ29vL4rFIj1/kuF72DPSt+p6UqlU6Onpgc1mo+tcwwo1v/e97yGRSIDD4dD2xGq2bNkC4Fo66uc//zlaWlrwjW98Y/mO9t8gu3Cz2Ywvf/nL1K6X1EOrxV/345BXLpcRCAQeqqCiulOEYLPZYLPZqA4hHo+jubmZur2RoIKMTp6YmIBara6bQr5YLFKn0MnJSYTDYUxPTyMajeLkyZO4evUqDZy6urrw/PPPo7m5Gf39/TVzTxpxoSmXywiHwwiHwwA+sVbOZDI4e/Ys0uk0pFIpLdfcLblcjrYhEmFYoVCg5SyHwwGxWAy1Wg25XI5XXnkFDoeDlpHa29uxc+dOatBDjNceFLFYjK1bt1JhZqlUQjQarQkqrl8079T9QXwu2traIBKJcPHixZsGFZlMhgYVJC1dPVOk0SCdSpVKBTt37sS2bdtgt9sxPT0NjUaDtrY2xGIxeL3emwYV8/PzcLvd6OnpgVKphMPhwL59+2qGUa1VeDwe1q9fD6FQSDdGZOPwsFPtXVGN0WhEb28vOjo66jLB9K6DinQ6jR/+8IcAgFdeeYXOv6im2i65UqngT//0T1e0zY0MRCJuidUfLolS7+eByefzdLwwGQ1MRsyS+nwjWcDeL9WfTblcRiaTwfT0NObn52kLH1l0dToduru7oVarV30RIkZI4XAYU1NTCIVCmJiYQDKZxNLSEtLpNPx+P/L5PBUztra2oqWl5QbNRKMuoGw2m/atRyKRmpcDsV4XCoVIJpP3HFSQdslMJkNNoYgocXFxEQKBAMlkElKpFCdPnoTf76eD+YgeQ6fToaOjg1pmP2itmsfjobOzExKJBOfPn6d18Hw+D6vViubmZjruOZvNYnZ2lgptSYBMsicCgQBCoZC+TBwOB7hcLtRqNXw+H8bHx+HxeG55LCRlTpxzs9ks9X9oJKo3SHa7HQcPHqTtgvl8nt7nJKNEMlMk6xqNRjE+Po5EIoHW1lbodDoolco1LdysvhcaedOw3JDnmdyv1ZBntG6Oz3f7jSdOnKCDil5++eWbfk/1S72trQ1f+9rXHvDwbk/1rulmtcT7/UCTySROnjyJyclJxGIx6nwmlUrhcDjQ0tLyUKQOCcSN0Ov14v3334fT6aQ7ZlKb6+npwa5du+py3tFolKZw/+Ef/gGJRII6YpLdJXlJ7tmzB9/5zncgl8uh0WiojoYEno0Kl8tFT08PSqUSTp8+DbfbTb8Wi8WQSCTue1dJsm3JZLJmcFEymUQoFKopaRw5cgQcDofWqYma3mg04qmnnkJTUxNeffXVBw4qxGIxNmzYgJaWFnz88ceIx+M023jw4EF89rOfhVarpUHWm2++iVgsBolEQkXIfD6fBvlqtRo2m41OIi2Xy1hYWEAoFMI//uM/4ic/+ckNI9GrP0viPutyuRCNRhs2fV6difvd3/1d+P1+fPTRRygUCrS0qVAoIJFIsHHjRlitVkxMTOD48ePw+Xz48Y9/DK1Wi3w+j5aWFjz33HN1K2cuF+Q90GjXaiUhc2G8Xi/V1hCIW7ROp2vsoOLkyZMArr1knnzyyZt+T/WL/bvf/e6qmPEs5w6UlAFIN0QqlaI97AqFAiaTiXruP0yCJ6JRiEQitG2temgXme5IpuGt9o1KRnsT7UwqlUI2m71pqjqdTiMYDCKTydCdLfF0IP/fiClfNpuNlpYW5HI5eL1eahBF7LSvb3UkO1SZTEZnBLDZbDogjNThrz/P2513uVymO5/rv4/L5cLj8YDFYsHv99e82O8HkgHj8/mwWq1IJBK03bm1tZU+a6TTqLOzE7FYjNovk2CX+M3IZDI6gInL5aJcLkMmk6FQKNQ4jVZ/dtUQUTNpR2zk55tcazIaOxgM0vEEXC4XDocDFosFra2t
sNlsKBaLcDqdiMfj8Pv9iMfjcLvdNSO1G/GZuBfW8rHfD6VSCUtLS5idnUUikUClUqHvJZFIRE3yGjqoGBsbAwA66ORmDAwMALjm9EW8KtYSqVQKHo+H+hqQATw8Hg87duxAb28venp6aH3+YbmRo9EoTp06hZGREbhcLsRiMRQKBfD5fOzatQsOhwPr1q2r201KXBjT6TTS6fQtAwoAeP/993HhwgX6kpBIJGhra4NWq8W/+3f/Dna7nXZBNBJCoRDPP/88Dh48CIPBgKamJly5cgXDw8M3fG91S+jmzZvR1dVFjbsikQjGx8chFArR1dVFDZGAOy+8hUIBv/rVrzA5OXnD90ciEXz44YeQy+XI5XJoamrCF77wBbS0tDzQeQsEAnz+859HMpmkQk0y/4AIigUCAZ555pmaYKD6/8lLtlqUVi6XIRQK6VTe28FisaBWq2GxWGC326FSqeqimr8XstksAoEALl26hH/6p39CMBhENBqFQqHAb/zGb2Dnzp30xeJyudDX14exsTF8//vfRzwexzvvvAOVSoW9e/dCr9fT4YIMjQ159lOpFH76059iaGgIMzMzAED9iaxWKx05UI/g+K6DCp/PB+CaI97NKBaL+IM/+AMAuK3VaCNCUuikt9vr9SKbzdIBNWQImdVqhVQqXVPndj2kFZecMxmNvLCwAL/fj2w2Sz0NBAIBdDod7HY7HTxWD0iLFBEUkhT+zXbwxJ6Y+OALhUKkUilYrVY6ZAeo3e01wsuDzWbTF2Brays8Hg9tiyXdGsAnU1lJWaKpqQl2u53u3qVSKTKZDPh8Pux2OyQSyV2LHXO5HOx2O0KhEHUmJTM4iMlQPp+n+oY7udje7XkrlUqIxWJqky2Xy2s2Lvejb6iutd/p+pKsCZnq2Mh25+RaEo3R4uIivF4vEokE9SdoamqCwWCggSbRwxAjLJKJJZbwqVSK6tPWIsSnopE7eJYLsn6TcrXT6aT6K6KlUCgUdR2Od9dPD3kwY7HYTb/+l3/5l5iYmADQ2I6LN4MItM6ePYt/+qd/opPkyuUyNBoNlEolNm/ejB07dkCr1db7cB+IYrGI8+fPY2RkBHNzc/B6vYhEIpibm0MikUA6nQafz4fD4YDJZMJnP/tZbNq0qWY0/Gqj0WjQ398Ph8OBvXv3IpPJ0BbEUChUY+fsdDoxPj5Op3yWy2XMz8/D6/Xie9/7HgwGAx577DH09/fDbDbDbDY31FRWDoeDrVu3orOzE9FoFJFIBC6XC8PDw2Cz2VCpVBAKhWhtbaUTDknan81mI5fL0XkhxHnzbjMV5XIZ+/fvp/eD2+3G2NgY3nrrLfoZ5/N5XLp0CbOzs/j617/+wOdLHEOrMyprbf1YTYjJ09WrV/HDH/4QLpcLuVyOalTsdjsV0pLrLZfLsXHjRohEIhw6dAhzc3NYXFykQ8eCwSCeeuopdHV11fns7p1CoYChoSFcuXKF6sAeZsgsI6fTifn5eUSjUboJfPrpp/Hkk0+ir6+vbsZXwD0EFa2trTh58iRGRkYwOztLp3gCwNDQEP70T/+U/jvZ5a9UxP+gbV/VHzQZwBMKhTA2Nobjx4/TnRkxByKpUeJ5sVYhJlEjIyM4deoULly4QAPBaogXhdlsRnNzM/R6fR2OtvZ4hEIhvRak/ZUMhCIdOuR70+k0BAIB/H4/VfWXSiU6/pnP51NnVb1eT3e0jQDR78hkMhiNRhQKBTrCnM1mw2QyQSQS0e4IogMgP1sul6HX68Fise55JkilUoFSqUShUIDZbKZuq9UBV7lcrtG1LMf5NkKXBcn+NEJgeSuqTd6cTifOnj2LSCSCUqkEiUQCm80Gi8VCbc4JpLSh1+uh1+uRSCTg9/uRy+UwOzuLcrmM/v7+Op7Z/VMsFuH3++FyuZblfmx0CoUCAoEA3G53TecHmVy8adMmaLXaum6S7vqt/+yzz+IHP/gByuUyXnzxRfzd3/0dHA4Hjhw5gt/8zd9EJpPB
c889h3feeQf5fB5/9md/ht/5nd+hEdNyQKyYiQPkvUBq7EKhEDqdDjwej6bQ3W43PvroI4yPjyOfz9OUukgkwrZt22CxWGA2m294WNcS+XwebrcbgUAA586dw4ULF26wWSdmSAaDAY8//jiampqgUCjqdMQ3QkoyJK1bLpehUqlq0p5WqxW7du2iQ3WWlpZw5MgRLC4uYnFxEalUCgMDA4jFYnA6nUgmkzAYDHSgWCO8WKq7MdhsNgwGA/bs2UPbBYlIsjoYqtYYLEcwT9xqV8oqu5HgcDhob29HR0fHLcu7jUClUoHH48H09DQGBwfh8/kgEAjQ29uLtrY2vPLKK7BarVCpVDU/R4zDBAIBbDYbgGsunJlMBj6fDxwOB4lEoh6ndN+Uy2UqSo7FYnTj0MgC2weBlHojkQjeeecdLCwsIB6Pg81mw2KxQKvVorOzEwaDoe4twne9+rz44ovYv38/PvroI1y+fBnbtm2r+fpTTz2Fn/3sZ9i4cSMmJibwx3/8x/jjP/5jjI6Ooru7+4EPlNSSCoUCPB4PBgYG7mmxEwqFUKlU0Ol0UKvVdHdEWs/OnTuHqakpavcKXBORORwOdHR0QKVSNYyP/P1QKBRw9epVTE1NYWBggGYoql+gIpEILS0tsFgsNJiqZ9njeu6m7kucMwuFArZs2QK/3w+/309dIlOpFCYmJjA5OQmv14tCoYD169ejqampYSzcr5/rQTpY7vZnlyPwJV1QD4v52+0gHiEdHR2Qy+X1PpxbQiaQHjt2DKOjo4hEIjAajejq6kJHRwd27dpF7eevh8Vigc/nw2Kx0MmVRE9Fur9We0T2g0CCCtIVRjxY1nIm+XYQjVMoFMKRI0fg8XiojqapqQk2m61G+1bP63jXQQWHw8F7772HP//zP8e//uu/YnZ2FtlsFmazGV/5ylfwve99D1wuF6+//jq+8Y1vYGBgAEKhEJ2dnQ90gET5T0b/+nw+jIyMYHx8/J5+DxExyeVyLC4uQi6XU3vejz76CFNTUwgEAgCumXitW7cONpsNu3btgtVqXXNmVySyJe2Y4XAYQ0NDcDqdN8xYkMvldITygQMHYDKZaM2+kUVrt4MEIGq1Gs8//zxcLheMRiMWFhYwPT0Nn8+HcDiM0dFRFAoFukttamq6afBIDKOIQHI1HlpiWkSuITFf4/F4kEgky1I2INk68izMzMzA7XZjeHgY4+PjmJ2dpS6XpFTR3NwMk8nUUFms6ymVSnA6nZicnITH47lhA1KtM2GxWFCpVGhqaqr7Lu9mkOtDxnzPz88jGAzSa3HgwAE4HA6qo8jlciiXyzVTmkmbbTKZpAMYSdeLzWZryCGBt6NYLCISidASJ+mGqvcLdaXIZDIIh8OYm5tDKBSidgcCgQAbN27EunXrYDQaG+L87+mNIRQK8Sd/8if4kz/5k1t+T29vL86fP//AB0YolUo0bf/6669jcHAQLpcLi4uL9/R7yMNFhGHVplnEE4D8u9FoxAsvvIC2tjbs3r2bzrpfS5CAIpPJwOv1wuVy4dSpUxgfH7+hdKTT6bBt2zb09fXhq1/9Ku1CaIQb9H7hcDgQiUQQCoU4cOAACoUCurq6MDY2hg8++AAffvghAoEAPvjgA4yNjaFSqcDhcNzS2IlYhBOjndUog3k8Hhw9ehTxeBxLS0tQKpXYtWsX1Go1bSN9EKo7aPL5PLLZLN5++218+OGHVMRLumiAT+yid+/ejebm5oYWLZdKJXz88cc4duwYrl69WvO1m9l/m81m2Gy2hsrMVZPP55HL5TA9PY2zZ88im82Cx+Ohq6sLL774Yo1nSCaToYJr0tVCrMhDoRBtlWez2bDb7Whtba1xQ14LEF0JmURcLpfB4/EeOg8hQiQSwfnz53Hx4kX4fD7aeSUQCHDgwAFs374dCoWiIcrzDbsNJbuzTCaD8fFxzM3NweVywefz0foZ8EkNmdQRSavUrX4nqS9Wvyzz+XzNIkPqybFYDJlM
htbxG+GC3QnSKppOp2n3wNWrV7GwsACfz0eDJy6XC6PRCL1ej+7ubvT19VFfg4dhJgCh2mpcr9ejUqnA6XQiEAhgYWGBtmT5/X7ajkmCzurFqVwuI5VKgc/nr/pcAeKfEolEIBAIoNVqqWMouVakpfReKJVKdCCb2+1GJBLBzMwMNd+qfo4EAgEMBgMMBgO6u7vR0dHRsC9gQrUd+Z1o5F0uMSWLxWIIh8N0l0pabblcLjWlI+6yuVyOCphVKhVEIhHdZBDbdg6HA71eT9Pmaw1SEiBZCp1OB41GsybP5VaQoD4UCuHq1at0Tg6Hw6kxZKzeKJMNJXmnVSoVqhUkujHy38n3kHISALqhvN85Pw0bVORyOdr69Dd/8zcYHh6mwrHqnRNps3vsscfAYrHw8ccf3yBAJJAPsVolTB7EapLJJKanp5FOp9Hd3Y1cLge9Xg+RSLRyJ7xMkCmeU1NTOHz4MJxOJ95//32kUim6mBAnwmeeeQaPPfYYHA4H2trawOfzIRKJGnZxfRA4HA6am5tpqre9vR0nTpzAP/3TPyEej+P8+fNIJBJU/CaRSGrqs+TFK5fLbxDCreQxi8VixGIxHD16lBqxKRQKzMzMoKWlBTabjabuTSbTPV23bDaL4eFhzM/P4x//8R/pPJXq4XwElUqFT33qU7Db7fjCF74AnU63JkpjNzOxqv73au1Ko97zxWIRly5dwvDwMIaGhhCNRqnonAQVRO1Pgl+SkYhGo+jt7aXuooFAAIuLi7QNsa+vD08++eSat+rm8/nYuXMnmpub1/y5ECqVCg0OL1y4gO9///v0HSiTybB161bY7Xbqn0Tu33w+T718yuUyzXCRgZEKhQKFQoEG3CTYdDqd4HA46O3thVwuh1qtvq93XsOuCuTlH4vF4Pf7EQqF6NcUCgUtSfB4PJhMJrS0tIDFYmFubg4sFguJRII6L1YHDXcj7iTGMnw+Hy6Xi7aXkkivUcVAxMArFothYWEBCwsLcLvd8Pv9KBaLEAqFEIlEdFplU1MTWlpaoNPpanbnjbq4Pihk8VUqlWhpacHExAQ1h0qn01RFnslkbniYyA5wNYWLQqEQer0ecrkcPB4P+Xye9qW73W6Uy2Vks1k6NK3am+BuIFNe5+fnMTs7C6/XC6DWxpq8vLRaLW0zlkqla0K0fH3wQFhLgkTg2vFGo1GEw+GaLG11RrH6fEgGI5vNIhgM0vbrVCqFRCKBVCpFO6g0Gg0UCkVDtPU+CCwWC0KhcM2bExKqnTMjkQgWFxcRDodvmlUns4GAa59DMplEJBKhmZxsNovJyUkkEgmqoyGmdiRDkUwmMTc3RzMgKpUKEonk4QsqcrkcTc+Wy2XqvPfpT38a+/fvh1gshkwmg1Qqhc1mQ7lcRl9fH2ZmZvDRRx9hYGCApgLvhXg8jjNnzkAoFGJ4eBhKpRIbN26EzWZraJOYcrmMQ4cO4a233oLb7cbMzAx1yJRKpXTU9J49e+BwONDe3k59Goj4cC0ttvcDi8WCXq+HSqVCMpnE8ePHEQgE4PF44HK5MDQ0hFKphA0bNtRdtGcwGKBSqaDRaOByueB2u3H+/HmkUin86le/ohM6SWblVnXx6qxDdUknk8kgFAohm81iaWkJQG1KlMViwWAwYPv27bDb7Xj66aeh0Wjq/rk8alQqFYTDYQQCASSTSQCg2cbrrwWbzYZarYZEIoHL5aIWzgAwPDyM0dFRxONxavzW3d0Nk8n0ULyIiX5krZ8L6XQk5mQffPABRkZGasYTpNNpXL16FXNzc0gmk9BoNPTniWEeEV8TG4ZSqQSxWAyBQEDLI6R8RLIVbDYb77zzDjQaDb773e/el39JQwcVJNIiJ05eflqtFi0tLdTbXyQSQalUolwuw2w2I5VKQS6X02mL1VSb3FRPsKxWwJPoLx6PI5FIUOOldDqN7du31+kTuTnkJUBuwunpaZw5cwbxeJxOWCW+BmazGUajET09PbBardBoNKuu
D2gEiGGUUqmEXC6nu79cLodkMol4PN4Qlr/EtEin08FkMqFUKkEoFFInUbLjvl0geH2QcKegkTwbpKZKjN+sVivVcjyMQrhGJ5fLUVNB4Nq9cb2dOVkzyfXL5XKIxWI0aCZ23vl8nrbXy+Xyhs283i3k/iZZtYfh/iQmZ/Pz87h06RKWlpZqNgflcplO9r1y5QokEgl9dgOBAJaWlm7I0gOfrAE3+xoAOmQuEAggnU7f17E3bFBxPZVKBfF4HOl0GufPn0c2m4VYLKY+51KpFNlsFidPnsTCwgLm5uaooAn4pPtDIBBQccvu3bthNpuxuLhIXcquXr1K5xqQbAnp5ybDlBqFQqGARCKBeDyO119/HdPT0zh37hyCwSD129Bqtdi8eTOam5vx9a9/ne7Sq50YHzWq64iRSIS22PF4PBgMBhiNxob6bLRaLb7whS/A5XKBxWLB4/Hg8uXLiEajdzXX4mb/fDM4HA66urqoe6xSqURPTw+eeOIJSCQSqNVqKgpkWD1IWZNoogBg06ZNOHjwIPr7+8HhcFAqlZBMJpFKpXDixAlMTU1hZGQEV69exejoKC5fvoylpSVkMhkoFAo8+eST6OzsbGizr7uF+FN0d3dj165dq6Z5WikKhQLGx8exsLCAS5cuYX5+/ga30HK5jEwmg0KhQDdFcrkcYrEYzc3N6OzshEAggEwmo8aPXC6Xli6J8Pd6yDwhtVoNu91+X8ffsKvDzRZAIlqZnJxEPp+HTCaDUqmk35vNZnHixImbCjWrhwaZTCYYjUbs3LkTbW1tmJycxNzcHLhcLpxOJ43Wqm1xySjtu1GSrxYkWvV6vXjjjTdw+vTpG3ajEokELS0t6OzsRE9Pz0OljL5fSPaLBI8kUGSz2dQiu5FSqGTWh1Qqpc6fk5OTiMfjAG58Vm6lG7pTgEGMdIgPhcViwbp169Da2vrQtuqtFYjRExlyaDAYsGXLFjq7hnhQRCIRnD17FhcuXIDH40EwGASXy8XY2BjNwhL/oJ6enjUhPr8bOBwOzcSu9fJcuVzG7OwshoeHMTc3d4OvEIGUMIiZGXAtW6NSqaDVaiESiah7tFgsBo/Hg16vh1Qqhdfrvaktg0AgQF9fHxQKxX370DRsUEFaHsvlMpqamqhIiaT05ubm6IdFyOfztOZIUkFkZ6XX67Ft2zZotVrs2bMHBoMBzc3NkMlkUKlU6O3txdatW7F582Y4nU784he/oL3eYrEYzzzzDDZs2PDAo56Xg+oulitXrmB6ehrhcLjmRWGxWOBwONDb24uXXnoJOp1uzac5l4NKpYJkMolgMEg9O8rlMiwWCzo7O+lApkbKVHA4HAiFQhgMBnzuc59DLBZDb28vHaBUfd2j0ShmZ2fpxN3qYWtElGc0GmGxWG4InDgcDjZt2gSTyUSHfJHe97WmtWGz2Whubqafk9vtrvch3TeVSoXO6yDX0+l04qOPPsK2bdtgt9trxMZLS0twOp1IpVJU2EdakQ8ePAiLxYK+vj4YjcY1vyaw2Wxa6iaau7Ue/LJYLOh0OthsNvT399e4vJJJxAKBAGq1mk6lVSgU1KuEzEricrkQCoU103qJZ0lzc/NNZ6WQz5Note6Hhg0qOBwOtFotyuUyjEYjdDodFV3G43G6S7sVpCZMPuSOjg488cQTcDgc2LFjR00wQkQubW1t2LBhAyYnJzEyMgK/3w+FQgG1Wo2DBw+ir6+vIVTvJIOSSqVw5swZOJ1ORCKRmvkPNpsN27Ztw9atW7Ft27a6jsJtNCKRCPU+CYVCkEql6OzspCOjb2V1XC/IoiAUCrFhwwaUSiV0dHQgnU7f8LL3+Xw4evQootEo5ufna+qiNpsNer0e69atQ39//w0/y2az10zr9J0gz0A8HsfU1FS9D+eBIIFwOBymWTWi1BcKhXjiiSdqOgU8Hg/dhRJ9BTH127dvHxwOB5qammraENcqxGWWiJUfBqEmsUlobm5GLperMZmTSqV0jWpt
bYVEIoHJZIJQKGyYa9mwQQWxWVYoFFT7MDc3B5/PR7+HOA3m83nEYjFwOBy0tbVBo9HQEdFqtRoajQYWiwWbNm2CQqG4ZU2YPKRmsxkvvPACUqkUbcO0WCw1xiH1pFQq0XZbj8cDp9OJXC5H06JkVPju3btht9sfGjMrYmZUKpVoCxVpISP9+reCiFlLpRI8Hg8uXrwIl8uFSqVCxb/k3mj0z4nFYt3go0Fgs9nYs2cP0uk0vF5vTaZCp9NRgZ5CobhpULHWF2QCi8WCRqNBe3s7HA4Henp6EAqFarpcyPcRXxeDwYANGzY0XImQxWJBLpdDp9NRvxnSXjw3N4fLly+jVCrh3Llz8Pl8tDWYOMq2tbWhr6+P7nxVKlVDZeLuB9JW7fP5wOVyIZPJ6K68EdboB4HNZkOj0dCsBMm+A9c0DyRTQeZRNVomsWGDCmKzzOfz8YUvfIE6a87OztLvmZ+fx8DAAJaWljA+Pg6hUIhnnnkG7e3t1BTIYrHAYDDUdH3c6qYjLyaxWIxvf/vbNxxPo7RcFgoFhEIhzM/P49y5c5ifn6cT+jZt2oSuri4888wz2LFjR0276FqHCFOj0SguXLiAYrGI/v5+KJVKKiK8HUSTc/r0afzLv/wLbbMio95JOrjRFyU2m31LN0ulUgmr1XpL5Tf5/1ud48NwnwDXnlciyCYbjpGRERw+fJh2exHy+TyOHTuGubk5yGQyNDU11fHIb4QMPGtvb0c4HEY4HEY6nYbf78eJEyfgcrmQyWRw9epV2oIPXLsXDAYDnn76aXzta1+DTCajniaNspbdL+l0GleuXEEwGKQD98jLdi2fF3Dt3iXutTezlK/+50a8jg0bVACfLH4kAr1elU+Mi+LxOJqamsDn89Hb24vm5maoVCpIpVLIZLJ7qhtePyGyESkUCggGg1hcXKxpMyMeDCRbU+209zBAREmpVIoOtBOJRNBqtTCZTLedMFkul+mUUrfbjXg8jlwuBw6HA4lEArPZfFOdQaNyq2v6sFzr5YAIs81mM3p7e5HNZjE6OlpjDkRIp9O0u4zY+TfKGsBisWAymWC1WjE5OYmFhQXamZZKpeDz+VAul6kHAam12+12aDQadHV1QalUPjQ7eeATW+lKpQK5XE4zlg/L/b+Wr1FDBxUAaFAhEAhoKpOwdetWPP/887Q/GwDdad4pK7GWiUQiePfddzE/P08HywDXFtFt27bhs5/9LKRS6ZpPcV4PsSBeXFzEL37xC3g8HmpVbLPZbjvgqlwuw+/3I5FIwOv1IhQK0WmfHR0d+PSnPw2dTtcQmhmGB4e0GfL5fKxbtw5dXV3o6ekBj8eDy+XC+++/X6NJIXqEUCiETCZDA5JGeElxuVzs2LEDDocDfr8f09PTKBQKiEajSKfTSKfTUCgU2Lt3LzQaDfbt2wer1QqVSkXLAsR+/2FaD4nVuN1uh1qtXvNdHw8LDR9UAJ9EbWtlF7nSkKmShULhhhS3XC6nY7EbYUFcTojXCJ/Pp7XEZDJJjdGqAyzSgldtbBaNRmlqWCgU0nSw0WikC/CtFl02mw2BQLDm1fKPEuRakuBCqVRCqVQiHA7XaIyI3ob4lkSjUYhEIjqois/n1/VZYrFYkMlkqFQqsFqtaG1tpWZYpFyr1Wqp0Nhut8NgMNByABGtP2yQwE+hUECpVD6U57gWYa7CGkQmk2H79u2QSqX48MMP6304qwZJZSsUCvyn//SfsLS0hKtXr9JRztXp7FAohMHBwRonQhKEbdq0CZ2dnejs7MSePXvoXIvbTeUTCARobW1dM9NqGT6BBAREiH19YFipVBAKhaiJ3MTEBB3S5nA4sHv37rrugqv9U37zN38TX//61wGAumeSYJt4EYhEItox9DAItG+GQCCAxWIBi8WivgqPojtwI8IEFWsQPp8PvV4Pv98PmUxGd+ukFPCwUt1v3dnZCYvFgnK5jKWlJaRSqZq+62KxWDO5EfhkAJPZbEZzczP6+vrQ
3d0NkUhEd3S3guwIH9ZF+lGAZB2ImyxptQRqjfVisRjMZjMV8u7cubOux01KOQCooPhRh8PhQK1W004J4lXBUH+YoGINIhKJ0NXVBavVio6ODprm53K56OnpeWj8728Fl8ulNtIKhQL5fB6lUqkmU5HJZPDNb36TugiSr1UvQjKZjM6yuJuZGCQNzgQVaxOlUoldu3bBaDQiEAjA7/djcnISqVQKmUyGzn+JRqOw2+2w2+2w2WwP9bO0VpHJZDh48CBtr+ZyuQ/1hmotwQQVa5DqGrHVaq334aw6JNULYNU8BR42kdujiFAohM1mA5fLRUdHBxQKBfx+f43/SaFQoEJNMieHofEQCoWwWCz1PgyGm8AEFQwMDI8ExCSJzWbjueeeQyAQAAAsLi5icnISkUgEu3btQl9fH8xmMxwOBzQaDRNMMjDcA0xQwcDA8EjA4/FoG7Fer0c6nUYkEsHk5CQAYHZ2Fo8//jheeuklWuYi3g4MDAx3BxNUMDAwPFIQTQyXy6XGUCqVCps2bcKWLVsgk8lqvofR0DAw3D1MUMHAwPBIwufzsWnTJlQqFTz11FPU76Q6M8EIcxkY7g0mqGBgYHgkYbFYjGESA8Myw6pcb8m4SggEAuh0unr86boSCASY836EYM770YI570eLR/W8PR7PDW7OhLqF6TqdDm63u15/vm5YrVbmvB8hmPN+tGDO+9HiUT7vW8H0SjEwMDAwMDAsC0xBkYHhIaBSqdyQjmT8FRgYGFYbJqhgYFjDkGBifn4eJ0+eRLlcRqlUgslkwp49e5h5JQwMDKsKE1QwMKxhKpUKyuUyxsfH8Vd/9VfI5/MoFovYuHEjNmzYAIFAwLRFMjAwrBpMUMHAsEapVCqIx+OIxWIYHx/H0tISHZ62tLQEr9cLNpsNlUoFgUBQ78NlYGB4BGCCCgaGNQope1y6dAmDg4Nwu91UVyEUCjE4OIh0Ok0zFgwMDAwrDRNUMDDcBblcDslkkk5I5XA4ddMqVCoVWuZwuVwYHR3F4uJijViTzWZDqVRCKpU+UoLNUqmETCaDYrGISCSCbDZ7w/dIpVIYjUbGgnsNQO7pbDaLfD6PdDqNZDIJFot1g/MpmSQsk8kgEAjA4XDoOHTmOq8eTFDBwHAHKpUKAoEA3n77bej1euzduxdisbhuw6ZKpRKCwSAikQjeffdd/OxnP0Mmk6np/pBKpWhra0NTUxP4fP6qH2O9yOfzuHr1Kubn5/HTn/4UFy9eBFD7Unn66afx3//7f4dEImECiwanXC6jXC5jbm4OY2NjOHfuHI4cOQKBQACNRkOfPxaLBblcDqFQiP3792Pjxo1Qq9XQaDQ3BCAMK8sjEVRc32r3sCwi5XIZ+Xy+5vx4PN4jZT1Mzn2lrmmhUEChUEAsFsPS0hLYbDbK5fKK/K27pVKpIJ1OIxqNIhKJIBqN0s9BIBBAqVTCarVCIpGAz+c/EpmKSqWCUqmEfD4Pl8uF2dlZjI+PY2FhAUDt/TE/Pw+fzweNRgOpVAoOhwMul/tIfE5rCSJCLhaLCIVCmJ6exsLCAubm5sDn8xEKheh15XA4UKlUkEgkmJ6ehkqlAgDIZLK6ZhUfRR76t0+pVEK5XKY3KIvFAp/PfyhusEwmg4GBAcRiMQDXAor+/v5HyjaWXFuS+lxuPB4PBgcHsbCwALfbTTMUAoGgbi+hcrkMp9OJ4eFhLC4u1gQ57e3t+OIXv4iOjg4YjUbaUvqwk8vlEA6H4Xa78eMf/xgjIyPwer03fc4HBgbwe7/3ezCbzXjllVdgMplgsVggkUjqcOQMt4IEz6lUCu+//z7+5V/+BYlEgpY/EokE/V4WiwW32w0Oh4PJyUn87Gc/wzPPPIOvfvWrUKlUMBqNTLZilXhogwrysikWiygUCjSoIBErh8O558CCfH+jBCTFYhHz8/MIBAIArgUV3d3ddT6qlYXsyEulEr2mJKgg13U5XqLk78TjcUxOTiIajdKXNwlg6nEf
kHRwMBhEIBBAMpms+bpSqURnZydsNhv4fP4js5CWy2XE43EEg0FMTk5ifn4epVLpptcoHo/j7NmzsNls2L17NwQCAQwGQx2O+v6pzk6Sf64OLokWgUxevdfsJcn8AJ+sd6t9z5NjqM4UVioVei7k+SdrAfkcyCaru7sbgUAAHA4HWq2WtlY3yvr9sPJQBhWVSgV+vx9LS0uYm5vD0NAQCoUCMpkM5HI5tm/fDqVSCZPJBLFYjEwmg1wud8vfVy3+If+rJ+RBSqVSmJycxOLiIrLZLDgcDvbu3VvXY1tJyuUyCoUC0uk0Ll26hFQqBa1WCz6fj1QqhUwmg66uLjQ1NT3QwlGpVFAoFFAsFjE2NobDhw+ju7sbn/nMZ2CxWOqW6SKLayQSwenTpzE4OAiPxwMA4HK54PP5MBqNaGtrq6k3PwrE43H86le/gtPpRDgcpi+am0GeHb/fj0uXLiESicBms0GhUKzyUd8/hUIBuVwOpVIJ6XQauVwOLpcLqVSK/vvc3Bx8Ph9eeOEFHDhw4J6C7VQqhTNnziAej8NqtUIsFsNut0Mul6/gWdXCZrMhFovB4/Hw5JNPgsfjIZvN1ohvU6kULly4gHg8jlQqhWKxSL92/vx5lMtlOBwOvPTSS1Cr1bBYLHVfvx92HtqgIhwOY3x8HOfPn8eRI0eQyWSQSqWg0+lQLBZhMpmwZcsWqNVqRKPRmlTa9bDZbJhMJlqfq3f5hETo+Xwefr8fi4uLiEQi4HA4SKfTdTuulYYEU/F4HKdOnUIsFkNbWxvkcjm8Xi+i0SiUSiWampoe+G+RHZLH48HQ0BBaW1vR2dkJpVJ5X1mu5aBUKiEWi8Hv92Nubg7j4+P0enO5XAgEAkgkEqhUKshkskei7EFIp9MYHR2Fy+W6QbR6PaR7JpVKYXFxEWw2G9lsFpVKpeF3seS8isUiMpkMstksYrEYzb5EIhEkEglks1mcP38eo6OjUKvV2L9//z3dD9lsFidOnEA4HEZPTw8MBgN0Ot2qBhUsFgs8Hg8cDgcOhwPZbBbxeBzxeJx+D9FaFAoFZLPZmqDC4/EgFAph06ZN6Ovrg91uh9FoXLXjfxDWsg7woQkqyO5yfn4eoVAIR44cwejoKBVlFYtF2oZ36tQpKJVKjIyMQCKRIJ1OI5PJ3PJ3s1gsqFQqiMVi9PX10ZeLVqsFh8NZ9ZcM2bF6vV7Mzs5ibm4OAoEAMpls1Y6hHpB2wVAohKtXr8Lv9yOVSkEul0Mul0OtVkMkEt33768Who2OjmJiYgLT09NQq9V0URWJRHV7WReLRczNzWFqago+n4+2TgJAW1sbtm3bhi1btkCpVFInzYedVCqFSCRCr5XX60U+n7+rny0UCpidnUU6ncaxY8cQDodhNBqhUqlokFbvzUOxWKSBdCaTgc/nQyAQgN/vh9frRS6XQyKRQDqdhsfjQbFYRFtbG4xGIz7zmc/gs5/9LJ588sm7vmdJuZhsykirciAQwMaNG1f2hG8COW6bzQaZTEbFuOSz8fl8GBkZQTqdRiKRqMk4k3KoVqtFb28vdDpdQ4vYyfqTy+Vw4sQJzMzM0ECuvb0dTU1NtPRaLBYRj8eRy+WQy+VqgikCj8eDRCKh/79a5/5Af6VYLOJHP/oRfvSjH+HSpUuIRqMQi8Xo7u7GF7/4RfzWb/3WqpwIuRiFQgGnTp3ClStXcOzYMQwPD98waCmZTMLv9wP45Ia92TCm6yEX85lnnsGePXuwefNmyOVyGkmvJoVCAYFAAPPz85icnITP54PZbIZUKr3jeaxlSqUSEokElpaWcO7cOSwtLSGRSECv12Pr1q1oamqCUCh8oL9Bgs/jx4/jww8/RC6Xg8FggF6vh0KhoH3v9SCfz2NoaAizs7Pw+/01aeCuri48/fTTaGtrW9UFpN7E43GMj49jcHAQExMTiMfjdx1U5PN5qkmSy+UYHx+n7YgSiaTuafJqTZjT6YTP58PRo0epQNfr9dIyCMmy
kFZio9GIzZs3w2azQS6X33VwVCqV6Bo5MDAAn88H4NpL/XbZ3JWCBAZqtRpqtZr+d5K11Ov10Ol0WFxcvOGer/atsNvttNOnUSEaklQqhb/6q7/C+++/j+7ubjQ3N+OrX/0qzGYz1YwVi0V4PB6aubxeWwWAZm2lUimEQmHjBxWRSATPPfccTp8+XfPf4/E4zp07h3PnzuHEiRN44403HvggbwXZVZJ6YigUwqVLlzA7O4tQKHTHYIEImAQCAfh8PsRiMTQaDQ028vk8wuEwcrkc0uk0CoUC3G43hoeHUSgUIJVKoVarV90LoFAoIBgMwu/3UxHqckCCM2D1RVm3I5vNUnX/zMwMnE4n7HY7dDod9u7dC5PJhN7eXhiNRiiVynv+/aSURK5vOBzG/Pw84vE4Ojo60Nraig0bNtS124MsNrOzs5iZmUEqlQIAKjyTyWSwWCxQKBQNc91WAiJUzeVyNNNw4sQJzM3N1WRuquFyueDxeDAajejt7UUkEsHIyAjVzaRSKfrzYrEYhUIBHR0dq/4SIp1qJLBNp9NYWFhAIpHA4OAgAoEApqenqYZKLBbTFmKhUAir1QqZTIbdu3ejtbUVBoMBMpnsrsq15G+HQiEMDAxgZGQEqVQKHA4HLS0taG5ubojumGKxiGKxSD8b0pUVDodpMEnW9ba2NmzZsgX9/f3g8XgN/1yUy2Vks9majItOp4PNZkM2m4XL5UKhUKCZqcuXLyORSCAWi91UEygSiWA0GqHT6bB//34oFAqIRKIV3xjdV1BRqVTw4osv4vTp0xAIBPj93/99vPzyy7DZbHA6nfijP/ojfPDBB3jzzTcxPDyM9evXL/dxAwC9uVwuF/7yL/8Sc3NzuHr1KiKRyG2FWsC1xZg8lHq9HlqtFna7HZ2dnfTlkU6nMTY2hmAwiJmZGYTDYYyNjWF4eBitra2YmZnBunXr8O///b9f1Zs2nU7j6tWrmJ2dvetd2d1ASkgA6q4bqSYajWJubg7T09M4e/YsisUi1q9fD5VKhVdeeQVGoxFCoZBeg3s97mKxiGg0inA4jO9///twOp1U8PfFL34RX/rSl+pa9iiVSsjlcgiFQjh69Cimp6epMp/sxoxGI1pbWx/qFtLqcgDZqf/yl7/ED37wA2QyGaTT6Zs+82KxGGKxGPv27cMrr7yCiYkJ/N3f/R38fj99GQ0MDIDFYtEs58svv4y2trZVDSrIBikWi8HtdsPpdOKNN96Ay+XC3NwckskkCoUCyuUytFotTCYTmpqasH79emg0GuzatQsKhQJWqxUikeiengXyt4eHh/Hd734XgUAAoVAICoUCmzZtwubNm2syBfUin88jFothcnISf/u3fwuPx0NF29UW9WKxGI8//ji+/e1vQ6PRUIfNRqZUKiEUCmFpaQnpdBpsNhutra3YvHkz4vE4Xn/9dbjdbhw/fhzJZBLxeJxqv27mncPhcCAQCGgnWGdnJ9ra2hozqPjZz36Gjz76CADw4x//GJ///Ofp11QqFf7xH/8RNpsNADA5ObnsQQVpI0yn03A6nZidncXi4iJ8Ph8KhQIVUxL1sFgsrqmXZzIZsNls9PT0QK/XQ6PRQKFQwGg0wuFw0EU5m82Cy+UiHo/DaDTSPniv10sXNpVKhXA4jEqlApFItKIpJvLQEIEmCZ44HA6USiX0ev19ZUyqa3kk3alSqcDj8SAQCOqa8geu+XGQzz0cDqNcLtPrm0wmkc1m78vdslrwSurUPp8Pi4uLUCqVMBgMMJvNK35d7wRJScdisRt246QbSSwW1/04V5pyuYx0Ok13qaTmn0wm6WfCZrNpK61arYZYLIbRaIRarUZnZycMBgNyuRw6OjogFouRTCZpFwVw7ZlPpVK0pLCS4s3qdYzsUOPxOKLRKKampuDxeOByuRAIBFAsFqlgXCqVoqWlBTabDVqtFt3d3VCr1dDpdLRb4l6fhWw2C7/fj/n5eYTDYWSzWUilUqodUyqVdX8G
yuUyYrEYZmdnMTo6CrfbjaWlJboR0mg09HprNBq0trZCrVZDIpE0dKBdvf66XC5MT08jHo+jUqkgGo3Srp5EIgG/349AIIBCoQAWiwUul0uvC9EMkv+xWCwUCgVaGlmtTeJ93SWvvfYaAOBTn/pUTUBBqE6TrYR4sFAoIJ/PY3h4GH/xF38Bj8eDiYkJ5PN5KBQKyOVyOvegvb0dHR0dKBaLdMGYmJgAl8vFr/3ar6GnpwdSqZTuRKvT/pVKBZ/+9KdRLpcRjUaRTqdx9OhRHD16FG63G6dPn4bX64XdbkdbWxt27969omJJUnMLBoM4e/YsPB4PstksBAIBNm7ciObm5vtK/5O67NzcHP78z/8cpVIJ+/btg81mw9atW+u6Q6lUKvB4PDh06BBCoRAmJyfpHA65XA6LxYKenh5s27btnvUU5Ly9Xi/efPNNeDwenDhxAolEAt/5znewc+dObNiwAWKxuK797alUCsPDw7hy5UqNjoLFYkGj0UCv16OpqYkuno2SYVpuisUihoaGMD4+jsOHD+P06dO0fRK4FlAQzwmNRoOnnnoKzc3NcDgc0Ol01LZZp9MhlUphZmYGi4uLNVqBau8D8tJfic+TZAULhQIOHTqEs2fPwuv1wuVyIZFIIBQKIZ/PI5FIgMViwWAwQKFQ4Omnn8aGDRvQ3t4Ou90OHo8HHo8HNptNBeP3E1xPT0/jjTfewOTkJEKhEAQCAXp7e2G329Hb2wubzfbAeqX7pVKpIJfLIZvN4tixY/ibv/kbBAIBLCws0BKWQCDAgQMHYLfbsXHjRjgcDphMJhiNxhUzxlsuSJbI7Xbjf//v/43R0VEsLCygVCrhgw8+wIkTJ+j9SLpchEIhOjs7IZPJqNbL5XJRnRnZ6JIshkqlgkKhWJVszT0HFalUCqdOnQIAfOELX7jp91y6dAnAtUVvORXD1Tv1SCRCdyuRSATpdBocDgd6vR5qtRparRYikQjNzc1oa2ujpZJEIoF8Pg82m00jfaFQeNsdPjGQkclkaG9vpy1MV65cQSQSgcvlAofDwcaNGyEQCFbM8pekulKpFILBIGKxGDX4EYvFkMlk97WbIGllstDmcjm0t7eDx+Mta3nlfo6rUqnQ8w0Gg3RnSVJ/i4uLVBysUqnoPVJt300WW3JNyEuDtOMFAgF4PB4sLS0hlUqhXC5DrVbDbDY3RDkhn8/T9rjrNQMSiQRarRYSiaRura4rSXWGkdSV5+fnqQcDgc/nQy6XQywWo6OjA2q1Gi0tLWhra4PJZIJSqaS7eKFQCLVajVAodMPzcn1QQVx4l/tzLZfLSCaTSCaTmJ6extzcHBYWFuDxeOjfJ+sZn89Ha2srlEolHA4HWltbYTQaoVAoaDBxP1T7sfj9fiwsLMDr9aJcLkMgEKC5uRlWqxVyubyuDrIA6PUnuhLSsVc9QE8mk0Gv18NqtcJkMkEul6+J2S6kqy0ajdKsOwmUyT1CtH88Hg9SqRQymQwOh4Ou+zwej2oy8vk8vWf5fD4kEglEItGqXcN7fgNduHCBppv27dt30+8hmYzNmzdDr9ff/9FdR6lUQqlUwsWLF/GjH/0I8/PzcLlcKJVK4PP50Ov1+O3f/m2sX7+eDpcRCAQQiUT0BUUuIHAtXXY3sxFYLBYNPLZt24auri4cPXoUAwMDyOVyOHToEL2JSWbkfjIGdyIajcLpdOL8+fM0JUZcQiUSyX2nKEl7Enm5kjQci8W6rSnYSkPSeEQcm06naSBF/vfWW29BpVIhl8thz549yOfzyGQyNKKXyWTo7OysefhImWNqagrnz5+H3+/HyZMnkcvlIBaLIZfL0dvbSzUK9SYSieDo0aNwuVw1PiRsNhsdHR3o7e2FxWKp4xGuDCSYSKfTGBoagsvlwk9/+lNcvXoVoVCo5nubm5vxwgsvwGq1UlGaVCqls3Cqd/K3I5fLIRqNIhQKIRaLQSKRQCqVLvuLKZ1O48/+7M8wNDSEmZkZRKNR
+iLo6enBrl27YDKZsHv37pqShlQqpRuX+y1HVAdqFy9exKVLl3DmzBkcPXoU5XIZYrEYmzdvxu/93u9R8TOXy62rJiGZTGJpaYl68lwvUGez2dBoNLBarbBYLNDr9WsmyE4kEjh9+jSuXr1as7GpxuFwoL+/n3b1KJVKmj0i+rdDhw7h+PHjmJiYwNLSEhQKBTZv3oy2tjbYbDZa0l5p7vmuJFkIjUaDlpaWG77+93//9/j5z38OAPjd3/3dBzy8TyABQbFYxMzMDD7++GPqokaGyZB+5I6OjtsK98gFu5cdCPGjkMlkkEgkcDgckMvliEajWFhYQCwWw9TUFNhsNqxW64oEFel0Gl6vF4FAoCbtS4Ke+60dkh1LJpOh7qLELKxew7PIwkeib2L0Q3rUyddJym9mZgYmk4nWHol2RqFQUM0MaREkwdnY2BguXrxIO2lYLBaampqg1+uhUqkgEokaQtxFdujEh4BAJjMS/4w7Ud0NdTNznUazoSfp3mQyifHxcczPz2N4eBjz8/P0e0hqW6fTobu7G62trXA4HHf8PMjzTEoH1ZuOfD5PnwE+n78ibdqFQgGDg4M4evQo/f1isZial7W1tcHhcKCrq2tF/DJIq+rExAQGBwcxMjKCpaUl2gGn0+nQ0tLSMC6j5HnO5/O3FOFX7+bXQoaCkMvlsLCwQNvEib4HAG0hNRqNaG5uRktLCzZv3gyJRAK1Wl0zloA4n5J7n5QCq7U2DZmpGBwcBABs2bIFwLUHPxKJ4OLFi/iHf/gHGlC8+OKL+PKXv7wsB0kedp/PB7/fj5GREdpOScoYr7zyCpqbm2G32+nL4FZBw4MsniSNbrPZ8K1vfQtzc3P4+c9/jmAwiHfffReXL19GZ2cnzGbzA5/39WQyGSwsLCASidS0fgqFQnR1dWH9+vWQSqUP/HdIkKbX6+sm0iQiJDabjS1btuDVV1+F1+vFpUuXkEgksLi4iGKxSEs3w8PD9IEslUqQSCQwGo0IBoP427/9W5TLZVgsFkilUrhcLpqtmJ+fh1gsxt69e6HT6fD000/DarWio6Nj1R7CO1Gd1qxeTFksFkwmEzZu3EinMt4MsggTESDJSpF7SCgUwmaz0ZcaWZTrLfqMRCI4c+YMnE4nDh8+TIW61WzYsAG7du1Cd3c39u/fT7MTt0MgEFCXyCtXrsBisWB4eBh+vx+5XI62Vvr9flQqFcjl8mUPLrlcLnp7e5FOpzE1NYVgMIh8Po9yuQy3242pqSmUy2V0dnZCIpHclxD5VuRyOQwMDMDpdOLIkSO4ePEi1Sjt2LEDn/nMZ2gw0yiQbMmmTZtw4MABLC4uYmRkhL6Ac7kcDh8+jLGxMYyMjKCvrw/t7e1oaWmpi0HhvSASidDV1YV8Pk+zDiTLfvDgQXR0dGDdunXo6+uDWCymwQSPx6OaM9IZduLECfp+4HK5kMvlNEO7WhukBw4qnn32WfzqV7+q+Z5f//Vfx//6X/9r2S4i2ZWSgMLj8SAej9N6uV6vx5NPPgmLxQKlUnnHDogHOS4SqCiVSjz++ONQKpV48803kUqlMDAwgNHRUfz6r//6ff/+W1GpVJDNZhEKhWo8Cog4zWq1wmAwLEsQwOFwIJFIoFAo6vpiIYuByWTC448/jtHRUTpYiCjigWsv3bm5OcRiMXpPkIxDPB7H22+/jVAoBKPRCJlMhng8jnQ6TV+2IpGICt+eeOIJ6qjYCFkK4JNM0vVZI5LyNRqNkEgkN72vq3fgJEND1PPk98lkMhw4cABqtRp6vZ7+rnoHFdFoFMeOHYPb7aYC2utpa2vD3r174XA4YDab72qHyufzodFoaDBeqVQwPz8Pv99PW1aJ2I2UTpcbktEkzrhEL0PGfHu9XtqdQua6LNf9WCwWcebMGYyMjODs2bM0sCbGWU8//TR9ETUC5CXL5XLhcDjQ2dkJgUCAiYkJGlSQzM/Q0BCCwSDcbjcOHjxI5wA1yrN8M3g8HnQ6HXQ6
HQ1+iG/S+vXrceDAAVitVio6rT6XQqEAr9eLq1evYmRkBBMTE/RrHA6HtteuZlB1T6tGJpOhB02CiosXL97wfT//+c+xceNGfOc733ngAyRtZJlMhppqjY2NAbjm8rZr1y50dHSgqamJzmVYDUjrmlAoXJWLRdzyWlpaEAgEqC++Uqmkoq37bSlks9kQiUR0aFo6nYbb7abtbvVGqVSip6cHZrMZnZ2dCAQCGBoaQjQaxZUrVxAOh8Hj8VAsFmkLHEkXBgIBGtGnUina8ZHP56HVatHU1ISOjg4cPHgQBoOBGh41QobiVhBhLhFpElvuaqoNvchOZmBgABMTE4hGozUj08ViMSKRCKRSKQ3MOzo6YLVaIZVK78mRcTlIpVKIx+OYmprC5OQkPB5PjWCYzWZjw4YNaGtrw+OPP46+vj4qWryX4+RwOGhtbUWxWMSFCxfof69UKkgmk1hcXIRUKl2REiCPx8OBAwfoTApi5DQzMwM+n4/R0VGEQiEUCgWo1Wp6jmazmQoQ7/alTwLLZDKJoaEhuN1uXLhwAbOzs9REa//+/ejv76d+FI3kUwOAruvNzc34/Oc/D5/Ph56eHvj9fhw5cgTRaJS2W8/Pz9Ndv1AohNlsRnd393372Kw0XC4XCoUCWq2WBsVyuZy2xTY1NdFZPtcfO8lAkpEF1ZDGgvb29lXdINzTX7py5QqNDElQMTMzg0QigdnZWRw9ehSvvfYalpaW8Gu/9mvgcrn45je/+UAHSNo5w+EwTpw4gddff53uHLq6uvDSSy/BbrfDZDKtarqOxWJBIBBAKBSu2guIiA7n5+fpDWYwGGC1WulskvuBw+FAJBJRO9dUKoXR0VFEIpEbbtR6QFJ4drsdGzZsQDabxc6dOxEIBPCTn/wEk5OTCIfDSKVSUCgUMJlMsNls6OjogEQioXVxMhKZoFar0d/fj3Xr1mHPnj1U9NTIAQXwidJdpVLRIOD6YLpcLlN9ya9+9SuMjo7i/PnzuHr16k133iTrpdVqIZPJ8Mwzz+DAgQNoa2uDTCZb1YU4EolgfHwcZ8+exblz55BOp28IKnbt2oW9e/eir68Pra2tNd09dwPZvdrtdgiFQhw+fLjm64lEAm63GwqFYkUyFQKBABs2bEC5XEZbWxt8Ph8+/vhjiMViLCws4NKlSyiVSnjvvfcgkUjw5JNPwmw246WXXkJ3dzetkd8N1eXjv/iLv8D09DRmZmbo1GaZTIbHHnsMX/3qV6mgudFevNVZS6PRiHw+j8ceewwejwfBYBDDw8Pw+XxUn+ByubC4uIjZ2Vls27YNra2tYLPZdc++3QwulwulUlmjkSATVR0OB81Q3CoTGQqF4PP5agatAdc2Y+vXr4fJZGrcoIKUPoxGI6xWKwDQYU4WiwWPPfYYfu3Xfg0bN27E0tISXnvttQcOKorFIlwuF513QLoduFwu1Go1mpuba6y1V5PVfvBIK5xarYZQKKRjj0lJoHoyJXkISUbldsdanXWpNrtqpDki1cdPzL64XC527tyJ5uZmpFIpZLNZagIkEomQSqUQjUZvudOMRqNwu93g8/kYHx+HRqOpaz/+vUBS4mSxqPZWIVM4XS4XgsEgZmdnsbCwQI3DCNW6lWrzp0qlQruMuFwu7HY71Vis1D1f3d7o8Xhw7tw5zM3N1QxL4vP5aG5upqJMh8NBbcnv97hu9bPEdEipVK6YWJn8bblcjkqlQoMMYkEfiUTgdDoBgM75IOPIjUYjDAYD3QwQa+qbnUsikcDCwgJGRkbgdrsRiUTAYrEgEonQ398Pm82G3t7eewpU6gU5Pw6HA7FYDK1Wi+3bt8NsNuPq1atUyB6LxZBKpeB2u6HX6zE+Pg6tVtuwo8+rrxsxveLz+QiHw0in0+Dz+RAIBLTdmKz9ZNJuLBajWWWSdTabzdBoNJBKpav6frynoIJ0fvT399/ye4xGI55++mn84Ac/QDQafaCDA6716L/55pu4dOkSLXuQNlGb
zYaurq5lrTc2MiKRCBaLhfbhk7pvuVzGsWPHqHqbCHTILkStVt82NUxeGCqVCkqlEslksqE/Tx6PR4d82Ww2qrmpDoL8fj8+/PBDLCws3LItljjyaTQaTE9Pw+Fw4I/+6I8aPqggwQAR712/IBUKBaolmZmZwYkTJ+DxeGpU5QDofcJms6mqnpQeDh06RCd39vX1QSqVrkhrZfVxJxIJRKNRHDp0CH/3d39H3TOBa4GvXC6nIsL9+/fDbDavmLHR0tIS7cy42TyR5YLNZtPnTq/XY/PmzdSWfmZmhpqyjY+PI5vN4uzZsxAKhdi0aRPWrVtHBaqk9fVmz+3ExAT+63/9r/B4PBgbG6NCZrlcji996Ut4/PHHodPp6PVttCzF9ZD7XyaTQSgU4lvf+haSySQOHTqEyclJfPzxxxgcHEQkEqGi5Gw2i9bWVvzO7/xOQwYV1ZCNdCAQwOXLl2GxWGAymaDT6WgwkUgkMDAwAI/HgytXrmBmZoZmKjQaDfr6+tDT0wOLxbLqTrv3lakgpY9b4Xa7AQDd3d33eVifQKK2akMQuVxOrVjrqey92cCylTwOItKRSCQwmUzgcDhYWlqiNtbVfhoqlQoajaZGKUxS+9drBqq7Ycg/k4FGxGSL/HcSMddz4blZ+2N1mynZWS4tLSEajUIgENA6NIfDQTKZRCaToda4yWQSbrcbcrmcWjY3sjMledGRbpfqe5AYwy0tLcHr9cLr9SKRSNBOKdIWrVarIZPJYLVaqd15LpejM27IUKt4PE7FgittBEY8RsiQJNLdQ8TIYrEYFosFra2tkEqlK5o54fP5UCgU1E11JSGfKRGYVyoVmEwmsNlsdHd3Q6fTYXx8HNFolLoCe71eKqRuaWmhayKfz6ddS8Sxc2FhAfPz84hEIqhUKhAIBGhvb4fZbEZLSwsVtzd62a8ack3IfcnhcNDW1oZyuQyPxwO32410Oo1kMkk1YmKxGLFY7L6tzFcKUooTCoXQ6/WIRqP0mSVdSAqFgmYpSPA9PT0Nn89H12hSIiTrP+laWu3zvOugolAo4OrVqwCATZs23fL7FhcXcezYMQDAM88882BHB1DXxLm5Obowbtu2DXv27MHOnTvrGlAQAxkCeXmv1EUkN4jRaMT+/fsxPz+Pt99+G8FgEP/6r/9KFwZyk/J4PLS1tVEjnV27dkEul0OlUt02Wi+XywgGg4hGo/jnf/5nnDx5EkKhEEKhENu2bcOWLVsa5oGsHl8PfGKY5PP58N577yGZTFLnOVIWOXPmDIaHh2l2I5VKUY+RQCAAqVTa0KlgUh9PJpMIBoPU55/L5SIQCOCv//qv4fF4cPz4cUQiEWr2JhQKIRKJ8MQTT+DZZ5+FzWajA/RI8PDaa6/h8OHDiMVidKzy0NAQmpubV7QbiGQqfD4fnT1Brimfz4fBYEBHRweeeOIJtLa2rvgAv6amJjzzzDPo7e1dtfuArB1CoZC6Z/b09FDzL6/XizNnzuDy5cvUm0UgEOCnP/0pdDodduzYAb1ej927d0Oj0eD8+fMYGBjA5OQkpqamAIC6DP/hH/4hNUVq9NkYt4OUcQQCAXbu3IktW7bAYrHAYDBgZGSEDt8aGhpCJBLBiRMn0NnZie7ubsjl8nofPoBPAiOj0YinnnoK7e3tOHz4MAKBAC5cuIB4PI6XX34ZJpMJ8XgcAwMDmJ2dxU9+8hMaPBHfEQAwGAzo6upCW1sbDbxX8x151yvEyMgIjYR+8IMf4IUXXrjhQEulEr71rW9Rm+OvfvWry3KQxPiIiLHkcjmamprqPuaZ7ASIlW91NmAlIOdKxhwTx0gyop2kt6t3rplMBjqdjr5cC4UC+Hx+jQEY+f5kMkmH0pDOgfn5eRQKBdqa1NTU1NA7+WKxiGQyScVL5XIZ69evh0KhgMFggFgsxuLiIhYXF2lNknQYxeNxJBIJZDKZhhikdjtIpoIo3okugozxdjqdCIVCSKfT9LkhGQrSRaPT6aDX
68FmsyGVSpFOp2EymaDVaum0zEQiAa/XC6VSueIam2w2i0gkgmw2W6NjIL4pZPd1N0Zf98LNRJ5CoRAajQYqlWrV7vPq1kfS3cHhcCCXy6kImQz8IhqZTCYDv98Pv98PrVaLaDQKi8WCYrGIyclJjI2NYX5+Hrlcjp6T1WpFc3MzzGYzbdVcq5AXJimDCQQCWCwW2Gw2BAIBcDgcKlomGW+5XE5Hud+rwHelzoEEk1arFblcDhKJBOFwmA6x9Hg8yGQyiMVi1AjP7XbD7/ff8PuEQiF1lK1HOeuu7yZS+gCAN954Ay+88AL+6I/+CD09PYjH47h8+TL+23/7bzh37hwA4K//+q+XxYip5mD/rfZvNpuxfv16qNXqur3YstksnE4nFhYWUCwWqZLcZDLVDFRbCdRqNT7zmc8gEAhAIpFgcXERhw8fhs/no0EBIRAI4MiRIxAKhTh//jyEQiHtQSemOvl8HtlsFsFgEJOTk8hkMjRAGRoawsTEBO10kcvl2L17N0QiUV3HgV9PLpdDoVDAhQsX8MMf/hButxuBQACtra341re+BZvNRmuLe/fuxezsLE6ePIkf/ehH9GeJL4LP58O+ffsaXltRKpUwNTWF4eFhyGQyyOVyXL16FaOjowgEAnQGgEgkglAoxEsvvYQDBw6gubmZ7vbJS4y8XJ588kmoVCocO3YMv/zlLzEzM4Nf/OIXcLvd2LZt24qVvsrlMiYmJvDhhx9icnKy5mvEE6alpWVZny2ymJMXOJfLpeUzcq+v9LN8p+Mjx9XW1kZnmSQSCQwPD+PMmTMIBoO4fPkyNbRis9m4evUqJBIJbSUulUqQyWTYsGEDvvGNb1Bnxodtqi25ni0tLdQE7dy5c7RcEIlEcOTIEYyMjEAoFNLpris5BPJeEIlEeOqpp+gcFuBaJ5Tf70cmk8HFixepczPJJF4Pi8WCSqWiE1rr8X6856CCy+WiWCzinXfewTvvvHPD90kkEvzVX/0VvvjFLy7fUf4b1TVhMiCoXkFFoVCA3+9HNBqlQQXpPFhpIRApRQiFQmzcuBEymQzDw8P0Jqu2YiYWzwAwNTVVs9tUKpWQyWTIZDJIJpPUBr0a4mAoEAjA5/MRCoWQyWSoULARIC1z2WwWIyMjeOutt5DP55HP5yGTyehQKZIKFIvFMBgMCAaDtIuGWDMvLCyAzWZj+/bt9T6tGm5W5qtUKggGg3A6ndBoNFRbEwqF6MA14Nq1k0gkaG9vx6ZNm+h1r/595LMxmUzo6+vD8PAwACAWi2FoaAh6vR7FYpF2X63Ec7e0tISFhYUb5noQr4GmpqZlfwlWd0lxOJyaQXQrNRjwbqnOXJANGvHN4PP5iMViWFhYwOLiIsLhMObm5pDJZGpszAHQuUBWq5VOHW7k8t79Qu5J4gpLpjYTW4JcLoeJiQkEAgHMzMxAq9XSFtpGgMvl0vcHsdeem5tDMBjEwMAApqamqAbsZms1yUoQDZlIJGrsoIJ0fvzBH/wB5HI5fvKTn2B6ehq5XA5yuRxdXV04ePAgvv3tb6/IcKNqV0FSDqmHipe08/h8Phw5cgQulwuZTAYcDgdWq5XuiFcDgUCA/v5+dHd3Y926dQiFQgiFQkgkElSoF4lEaPaBuPaRhbPa1rr6vxM4HA66u7tputRut2PLli10ME2jZCkqlQrcbjcmJiYwPj6OcrmMnp4ePPPMM2hra4NKpaqxbRcKhdBqtdi0aROeffZZuFwunDx5krZiVtfzGwG1Wo29e/fC5XLh3XffRTKZBHBtd3/hwgVEo1Fq+uV0OqnIC7imRyBtgxs3boRGo7mlHoGkgqszGGSgl8/nw/T0NEwmE/R6/Yo8exqNBhaLBUtLSzUvxnw+j0AgALlcfkMHy4NA7gWpVAqJRAKxWEyzdE6nE0ePHkUwGLythmy1IQGdyWTC008/jUAgAKvVioWFBfzzP/9zzeROco0PHDiAr33ta7BYLDCbzQ99txwJkNva2vD8
889jbm4Ob7zxBjKZDB1GNjs7S8uBOp2u3ocMAPTZI105u3btwgcffICBgYEap1cyVPF6yPOr0+nQ1tZWN63MXQUV5XIZV65cAQBs3LgRL7/8Mv7wD/9wRQ/sZhCley6XQzabhUQiQaVSWdVorFoIeOTIEcTjcRQKBeqAtpomXHw+n84YaWlpoa55qVQK8/PzGBwchNvtRi6Xo7tXIuoDQJ0lq7l+99rd3Y2Ojg5s374dvb29UCgUt7SErheVSgUulwsff/wxZmdnUSqV4HA48JWvfIVmtKoXUYFAAIFAAJvNhq1bt0Imk+H8+fM0cCUdII2CRCLB5s2bIRKJcPTo0ZqgYmRkBJOTk/TeI3V2cvxcLhft7e00OLxTSfL67iAy1XdpaQljY2MoFAp3FPreD8T6XqvV3jDEqlAoIBKJ3HT0+4P+TT6fD5FIBLFYDKFQSLUqfr8fR48eBZvNXtGW0nul2ttCLpfDYDBALpdjenoab775Zs04eLI2rl+/Hi+88MJDVeq4HaRMbjAY8Pjjj0MsFuPdd9+lGqp8Pg+32w2JRIK+vr56Hy6lutzV19eHrq4uqvGKxWIIBALI5XI37TokP0/8m4g7Zz24q786MTFBnRXXr1+/ogd0N5C652rtJiuVCh2n7fV6MTc3h48//pi2uIpEIigUCrS1tWHjxo3LriW5G8hLXiAQ0IFnQqEQsVgMra2tiEQiGBoaQiwWQzAYRCqVoiI/DocDgUCAXC6HeDxOU9xisRgbN27E7t27YTKZoFKp7jhXZTWpNkyanZ2lbXd6vR46nQ5yufy2JTI+nw+9Xg+v1wsOhwM+n4/Ozk50dnbW5RreCpFIhJaWFiQSiRsWiupJnj6fj7bKEsGa2WzG1q1b0dfXd8uhY6VSibbZEk0GqemSZ4xY/loslmVdrMgmIZ/PY3Z2FnNzc1haWgIA+sJvaWnBvn370NLSsqyj6El5MJVK0dZDEmQbDAbs2bMH69ata+iXcTabxdDQEKampqgo9/p1MRAI0JZppVLZMBnGlaJUKtFBl4ODg5iZmaGZOwDUmbOrq6thOkCuh6xHW7ZsgUKhQCwWQzQapULNRCKBwcFBJJNJRKPRmvOrN3f1tJDSh1AoRHt7+4oe0N1AsgWrMZabtI6Gw2EEg0G8/fbbeOONN6jTGVHO6/V6bN++HV1dXXURd5GFgojyZDIZLBYLyuUydu7ciWw2i8uXL8Pr9WJiYgLhcBiRSASJRIK68iWTSUxMTKBQKFAzrKeeegobNmyoqaM3SpaCvJBSqRRGRkZw9OhRmM1mOguGjDu/1fGSF6/X6wWfz6d2xe3t7Q1TZwWuHWdrayvy+fwNGQJSvopEIohGozVjtHfs2AGbzYYnnngCVqv1lp9DsVikL/Nf/vKXuHjxIhYXF2teTgqFAu3t7dRIbbkgyvx4PI7R0VGcOnWKZmIEAgF0Oh16enrw6U9/etlnkJTLZSSTSfrZVQvfOjo68LnPfW5FdBzLSSwWw6FDh+ByuejnRgIL8lk5nU58+OGH6OrqwrZt2xpqY7ASkMFsLpcLH330UY3HEQDqaUHmnDQaJIMGAOvWrUNvby8KhUKNd5DP58P//b//F7Ozs5icnFwWo8nl4q6eFiLS7OrqWvVaHJklbzab6dAYr9eLgYEB+gLn8XjLMoOj2gKVjCEm6bLJyUksLCxgdnYW4XCYigCVSiV27NgBs9lMRTb1rldeb2ZFFhkyzVIkElGDlVQqRR1Kg8EgdWP0+Xx0ABdpmW20HQ7RQFTvMI1GI7q6utDU1HRHQSGp1cfjcSpukslkkEgkdb+G1ZBMkkwmg81mQyaTQTgcvmF3Uh0ElEolxGIxSCQSLC0t0XOqFiMWi0UkEgmk02lcuXIFXq8Xi4uLiEQidBEmglxipLPcAWV1toBkz8h5kXuOlGOW6/4jz3kul4PL5aKjxwHQv0PEjfUUg9+OTCaDeDyO+fl5OuWU6ELa29uhUqkwNTVFn+OxsTHw
eDxs2LABAFbc52O1qDa9y2QytL0+HA5jcHAQPp8PsViMDk3U6/XUiVculze8WPV6Hx4yVVkgENB7eDU21/fCPQUV9Sh9cLlcbNq0CXw+H8ePH4fT6cS7776LY8eOUWGowWBAa2vrA0fghUKBDmGamZlBIBDAxYsXEQqFcOnSJeptkM1moVQq0dLSgs2bN+P3f//3odVqaRtTIz2s1T3QDocDlUoFfX19N9TlWCwW/H4/xGIxXC4XPX9isnS9LqERIBkk0nIlFAqxefNmfOUrX6FjhG9HNBrFqVOn4Pf7oVKp0NzcDJPJBKVS2VDXkMvlQiqVwmg0YufOnTCZTDh27NhNe9QJ6XQag4ODmJ2dhUKhQEtLCwwGA9RqNTXKITqUaDSKyclJJJNJJBKJGp8IqVQKq9UKvV5fI3ZdLkqlEoLBILxeL/x+P+Lx+IqXNUm5JxAI4Mc//jEuX75Myz2k5KJUKmkJrdGCaeBa9uH111/H9PQ0zp49S903rVYrfuM3fgNtbW34yU9+gjfeeANLS0v4/ve/jwMHDmDHjh3QaDRQKBQN/0K9G8jGIp/PY3BwEGNjYzh9+jROnz6NVCqFSCRCjQA1Gg0+//nPw263Y+PGjdSjZS1Q7YxJrACIBfn1urh6c1dBxeXLlwFcS8WsNmw2GxaLBfF4nA6RymazSKfTdJdRKBSg1WpptqJ6h052O2R3cjNIDY4YpEQiEVoiWFhYgN/vp05/AoGATsLs6elBS0sLNBoNzZg06k1KREC3g+x0iDkWMfe63gq6kSCdK9WtVGSIzq1efiSgymaz1CCKGCs14jWs7lppbm4GAFitVupCmc1mb/gZogNisVhYXFwEAMTjcahUKmpu5vV6MT09XROYkWeBZCg0Gg26u7vpnI3lhkyPXVxcRDKZrNl1kdkkQqFwWQIZct65XA5LS0vw+Xzw+Xzw+/30M5TJZHQEgEgkargR4KTsG4lEsLCwgKWlJeTzeYjFYthsNlgsFhiNRiiVStjtdjQ3NyOfz2NmZoaKtUUiUcNqCe5EtZ6OtJKTacqTk5OYm5vD7Ows3G43XfMFAgGam5thsVjQ0tKC1tZWmrlrpGt7O66f70NKv2R9BkANCus9RuGuggriVVAP+Hw+9u3bh02bNiEQCCAUCiEWiyEej2NoaAh/+Zd/CaPRiM2bN0OpVNLZ82QUOBm0k0gkqNV3NZVKBTMzM/B6vdS9LJVK0QiQ9ASz2WwolUps2rQJPT09WL9+PQ4cOACJRAKFQrGmbtC7gTyQZBFr1KAC+MTwpbm5Ge3t7XQuyq1egkTc6XK5cOrUKWi1Wmzbto2OwW5UpFIpvvSlLyGZTKKjowOzs7M4dOgQLl26dEPmibSfpVIpHDp0iDqE8ng8ej2JayYp95E6PJvNpuPjH3vsMbz66qtQKBQr0vdeKBTwwQcf0KFI1RDr6Z6engfKkpHPxev14sSJE3C73RgYGEAwGMTQ0BCSySRdF/bu3YuDBw9i/fr1MJvNt72PVptKpUK7YE6ePIkjR46gUChALBbD4XDgS1/6Es22CQQCbNu2DXq9Hu+88w6GhoYQDAZx4cIFtLe3Q6PRrEltBemEyOfzSKVSCAaD+OEPfwiPxwOn04l4PI5YLFazzvf09OC73/0ujEYjmpqa6DTmtbheVyoV6v4bCAQQiURQKpXAYrHgcDjQ3d2Ntra2uh5j4yqQ/g1iy83j8Wg9jLS5pdNpTE5OIhAIgMfjQalUIpVKQS6XQ6/X0xn1uVwOkUgEY2NjN7SHlctlTE1Nwel0wufzwe12o1gs1tSUiWhRIpHAarWitbUVXV1d1FCpUS2rH4TrMz2NSqlUojNhyCTNOy0YxCiL6ErIhEgyxKlR4XK5UCgUEAqF6OzsBIfDwfDwMG31vD5gJvqg682kbgbREhCPCo1Gg6amJrS0tMBqta7YTJtyuYxAIACn00lnlBBIKaJ65POdsk83C37Jf49GoxgbG4Pb7ca5c+douYf8
XmJg197eDp1O13BDtsicGr/fj6WlJYTDYZrNIUGgwWCgxy2Xy2tKV7lcDoFAAAqFoqFapgnVwwDJdaz+/EkgTGbDxGIxzM3N4eLFi5ifn6d6HHIfkwGCVquVBlJksOBahQwTJBk38j5jsVhQKBTQarXLLmi+Vxr+0yVpbS6Xiy9/+cvYu3cvTp06hYGBASwsLGBsbIzahPN4PAwPD9OJhsRaWigUIpPJIBAI3PAwEbe1VCpF/S/IZES5XI4nn3wSer0eGzduhN1uh16vp+n1hzWgID3eMpkMZrOZnmujQIRZ+XweAwMDGBkZweLiYk1aFLixS4V8jdShp6en0dPTg+7ubjz77LNQKpUNPxaZlLHWrVuH1tZWsFgsNDU14dKlSzhz5sx9ZZR4PB6sVisUCgV6enqg1Wqxfft2bNy4EUqlsqasuBLnQ9Lx1R4LwLVpxx9++CHcbjesViudunuzaxSJRHD16lU6Frr6OSf3y9TUFE6dOoV4PI5QKIRyuQyRSAQOh0MX4+bmZmg0mmVtXV0uKpUKjh8/jp///OfweDzU2OyVV16BzWZDa2srtVsnzrF8Ph92ux1yuRyFQgFDQ0NIJBJ4+umn6306FBL8xmIxTE5OIh6PY2ZmBiwWC5s3b4ZcLkcymUQ2m8XAwAAuXLiAQqFAd+zz8/PIZrMwGAyQSCTo6OiA0WiEwWCAw+FAc3MzneDaSEHicsJisWCxWNDZ2QmNRlPXY2mcN8UtIIYeZLYGMXsi7XPj4+PI5/M3LEgP8vfIwi2RSNDZ2YnW1lbs2bOH7gIa/cXzoJCAitToGlFnQOqp8/PzmJ6eRjqdBvCJPuZmL0HycnG73Th79izK5TL0ej2sViusVuuamYXA4XCgUCggk8nQ19eHYrGISCSCc+fO3XWpqvqz4fF4MBqN0Ov16OrqQktLC7Zv347W1taVPI2avy8UCm/IhMRiMYyOjiKXy+HSpUuwWCxob2+/qYfIwsICPvroIyQSCYTD4ZoWQnLdXS4XZmdn6efD4XAgFoshEono+RPr5kbTUgDXNkBjY2N49913IZFIaNv4nj176P1Q/Rny+Xzw+XyqDymVSpiZmaEmX40CyUJFo1FcuHABS0tLuHz5Mu1UstlsWFxcRCwWw4ULF/DLX/6yJqsBXNsIKZVKGI1GtLW1ob29HV1dXWhvb6fmZg/jBrAaiUSyKrOn7kTjr6D/RrUXf1dXF2QyGRYXF7Fx40aEw2EagVcLV4Br5jDZbBY8Hu+m7WEsFgtGoxFarbYm3apQKKBUKrF3717qUMbn89fES+dBIWlGkk68Xdq5npDjJCOzY7EYjhw5AoFAAKPRCIfDQa9XsVikqW+Spejv78dTTz0Fs9lMTcPWEiwWi46utlgs6OrqQiwWg8fjoUFGMpmk963FYqGGWHa7nZY8hEIhWlpaoFAooNfrIZPJoNVqV+Uc+Hw+nn/+eaxbtw7vvvsuzp8/TzVTJNXr9/tx7NgxyGQyaDSamwb14XAYo6OjNZNnqyFus5VKBXw+nxpBHTx4EDqdDp2dnTAYDLBardBqtQ3VGVEul5FIJKinBgBs2LAB+/btw8aNG6kp3a2eUYVCgW3bttGZFzfL2NaTUCgEp9OJoaEhHD9+nArkyZovk8mQTqeRTqcxMzNTEzjzeDyYzWZotVq8/PLL6OnpgcFggFKphFQqpa3QD2tAUT0wsKOjA5s2barroE1gDQUVAOiDbjabYTabkU6nsXHjRvh8Prz++uvUorg6Co/H4wgGg3SI1PUfNpvNRltbG+x2O7UJFolE0Ol0dEbEWhQ0PQjVVujEr6NcLjdcSylwLWiMx+NYWlpCIBDAu+++i/HxcWzevBmvvvoqncOSTCbxgx/8AKdPn0Y6nUYul8PBgwfx+OOP0/rrWlt0WCwW1Go11Go1rFYr+vv7EQqFcPHiRczNzWFychL5fB4SiQQCgQAbNmxAZ2cnNm3ahP7+ftqmxuVyacvwcvpB3A0khd/d
3Q23241gMIjZ2VkaVJCW0/fee2/Z/ibRZ7W2tuK5555Dc3PzXVmY14tyuUzHm8diMWq9/fLLL9POs9s9m3K5HN3d3RAKhRgcHITf72+YoKJSqcDn8+H999/H+Pg43nvvvRptkNPpvO3PCwQCtLW1oampCZ/97GfR0tLSUOLalYbFYtGsVVdXF+x2+7K3fd8rayqouB4iXGOxWNi/fz/i8fgNZiDEgpfP50OhUNxws5FaFKkdi0Qi8Hi8Gs3Eo0a5XEY6nabWzUT81EiQXUxbWxsymQxSqRRCoRBtF5yZmcEHH3xAd7XZbBYLCwvIZDK0Q4SYua3lXQwxNiNeJCqVCr29vbTllBh7cblctLa2wmazwWg0QiqV0vOuNpha7c+BXEeRSIQtW7aAw+FgcHAQuVyOTmS81+4jcg6kbEd8GSQSCVQqFTQaDTo6OqDX69HS0kJb1RuVanHe9e3dd3O9iDCZZB0bzSyJmLndzqOEBP5isRhSqRQKhQJ2ux0KhQLr16+HwWCg6/tafZYflJXwkbkfGvdJugv4fD6d6ka0FtdDREBk4bzVB369BXWjWVKvJuVyGfF4HPF4HJFIBNlstqEWXaLU5/P52LNnDxwOB4LBIGZmZmia1OVy4ciRIzdoB3g8Hvr7+/Hcc8+hqampIfUi9wqx9SUlPiLU2rVrV80iTc7zZh099brPq4999+7d6O/vxzvvvINIJAKfz4e5uTna8nq3kGedZGj6+/vhcDhgMploQNnT00M7uxphIb4dxFMlHo/f4ElyN8FWsVikgwYbqTWcCKfj8ThcLhe8Xu8tAx6xWAyxWIzm5ma0tLSgpaUFBw8ehFwuh91up1q3RsymrjTkeV7tLOOtaJw3xX1yp7bH6mDiUbzh7pXq1rzbtek1AqSNqlQqoaWlBevXr6dBBfEaAUBNjFpbW6HX69HW1ga9Xt9w01YflOtfjmvlfq/OLLBYLLS2tqK/v5+2P5IXaj6fh9/vrxFhXg8ZEieRSNDe3g65XI62tjaaoSGOqWT8dyMswneCpLhVKhUt50UiEczMzCCfz9OWexIgk/uAZCai0SiCwSAymQzMZjNaWloaapNAhLpkhACXy6UjDyQSCbhcLhWbGgwGWCwW2Gw2ep1JJm4tXMuVgM/nU0F9I9A4d9YK8Sinw+4HkmputBTpzSDTBvV6Pb72ta9h586d8Hg88Hg8mJ2dxVtvvQUWi4W2tjaYzWa88sor6O7uhs1mo9MamXujMajOWGzbtg2bNm1COBzG5OQkYrEY5ufnEQwG8a//+q+Yn5+/5e9RKpX47Gc/C5vNhueee67GCK263LOWNDQcDgcWiwVqtRp6vR4AcPz4cczNzaGvrw9f/OIXodFo4HA46GfIZrMRjUbh8/lw8uRJfPjhhzCbzfjc5z6HlpaWuncIAJ8EkzKZjA5uCwaDaGpqwgsvvACj0YiWlhZIpVIqNhaJRLQN+Prrulau53JCyntNTU004Kw3D31QATyaJYz7gQzlIeOzG53qdmO1Wg273Q6JRAI+n0+zF6RbSKPRoKWlhQ5Va5SonuETqu3IiTW31Wql3Q0KhQJdXV23/R1WqxXNzc1obW2ldfa1DrnPhUIhFAoFVCoVbY3WaDSYmppCKpWCQqGgLbIcDgfBYBButxuBQADFYhECgQA2mw12u72hslgSiQRGoxEA0N3dDYPBgI6ODmg0GpjN5hoRMY/He+SE87eDuAmTZ6QReCSCCoa7I51O4+LFi5ienkYikWiohed2kC4IhUIBh8OBzZs3I5/P41vf+haAWu0NEXwxND5k1km5XEZ7eztKpRIOHDhwW31F9XVuRAOr+4H45rDZbOzfv5+Oqh8YGMD09DRee+01CAQC6rGh1+shEongdruxsLAAPp+PrVu3oq+vD0899VRDmbyxWCy0tLTg1VdfRbFYpO3/xHOj2mDwUc1G3A6BQIAtW7Zg8+bNMBgM9T4cAExQwVAFcbWLx+Pg8Xh0XsRaeJBv
tkiS3Q/D2oQIKatRq9V1Opr6QvQCer0ePT09yGazkMvliMVimJ2dpS2iHA4HZrMZcrkci4uLCIVCaGtrw5YtW2CxWKDT6Wgmo1EgJQ2Ge4fD4UCn08FqtTbM3CImqGCgGAwG/Of//J+RyWSQz+fB4XDQ1dW15v3yGRgeBlgsFrUR37x5M5577jnE43E6ZTUSiVAzuGw2i/7+fvB4PPT09GDXrl10yOJa0pMwrD2YNwUDRSKRYOvWrfU+DAYGhpvAYrEgl8vpwMS2tjYUCgUkEgnkcjn4/X6kUikMDw8jHA7DaDRCp9Nh/fr16OjoeGS7IxhWFyaoYGBgYFhjkPIFCTTIcLRCoQCdTodsNguxWAyhUFh322aGRwsmqGBgYGBYY1SPqie1dLlcDgCw2Ww138sEFAyrCRNUMDAwMDwEMMHDww+ZUNzU1IQXX3wRiUQC/f39MBqNjFCTgYGBgYGB4e4hjrFarRb/8T/+RwCoyyDA28EEFQwMDAwMDGuA6rEUjdqRx6rUabCDQCCATqerx5+uKx6PBxaLpd6Hseow5/1owZz3owVz3o8WgUDgljN46hZUMDAwMDAwMDxcNEYRhoGBgYGBgWHNwwQVDAwMDAwMDMsCE1QwMDAwMDAwLAtMUMHAwMDAwMCwLDBBBQMDAwMDA8OywAQVDAwMDAwMDMvC/w/JI8gw+SgdQAAAAABJRU5ErkJggg==", "text/plain": [ "
" ] @@ -316,7 +315,7 @@ "outputs": [ { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAf8AAABnCAYAAAAOs0skAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAAxOAAAMTgF/d4wjAABgA0lEQVR4nO19eXRU95XmV/si1b6otO9ICBAGBMY2DmDAu7vjZDJ2etKxHXfS6cRJpiczk5npOUlmknRn6550n0nSyWlnsTtud2zHe5J2cACDAQNiE4v2vaRS7fumqnrzB3Ovn4TEWpIget85HKC29+77vfe723fvlQmCIECCBAkSJEiQsGwgX+oTkCBBggQJEiQsLiTlL0GCBAkSJCwzSMpfggQJEiRIWGaQlL8ECRIkSJCwzCApfwkSJEiQIGGZQVL+EiRIkCBBwjKDpPwlSJAgQYKEZQZJ+S9j9PX14amnnkJbWxtKSkqg1WpRVVWFjRs34qmnnsJLL7100Xe2bdsGmUyGvXv3FuUcvvrVr0Imk+GrX/3qFX/n8ccfh0wmw89+9rOinMONiuUi50Khrq4OMpkMw8PDi3K8vXv3QiaTYdu2bYtyPAkSrgeS8l+m+NWvfoU1a9bg+9//PrxeL+644w58+MMfRnt7O9xuN77//e/jz//8z5f6NCUUEZJyuj7IZDLIZLKlPg0JEooC5VKfgITFx9TUFB577DFkMhl88YtfxNe//nVotdoZn+ns7MSLL7540XefeeYZJJNJ1NTULNbpLlv8zd/8Df7bf/tvKC8vX+pTkXAF2LRpE86fPw+9Xr/UpyJBwmUhKf9liDfeeAPxeBwVFRX47ne/O+dnNmzYgA0bNlz0uqT0Fw/l5eWS4r+JoNfr0drautSnIUHCFUEK+y9DTE1NAQAcDsdVf3e+nL84Pz00NIQ//dM/hcvlgkajQWNjI/7n//yfyGQyV3WsY8eOoby8HAqFAn/7t38752eu5VjPP/88duzYAavVCo1Gg9raWnziE59Ab2/vnJ8X545fffVV3HXXXbBarTOugzgk/NJLL2HLli0wGo0oKSnBHXfcgV//+tdXJTswf85fzJPw+Xz47Gc/i+rqaqjValRXV+Nzn/scwuHwjO9s27YN27dvBwDs27ePz1cmk6Guru6iY7/99tv40Ic+hPLycqjVajidTjz88MM4dOjQnOcqlv+nP/0pbrvtNphMphk5d/G9s2/fPtx9992wWq3Q6/XYtGkTnn322XmvRS6Xwz/+4z/i9ttvh8lkglarRXNzMz7/+c/D7XZf2QX9/xgZGcG3vvUt3HXXXaipqYFGo4HZbMaWLVvwox/9CIVCYcbn6XrPlpX+kHyXS6t0d3fjiSeeQG1tLTQaDaxWK3bs2IFf/vKXc37+WtZZgoQrhiBh2eHZZ58VAAgKhULYvXv3VX1369atAgBhz549M15/7LHHBADCF77wBcFoNAq1tbXCv//3/17YuXOnoNPpBADCBz/4wYt+7ytf+YoAQPjKV74y4/VXX31V0Ov1gk6nE1566aWiHKtQKAgf//jHBQCCUqkU7rrrLuHRRx8VVqxYIQAQ9Hq98Jvf/Oai79XW1goAhKeeekoAIHR0dAgf/ehHha1btwrvvPOOIAiCAEAAIHz5y18WZDKZcMcddwiPPPKIsHbtWgGAIJPJhF/96ldXda1Jzp/+9KdzXrNPfOITQlVVlVBWViZ86EMfEu6//37BZDIJAISNGzcK2WyWv/M3f/M3wj333CMAEMrKyoTHHnuM/3zxi1+c8ftf/OIXBQCCXC4XNm3aJHzkIx8Rbr31VkEmkwkKhUL4yU9+ctG5kvxPPfWUIJfLhS1btggf/ehHhVtvvVUYHh4WBOH9e+fzn/+8IJfLhba2NuHRRx8VPvCBDwhyuVwAIPyn//SfLvrtdDot7Ny5UwA
gaLVa4b777hMeeeQRobq6WgAg2O12obOzc951GxoamvH61772NQGAUF9fL+zYsUN49NFHha1btwpqtVoAIHzoQx8SCoUCf/7ll1/mtQAw49o99thjgs/nEwRBEPbs2SMAELZu3XrRubzxxhuCVqsVAAgtLS3Co48+Ktx1112CQqHgtZyNa1lnCRKuFJLyX4aIxWJCZWUlK6Vt27YJX/va14Q333xT8Hq9l/zu5ZQ/AOGv/uqvhFwux+91dXUJJSUlAgDh4MGDM743l/L/h3/4B0EulwsOh0M4dOjQRedwrcf64Q9/yMrixIkT/HqhUODzMJvNF10DUiIKhUJ49dVX57wudD5ms1k4fPjwnDKuWLFizu/Oh8spfwDC448/LqTTaX5vdHSU1/a5556b8b1LKSfCj3/8YwGA0NTUJJw6dWrGe/v27RMMBoOgVquF3t7eGe/R+RiNxjnXTBDev3cACH/913894729e/ey4fbb3/52xntf+tKXBABCY2PjDEWezWaFJ598khV5JpOZ8b35lP+RI0eErq6ui87P7XazsfbLX/7yovfp3OfDfNfX4/Gwsv76178+w7A4evSoYLFYBADCj3/84xnfu9Z1liDhSiAp/2WK7u5u4dZbb+XNRfznlltuEX74wx/OUKqEyyn/DRs2zNjcCJ/+9KcFAML//t//e8brYuWfz+eF//gf/yMryv7+/jnP/VqP1djYKAAQ/uEf/uGi7xQKBaG9vV0AIHzjG9+Y8R4pkbm8MwJdu7l+O51O8+Y/Ojo672/MxuWUf1VVlZBIJC763je/+c05z/dyyj+fzwsVFRUCAOHYsWNzfubb3/62AOCiaAHJP/uai0H3zrp16+Z8nyIOu3bt4tdSqZRQWloqABBee+21i76TSCSEsrIyAYDwi1/8YsZ78yn/S+Hf/u3fBADCRz7ykYveu1blT5GGDRs2zPm97373uwIAobm5ecbr17rOEiRcCaSc/zJFS0sLDh8+jPfeew9f/vKXcc899zAH4OTJk/iLv/gL3Hvvvchms1f1uw8++OCc5VArV64EgHnzs8lkEh/+8Ifxve99D1u2bMGhQ4fQ2NhYtGONj49jYGAAAPDYY49d9B2ZTIYnnngCALBnz545j/fv/t2/u+T5AMBDDz100WsajQYNDQ0XndP1YseOHXMyyy93refDiRMnMDExgcbGxjnJngA4n33w4ME537+Sa/Txj398ztdpXQ4cOIB8Pg/gAu8jHo/DarXOeW31ej0effRRAPOv21zIZDJ4/fXX8eUvfxmf/vSn8cQTT+Dxxx/Hj370IwBAT0/PFf/W5UC8kLnuOwB48sknAVzouzExMXHR+8VeZwkSAIntv+yxadMmbNq0CQAgCAJOnDiB73znO3j++eexe/du/P3f/z3+y3/5L1f8e/NVAxiNRgBAOp2e8/3/83/+D3K5HFavXo3du3dDo9EU9Vi0QdpsNn5/NsjYmG8znYsYdz3ndL0o9rEGBwcBAAMDA5etZ/f5fHO+fiXXqL6+/pKvp1IpBAIBOJ1OXov5vgNcft1m4/Dhw3jkkUcwOjo672ei0egV/daV4HIymM1mWK1WBINBjI+Po6KiYsb7i3lPSVg+kJS/BIZMJsP69evxL//yL0gmk3jttdfwyiuvXJXyl8uvLZj0wAMP4MCBAzhz5gy++c1v4itf+cqCHetaodPpLvuZxTynYh+LWO4ulwv33HPPJT9rt9vnfP1KrtGVQBCEovzObCSTSXzwgx/E1NQUnnjiCfzFX/wFmpqaYDQaoVAo0Nvbi5aWlgU7/rVgse9zCcsDkvKXMCfuvvtuvPbaa/D7/YtyvFtuuQXf+MY3sGvXLnz1q19FLBabtwfBtaCyshIAEAgEEI1G5/T+yfOlzy43VFdXA7gQHVnIlsJDQ0Nzvk4lc1qtFjabDcD7azHfd4CrW7d33nkHU1NTWL9+PX7yk59c9H5fX99lf+NqUVlZie7ubj7P2YhEIggGg/x
ZCRIWA5JJuQxxJV4NhUSrqqoW+nQYq1atwv79+1FXV4e//du/xac//emLaq6vFVVVVRwenkuxCYLAr1M9/B8a1Go1gAs183Nh48aNsNvtOHfuHM6ePbtg5/HP//zPc77+zDPPAAC2bNkCpfKCX9LR0YHS0lIEg0G89tprF30nlUrh+eefB3Bl60ZKdr5Q+nznBgAqlQrA/NdvPhBP4uc///mc75MR0tzcLCl/CYsGSfkvQ/zgBz/AY489NidpSxAE/OpXv8L//b//FwCYTLVYaGxsxP79+9HS0oIf/ehH+PjHP37Vm+18+M//+T8DAL72ta/h1KlT/LogCPj617+OkydPwmw245Of/GRRjnejgQy5vr4+TE9PX/S+SqXCV77yFQiCgIcffhgHDhy46DP5fB6///3vcfjw4Ws+j87OTnz729+e8dqBAwfw/e9/HwDwl3/5l/y6VqvFZz/7WQDAF7/4RYyMjPB709PT+MIXvgCPx4P6+vorIhsSSe7tt9/GuXPnZrz34x//GP/6r/8673fp+l2tYfTJT34SRqMRx48fx1//9V/PML5PnDiBr3/96wBwVek1CRKuF1LYfxlienoazzzzDJ555hk4HA6sW7cOdrsd4XAY586d4/Drxz72MWYiLyaqqqrwzjvv4O6778YvfvELJBIJPP/881dEArwU/vzP/xwHDx7Es88+i46ODmzduhVOpxPHjx9HT08PdDodnnvuuWvqfHgzoKamBh0dHTh27BjWrFmDjo4OaLVa2O12fPOb3wQAPPXUUxgdHcV3vvMd3HnnnVi1ahWampqg0+ng8Xhw8uRJhMNh/PCHP8TmzZuv6Tw+//nP47//9/+OZ555Bu3t7ZiYmMD+/ftRKBTwhS98Affff/+Mz/+v//W/cOzYMbz99ttYuXIltm/fDoPBgEOHDmF0dBQ2mw0vvPACRzYuhXXr1uGP//iP8eqrr2LdunXYtm0brFYrTp48iZ6eHvyP//E/8I1vfGPO7374wx/Gd7/7XezcuRN33XUXDAYDAOBb3/oWpynmQllZGX7xi1/gIx/5CP7qr/4Kzz77LNatWwev14t9+/Yhl8vhiSee+IM1OiXcmJA8/2WIJ598Eq+88go+97nPob6+HufOncMLL7yAPXv2QKFQ4KMf/Sh+85vf4Nlnn10yspHT6cTevXtx22234ZVXXsFDDz2EZDJ5Xb8pk8nwzDPP4LnnnsOWLVt4eFEymcTjjz+OEydO4L777iuSBDcmXnrpJfzJn/wJotEo/vVf/xVPP/00h80J3/72t/Huu+/iP/yH/4B4PI7f/va3ePPNNzExMYFt27bhn/7pn/DII49c8zk8/PDD+N3vfgeXy4Vf//rXOHLkCNavX4+f/exn+N73vnfR5zUaDX7729/iBz/4AdauXYv9+/fj5Zdfhkqlwuc+9zmcOnVq3tLEufDCCy/gO9/5DlpaWnDgwAG89dZbqKmpwb/927/hz/7sz+b93te+9jX81//6X2E2m/HKK6/g6aefxtNPP41YLHbZYz744IM4fvw4HnvsMcTjcbz44ovo7OzEnXfeieeff35O/oEECQsJmXAj0VolSJDwB4tt27Zh37592LNnjzRWWIKEJYbk+UuQIEGCBAnLDJLylyBBggQJEpYZJOUvQYIECRIkLDNIOX8JEiRIkCBhmUHy/CVIkCBBgoRlBkn5S5AgQYIECcsMkvKXIEGCBAkSlhkk5S9BggQJEiQsM1y2va9Go/mDbXd6Kfh8PknuZQRJ7uUFSe7lheUsdyaTmfO9yyp/h8OB8fHxop/UjY6qqipJ7mUESe7lBUnu5YXlLPd8kML+EiRIkCBBwjKDpPwlSJAgQYKEZQZJ+UuQIEGCBAnLDJLylyBBggQJEpYZLkv4uxEgCAIEQUA0GkUoFEI2m0UqlYLJZEJlZSWUSuWSzZ1fSJDcPp8PHo8HhUIBuVwOZrOZ5f5Dkp06TefzeQiCgEKhwK/J5XLIZDKWlf4
vQYIECRKuHjeF8i8UCigUCujr68M777wDv9+PiYkJrF27Fp/85Cchk8n4zx8SSAEeP34cL7zwAjKZDBKJBNasWYNPfepTMBqNKCkp+YNR/sAFmXO5HPL5PKanp5HNZqFQKNjIUalUrPj/0NZbggQJEhYLN6zyFwQB2WwWhUIBqVQKmUwGw8PDGBkZQTgchsfjgd1uh8fjgclkgslkYgVxMyuFQqGAdDqNQqGAbDaLbDaLoaEheL1eZDIZpFIpeL1eTExMIJPJQKVSAbjgCd+sRgAZOblcjv/O5/NIJBJIJpMALlwXtVrN66zT6QDgpjcCKLoDALlcjv8v/kOQy+VQq9VQKBRLdbpFRaFQmPH/2fISxBGfm32txbiZZZFw8+OGVf65XA7BYBDxeBxdXV0YHx/HkSNHcPToUaRSKcTjcYRCIVitVtTV1WHnzp0wmUxQqVQ37eZIBs/Q0BAikQjGxsYQCoVw4MABnDx5coaCf+mll1BRUYGHH34YVqsVGo0GSqXyplOGhUIB+Xwe+Xwe4XAYuVwOwIVrMTU1BY/Hw8aO3W7Hxo0bYTAYUF5eDrVaDaVSeVOvN0U6CoUC4vE40uk0pqenkcvl2NiTyWRs8NTW1v7BKEIyduieJaOP3gfeV/xqtfqmlpsMXPo3ACgUiptSFgl/GLhhlD+F9incm8lkMDk5iWg0ioGBAUxMTGBychKRSATZbBaZTAaRSAQejwdKpRLhcBgKhQIGg+GmUgZiuSmsPz4+zgovGo3C7/cjnU5DoVBALpcjGo3C6/VCLpcjEAhALpfDbDYDuLCh3Azyi5U+rSd1o6JN3u12Y3x8nNM8iUQCdrsddrsder0epaWl0Gq1AG4eDgApfHGkI5vNYnp6GpFIBKlUCul0mnktgUAASqUSer0eBoMBTqcTAGZEuW42ufP5PAqFAqanpyEIAstBzz29B4ANH4r40DNwo8tNCl4sM8kLvC8XPatkBM3+DXqNDIUbWWYxxJErMuhm83XEkR6Sa/b/xf++mWSnv2/kNbthlH8qlUIoFILH48GePXvg9/tx9uxZBAIBeL1epFIpJJNJpFIpABduoFAohGPHjmF0dBSZTAYVFRV48MEH4XK5lliay4Nu/FQqhVgshrGxMbz22msIBoPo7e1FLBbjz3k8HmSz2Rme4vHjx2Gz2TA9PQ2Hw4Ft27ahsrISFosFpaWlSyzdpSEIApLJJAKBAEKhEM6dOwe/34+TJ08iHo+znMFgENFoFLFYDJFIBEqlEq+++irKyspw3333obq6Gh0dHXA6ndBoNGwI3KgQBIF5DPF4HKOjo0gmk+jv70c8HkcikcD09DQSiQTS6TR8Ph/6+/thMBhQW1sLq9WKcDgMp9OJmpoaGAwGqFQqqFSqG3aDIdLm9PQ0y0bPazgchiAIsFgsUKvVCAaDCAaD8Hq9cLvdrDRNJhPuu+8+2O122Gw2aLValvtGBRk42WwWyWQSwWAQJ0+eRD6fR0lJCTQaDaqqqlBSUsKfJQIvXS/gfWPebrdDq9XOSIHciKBnl6JXdB/L5XKUlpZCpVKhpKQESqUSuVwOuVyOFSQZCjKZjI0d8d9K5Q2jruYFOTR0HWQyGdRq9Q35fC7p1RRbh5lMBqFQCKOjozhz5gy8Xi9OnDiBWCyG6elp3kQEQeAHIp/Pcy7cbrcjlUohkUggn8/fsJ4gWYXk8WezWUQiEbjdbpw+fRpTU1MYHh5GJpOBVquFQqFgb5huqlwuh0gkglwuh9HRUUSjUbS2tsJgMKCkpGRO63mpIZabjJ5gMAiPx4Pe3l74/X6cPn0aoVCI1zmdTrMXnE6nIQgC3G43PB4PVqxYgWw2i4aGBuj1eigUijm9pxsBs739dDqNWCyG0dFRhEIh9Pb2IpVKIZvNMtchlUphZGQEfX19MBgMKBQKSCaTGBoaQjabZYUpl8t5U7zRZCeZyZNPJBIIh8MYHR1FIpFAMBhEoVCAy+VCSUkJJiYm4Pf74fV
6MTw8jFwuh2w2C4fDgba2NgiCwBspec7AjSW3uGIll8shlUohGo3C5/Ohp6cHuVwOpaWl0Ol0UKlUMJlMrChVKhU0Gg3vCwBY6en1el7rG9UTJuVN602OjdfrhUwmY94OGW30fFM6k74nk8mg0WhmEHwBcJTkRpMbuNjIpXtfXKUkNtrE/I/55FnodV4S5U8XKZ/Pc073xIkTeO+99+D3+3HmzBmk02lEo1EIgoCKigoYDAb4fD6EQiGUlpbCbDYjl8shEAggEolAoVDA4XDg+PHjKBQKcDqdMJlMSyHenBBbxPl8Hj6fD4FAAH19fTh9+jTGx8fR2dnJEY58Ps8Phs1mQ3l5Oaanp5FOpwGAc6RjY2MIBoM4evQofD4f1q9fzzlSnU635A8KyZ3NZpHL5TA5OclRnRMnTiAUCuH8+fNIJBLw+/28OapUKqTTaSQSCZSWlsLpdCKdTsPr9SKdTuPMmTOYmpqC3++HzWbD/fffjzVr1sxQhksN8Wbo8/ng9/vh8XjQ39+PiYkJdHZ2IhaLIRQKIZ/PsycfCAQQj8eZ6FooFHDu3DkMDw8jFArBYrGgp6cHtbW1WL9+Perq6qBQKG4YT1hcsTE5OQmv14ve3l50dXUhHA5jaGiI+QwKhQKVlZXQ6XSYmpqCz+dDMplELBbje0elUsHr9cJsNqOpqQlOpxPbt2/HypUroVQqbxjPSvx8nz9/HufPn4fX62W5Tpw4genpaahUKqjValRXV0Ov1/MzT4qeDCbgwnOuUqnQ0tICl8uFW2+9FStWrIBareZIwFJDvN4TExPweDw4c+YM72fBYBA6nQ51dXUwGAyor6+HyWTC6OgopqamoFQqodFokMlkEAwGIZfLUVZWBr1ej/LycpjNZtTU1MDlckGlUrHcN4LswAX5vV4vvF4vTp48iXfffRdKpRIlJSUwGAxYs2YNzGYzy0/PttgooD2LUiRGoxEajQZqtRoajWZBznvJdknKgU1MTOD06dN477338MYbb3Dem7w4lUoFi8WCsrIyJJNJxONx6HQ62O12hMNhTExMQBAEjI+PIx6PY2BgAHa7HSUlJTeU8gfef0imp6dZCZw6dQpHjx5lchstPgC2glUqFRwOBzKZDJLJJHK5HJLJJAqFAkKhEPMEpqenUV5eDqfTCaPRyIz4pQRZwGS4uN1unDlzBqdPn8aBAweYt0GbnVwuZ/IieU8qlQpOpxPRaBSBQAC5XA7j4+MIBoOYmppCaWkp2tra0NbWtsTSXgwKXft8PvT19aGnpwfd3d0YGxvDoUOHOLwrl8s5tBsKhTjtA4ANQYVCgVgsxukNn8+HyspKHt4h9gqXGrR2U1NT6OrqwvHjx/H73/8esVgMU1NTM9Z7ZGQEer2eDfm54Ha7oVAosHLlSlRXV6OyshINDQ0AALVavWhyzQdxfjubzaK3txcHDhxAIBDA6Ogo/H4/hoeH+fmWyWTo6emBRqNBLBbjqpa5oFAosHr1ajYWysvLYTAYbqg0F+3nk5OTOHXqFA4ePIhXXnmF01x6vR5r1qyB1WqFz+eDyWTCwMAABgcHodVqodfrOSWkUCiwYsUKGI1G1NbWwmazQalUorS0lFMmN8p9DoBTlD09PdizZw9+/vOfQ6fTweFwwOFwIBAIoKysDFqtlh0BMnKIoE5rmc1mOUpSWlrKjtxCyLskyn96ehpnz57F6OgoDh06hMHBQfT19SGZTHJYHwCTgchLuPfee1FXVweNRoOSkhL09fXhpz/9KWKxGBKJBHK5HLq6upBIJGAwGOBwOCCXy5ecAEdKP5vNorOzE4ODgzh79izcbjfcbjeGhoZYmYuh0+mg0Whw6623Yt26dSx3IBBAV1cXh8Tz+TyCwSDC4TCqqqpQU1MDhUKBkpKSJWUUi5X+r3/9a/T09GBsbIw3RErZzC6B0ul0MJlM2LRpE1wuF6xWK1wuF/MDkskkwuEw0uk0gsEgJicn8d5778HlcqGyshI1NTV
LmhcVh/9OnTqFsbExnDhxgpWA2+1GLBabYegBF5S3VqvFypUrUVpayp4geTuZTAYnTpxAPB7HmTNn4Ha7UVpaytGx2tpaToktxZpTWiObzc6o0BkaGsLw8DD8fv+c661QKKDRaFBZWYnq6moYjUY4HI4ZytTtdnMkqLu7G52dnSgrK0NVVRXq6+tnEAEXC+K0BpXl0rq8/fbbnLaMxWJcvkuQyWTsGep0Ou5nQXJoNBrmxlAlDBl+AwMDuO2227B9+/Yl5XyIo3rd3d2YmprCyZMnMTY2hrGxMWSzWSiVSlgsFqhUKvh8PsRiMc7/BwIB5PN5JnSKFTspvoGBAfT29nKKoKKiAg0NDRwtWKrnnO51t9uNcDiMffv2cbQHuKDEw+EwMpkM3nvvPZhMJrjdbpjNZl5Ler7pGafvAYDBYIBer0d7eztWrFgBs9kMu93O3ynGei+J8s/lcvj973+Pzs5OHDlyBMPDw/we3fj0OZlMxh7vnXfeiXvuuYc3jzNnzuDVV1/lsFkikcDhw4cxMjKC9vZ2rF69GgBuCOWfz+eRTCbx+uuv48iRI+jv74ff72eCyGzIZDJmtLe1tXEpo9VqhdfrhSAIfBNRGHxiYgKNjY0cZisUChxaWqrNgfgJzzzzDH77299e9jsymQxarRYmkwmbN2/GnXfeCaPRCJPJhGg0ipqaGiYH+nw+DA8PY2pqCseOHYNKpWLi41KGBcnoicViePnll3Hs2DF0d3fD4/HM+x2K8Oj1erS2tqK2tpY3OLVaDYvFgkAggIGBAQSDQfT19TE5KpFI4Pbbb0d5efmSlrqS0ROLxfDmm2/i5MmTOHnyJNxu97zfIblJ+ZvNZjidTlRXVwMAR7l6enqYNOf1enHs2DEolUrccccdHPlYbEUg5nEQKfX111/H+fPnceLEicvKbTAYYLPZmMujVquh1+uZFEdeIpFCg8EgBgcHAQBPPvkkOjo6UFpauqQRHzLO9u/fjzNnzsDv98Pv92NkZIRTHGazGYIgMJ8nlUqxY0KebUlJCYAL4W5xtPDUqVOYnJzkEtjVq1dzZHepIj7i8+vs7MTZs2dx9OhRnD59mlPVtO9RZFMmk+Gdd95hDtf09PSMlJVcLmdDSlwF8cADD2DLli1Yt24dDAYD9/m46ZQ/EWDC4TDGxsYwODiIVCp1UZ5LXCohCAJ78WazmS1iALBarVi3bh2sViuTpnK5HEKhENxuN/x+PwwGA4xG45KGifL5POLxOILBIHw+H9xuN1v0dPOTESCXy2EwGKDRaNDQ0ICysjI0NjbCbrdDp9PxBlFWVgalUolUKsU3TSaTgdvtxpEjR7B+/XqUlZXxxrrYHhEpAr/fj6mpKeRyOY5kUH0+kZtyuRznrHU6HdatWwe73Y62tjaUlZVBo9FwCqO5uRlOp5N/m4iP5B263W7Oky92Lpg2hVQqhe7ubkxOTmJ4eBhut5urVMi7E8tNVv6tt94Kl8uF5uZm1NXV8eZA0Sun04lt27bB4/Ggs7MTU1NTCAaD6O/vh8vlQiwWQ0lJyaIbAGTcZjIZjI2Nwev1YmRkhKsZAHDukoxgMvK0Wi3Wrl2LiooKOJ1O2O12WK1WVFRUsEedSCS4nDeZTEIul7NCLCsrw6ZNm6DX6xddCZLcqVQKw8PD8Hg8GBsbw8jICAqFAj/HOp0O09PTiMfjUCgU0Ol0fJ87HA6uVJm93uT5ZzIZlJSUoLe3F5FIBPF4HLFYDB6Ph3t8LJaxK1Z89MwFAgGcO3cO/f39CIVC3K+Cnr9EIoFCocDE5ZKSElgsFmg0GqhUKo7gkJNSKBTYqbHZbIjH4wgEAujp6UE+n4fdbofD4UBLS8uidnelZzaTyTBZ9/Tp03C73YhGo2zMEOjf5ISZTCbo9XqO2JJzR8ZPLpfDxMQE97HJZrOYnJzE4OAgCoUCp38bGxuLYvgsqvKfnp7GwMAAxsbGcOTIEZw4cYKJaZlMhkMexJwkZeh0OlFZWQm
HwzEjj11eXo4dO3ZgdHSUQzAUAejt7cXp06fR2NgIg8GwpMqfctSjo6M4e/YshoaGWEFqtVoYjUZmQ2u1WjQ1NcFqtWL9+vUoLy/H+vXr4XK5uOTFZDJhxYoV8Pl88Hq9iEajSCQSiMViePfdd9HT04NAIICGhga2FhcT4khHV1cXM7ctFgucTifMZjP/Ifa7QqGAxWKBXq/H7bffjsrKStTV1cFqtQJ4PyJQUlKCbDaL8vJyDv0DF5jDnZ2dcLlcCAaDMBgMi94AiDZFn8+Hf/zHf0RfXx+6uroQiUTYsCXjjcr91Go1amtrUV5ejgceeABtbW1wuVwwm828qYl7ITidToTDYfzsZz/D3r17MTk5ib6+Puh0Omzfvp17ICym3HR+wWAQr7/+OkZHR7F//36MjY2xoU6kTdrU5XI5R7K2bt2KdevWsdzECCejNpvNorGxEbFYDDKZDEajEd3d3Th16hTUajW2bt3KxvFiev9knAQCAbz55psYHR3FO++8g6mpKdjtdrhcLrhcLlRUVCASiWBgYABKpRJ1dXWwWCy466670NjYCIfDAaPRyMqDlAxwwVjM5XIwGAywWCzo6upCX18fxsfHceDAAbS2tsLhcCwKyZXWkqI7ExMT+MlPfgK32413330Xfr+f9zVKUZKRTq+r1WpYrVZUVVVBq9VCo9GgpqYGq1atYiJkJpPB4OAgotEopqamkEwmMT4+jlOnTqGmpgZutxurVq1CdXX1RRUQCwmKQo2Pj+Pv//7v4Xa7MTk5OcPApeeOnDraExQKBWpqalBRUYFUKoVUKsWlq3q9Hna7HZlMBp2dnfB4PBgeHkY6ncbAwADC4TAsFguOHDmC9vZ2/OVf/mVRUj2LovzFpT59fX0YHBxkhvPs/B/wvndktVpRWlqK1tZW1NfX84bIJ///80nhcJhvfjqWuFPcUoHy3eFwGN3d3RgeHp6hCID3z1fcCIM2xaqqKjQ1NcFgMMxobCKXy6HT6WA0GmE2mzE9Pc1GER0vkUgwH2AxrwF5/FTH39PTw3kxquulZi5ib1ij0bBB4HQ6YbVaudRRXLo4u2RG3ByJSj1TqRTUavWiyS0uVyUSqsfjgcfj4SgEwWAwoLq6mptUlZaWor29HQ6Hg40dcSh3dmtbk8kEuVwOh8OByspKjI6OIhwOz2h45XK5FqUpDF3fdDqNyclJTE5OYmRkBB6Ph7098nYolJ9MJmG1WqFUKlFRUQGz2YyGhgZWgDqdjp9/+n3KEctkMpSXlyMej8Pv92NsbAyxWIyNS7vdDmDhiY/iHh3j4+PcjGpychKZTAYA2Juz2WxwOp0c1qb1MRqNqKqqgt1uh9FoZAdFXO9Osk9PT6OiogLV1dWYmJhAf38/wuEwBgcHoVarsX79eq4nX0jjh8ir8Xgcg4ODGBoawsTEBNxuN3MYyHEjQ5+MXHpO1Wo1mpubUVVVBb1eD51Oh4qKCpSXl3M+O5fLQS6XMxHSZDLh3LlzSKVSXEFisVjYqKAqmYVac4pKJ5NJjIyMYGhoCJOTk+ylC4LAfAWNRgODwQAAbLgnEgmoVCq0trZyRIsIfbTP2Ww25sNEIhHU1NRgamqKIyzZbBbj4+NwOp0IhULsDF2P0bcoyp+6tw0MDODv/u7vWPkD73v5BLLkrFYr7rrrLlRWVuJjH/sYqqqqLvJgqUwon89z6DydTiOTyUChUHBIZbEhblJz+PBh9Pb24tlnn8XU1BTLTaAQENUtazQaVFdXo7q6Gjt37kRVVdWMyX1EiLTZbNDpdFi/fj28Xi+H3UgBUmtkrVa76EowHA7jyJEj6O7uxk9/+lNMTU0hHo+jUCiwJ+P1epHP52GxWLhWf/Xq1XC5XGhqauJWzZQLE0eDKBQcCoWYQEdyUydIQRA4arDQIKNmdHQUL7/8MoaHh3H06FGEQiE2QgmrVq3Crl27OAxss9lw9913w2w2w+Fw8AY+uyZYoVBwusRkMuHWW2+FRqPBwYM
H4fF4MDg4iN/85jdobW1FVVUV3/sLqQRJGfT39+Pv/u7v4PF4cOrUKW7GJZfLsWHDBvZOKysroVarUVpaCo1GA6vVyi2LqY5dfL5E+KX9wGAwYOvWrVizZg2USiVH0/75n/+Z7xmbzQaTybSgzz2t9/nz5/G9730PExMTOHv2LPei0Ol0sFqtKC8vR3NzMzZt2sQEMOrGaTQa0d7eDrPZzGF+sfIX533z+Txuu+021NXVIZlM4tixYxgaGsLPf/5zbNiwAatWrUJ5eTkqKioWtAKAjPrOzk584xvfwNTUFCYnJyGXy9HU1ASj0cglvW1tbVi1ahU7A1Tpo1Kp8LGPfQz19fXcrGl2rwpBELBy5UoUCgV84AMfQCqVwptvvomXXnoJXq8Xhw8fxsTEBFwuF+rr65kPNTvsXixQn5HTp0/jBz/4ASYmJtDV1YXp6Wk4nU6UlpaioaEBFosFVqsVDoeDnbBMJgOv1wuNRoMnn3wSjY2N3ONBnLKg+/7BBx/kKq54PI69e/finXfe4f43yWQSb7zxBpqbm7F582Y2NK4FC6r8xWzQQCDALFCv1zvjM2IDQK1Ww263M3u5uroaVquVLWcxyNql8Aq1S6X3lmrYjbhJzejoKHsGsxU/kZ1IhtLSUrhcLpSXl8PlcnHOUHxzUAkkGTtarZbDneJ6W/ozu4JgoWWmnPfQ0BDGx8fh8XhYbnFzFpLLYrHAbDbDarWirKwMFouFHw7aEGcbL2KPmNZd3A9f3CN+MUBeTSgUwvDwMLcipnCfXC6HXq/nrm41NTXsEVD+ktIz89Xq031M185iscBms3E3x1QqBZ/PB7vdvmgRH7ruRED0eDwIBoPI5/MoLS2FXq9HdXU1ampqWPnrdDqObJCBQlwW8fnOjvaQkjCZTHzfEDlsZGQEBoMB0WgUOp3uujbES0Ec9k4kEvB4POjp6WHuiSAIrAxqa2vhdDr5Wc7n86zoyeMlct/s6A4dS2wAlJSUwGq1wmq1wmQyzah0mZqagl6vX7DOpuL9LBAIYHBwEP39/YhGo0in09BqtSgvL4fD4eA9p7a2Fk1NTaz8idtFZbvUrnn2PBKx/AA4ytnQ0ICGhgbI5XIMDg4ikUhgcnISSqWSqyCKzf6nc8hkMvD7/RgaGmLiaTKZhFKphMPh4HWmKE9lZSVSqRS3ZSeuE6U8L/Wc03GJC9TY2Ij+/n7miEWjUYyMjEAmk2HNmjVcFn0tci+o8qdyp/7+fjz99NOYnJxEKpWCSqXioR60UdOFbmlpwSc+8QlUV1dj/fr1XNs5H0g5RKNRRCIRqFQqbhKzlAx38m6PHz+O8fFxDgcSSOG3tLSgvb0dZWVlWL9+PWw2G5qbm7mX++xFpbw/zTBIp9Pw+/38IIotyYWyhGdD3N0ql8thYGAAzzzzDLxe74x6dZlMhrq6OjQ3N6O+vh5tbW0wGo1wOp3Q6XTcxGM2o5U2R1J8RH5Tq9Uwm82cWwTAZXGLRfYTBAE+nw/nzp3DoUOH8NZbbyEWi3EDm4qKCphMJuzatQttbW1obW1FQ0PDDKVGZLVLVWWI6+KJJLVixQqcPn0aABAMBnHkyBEAwNTUFAAsKOeBGpucPXsWBw4c4FJdUvx/+qd/ioaGBtx6660ctdNqtTNytHRu4meVjFsyZsXd3QRB4FTfunXrMDIygrGxMRw/fhxyuRynT59mvsxCeMBkUJ87dw6/+93vcP78eQwNDSGdTrNy/9SnPoVVq1ahoaEBVqsVer2eu26K15AM+PmqccT3vpgstnXrVsTjcfT29mLPnj0IBALYvXs36urquA9AMUHprFwuh3379uG5557D8PAwAoEAgAtpLJfLhT/5kz/B6tWr2ZATt9sm2TOZDMsi7tQolne2/CqVCkqlEps3b8aKFStw+PBhxONxZDIZnDx5EiMjI9BoNKirq8Ott96KsrKyoslOhsz+/fvx9NNPY3x8HP39/QDA+9ZnPvMZrFq1iteazpeilOI
5FeKunJeCTCZjg+G2225DW1sb9u7di7NnzyKTyeDFF19EeXk5TCYTVq5ciYaGhmvqabOgyp+8smAwiNOnT8Pr9TJrkRQ+eef0EFRWVmLt2rUoKyvjnu3zQXzDUPkEhUZp81hMiKsUiAXs8XiY7U7nTGV8Wq0W1dXVaGhoQH19PdauXYvS0lLY7fZLGi8UJlSpVHyNKeohvp6LzQCmG93r9aKnpwfxeHyGIaJSqWC32+F0OtHU1IRVq1ZxWR+VN82lrGYbAdQUg7xpsZFDymUxjB7yiBKJBAYGBuB2uzE1NcU122q1GjabDWVlZWhtbUVHRwfsdjssFgtHQK6mFFPsIep0Oq5+AS7c/4FAAH6/nxniC3X/k9yxWAx9fX2YmJhALBZDLpdjj76pqQm33HILN2mh+1J8f4rlmi2nOAQOzCzjIz6Q1WqF3+/nSppAIACLxbIg0S56pqkr6ZkzZzAwMMD5bKVSCYPBgJUrV6K9vR0ul4tTGWL+wuzfvBRIfooQEvm5rq4O0WgUMpmMo4sKhYIJ08UGpdr6+/uxb98+pNNp5HI5Jq+Sw1JbW8uh/NnpG3EU42pKj+naUevyYDAIl8uFQCAAj8eDcDjM6c41a9YUVW5xWmv37t2ceyfiscvlQltbG5qbmy/pcIij0VcjNzl5JSUlaGpqgsVigdfr5UqDvr4+bvp0wyl/2qyok18qlYLZbEahUIDX68X09DR27dqF1tZWDpfU1taitbWVwxmX+32yMElpiCdoLXb4l0AbRSgUQk9PD5PdlEolpzF27NjBXbtcLhfnCcWhsPl+WxwSpeE+FOoUk4UWq/EJKf90Oo1QKIRIJMJrYjKZUFpaylGNbdu2obm5mWv3xY1NLqWwxYqfjKd8Pg+FQjGD4Dk7/bOQIIOHSu9isRjWrl0Ls9mMVatWwWg0orm5GRaLBc3NzWzUzZfvI4iVwlykPyL7kPFDr9N9M1/viGLKTY2l+vv7EYlEWBnv3LkT5eXl2Llz54x0xqW8PDHme48UIV0Huo4UPqV0EzUKKzYEQUA8HkckEuG0pUaj4SqGBx54AFVVVdiwYQMcDgff//OlrS71/7lAz4jT6cTGjRu5pXkikcDJkyfZ4CsmyHOdmpri4WptbW2IxWIIBoOoqanBvffeyxwlMsTnkud69iDx/W+xWLB27VqMjY1hYGAAfr8f77zzDpxOJ+666y7U19df83HEKBQKbFCOjo7OaNLkcrnwxBNPoLGxETU1NUxSvdS9K/77akD7d0VFBT7ykY9gcHAQL774IhKJBN544w2cPn2aWx9fLRZc+ctkMm54QDWaALi7U2trK+6++27U1taisrJyRuemK/HeZvf2prD7YiqB2RCzYr1eLxKJBIALof6KigpUVVVh165dWLduHYf/Z5NeLgfyCAwGAw8Botfpz2KF/cU5QWLoklwulwsOhwMbNmxAQ0MDNmzYAKfTOWN9r/ShEG8C1A2P7i9x1IWU8kJHfsjQmpqaQk9PDwwGAxoaGuByuXDPPffAYrGgoqKCc7xXk4qg9SWICZ8AOD0ivmfomi4014OuL42WjkajKCkpQUNDA3bs2MFGPG2K892Dl1OKs1+bndIS9zOg8PRCRjzi8Th8Ph+i0SgAcASqpqYGd999NyoqKuByuTi9MXvNrkXpE0hOo9HIPRHkcvkMIh2Fl4sBeqbz+TzPIMlms6isrEQymeQ+JGTkXYpkeT1yi79D+f/6+npeayLGjY6Oztsa+mpBsodCIQwNDfHMEQI5MVVVVVdELr1ew4dSJZs3b4ZOp8Prr7+OcDiMY8eO4ezZs/jUpz51Tb+9oMqfHtC2tjZ86UtfwvT0NLRaLZdrZLNZ7Nq1Cy0tLTzIYK6w4HygqX7j4+PQaDQ8IYu8nsUK/xLIyifFtHr1ajz11FPczYlyU1arlVmfs9uxXk34V7y50P+pgYh4VOhCg9aLypXuuOMOLnEzGo0oKSnBLbfcwixn8fpejTKkz4ujADa
bDfX19RgeHp6RZlisroYymQwtLS14+OGHYTAYUFZWBpPJhOrqami1Wi5DutquXJf7LCkeUv56vZ6VD5XMLbT89fX1uP/++5FKpRCJROB0OtHa2spEvivJbV4taG1LSkpQVlYGr9fLoVG69xdKbuJnEKvbbDajpaWF2xKbzeY51/l6N38x5PILHVD1ej30ej3n0hcC9MxROk+n0/GQsTVr1qCyspJJp1dryF8LKNJbVVUFv9/P+7vBYChahQdFkDKZDA4ePIj33nsPZ86cAQBUV1fjtttuw4oVK1BTUwOLxbJoQ8TE606pldmVRFeLBVf+CoUCdXV1+OxnP8t58Fwuh2g0iunpadTU1PDNc7WKmpoGjY6Ocp14MBjkiUmL3eQFwIzjNjU14TOf+QzftGQUkJd/LSMq6bM00pisVPKGTCYTD/ZZLA+Y5KAwtMlkQmNj4wzyFnWeuxYeglhGYCYBkEoDqemIODWw0KDzqKurw4c//GEmaYplFI8kvRyuZp2I2Ehhb71ej8bGRpSXl7PBtdCGb1VVFZxOJwCw0UuVJ2LeyewoxmzM5xnPBfpMSUkJ10KbTCYmxC3EpDtxtIVy3NSNsLm5GWazmcsYxez1y8l9LaAOgdQbQDz+dyFA3j916KSmRStXroTBYIDVar1i47YY10KtVjO7npS/yWRCWVlZ0ZR/OBxGMBjE7373O7z44ovsQLW2tuKRRx5BTU0NysvLF2za3lwgx46MXOqOej1YFLNFrBBlMhnXe1N9/rV6aZlMBsPDwxgfH0ckEuGQH41TpFr4xYZYQRuNRv43KSZxKPR6HohCoYDh4WH09vbypDQqdwMwo550MSD2yMVsazJMrlXx02+IX6PyOor8UMiPZp8vRktjkocqTObruz2fUr+WzZIMIWqTLZ4GR73+xUpoISCOPBGzmV4XG5viCMylFOFcefHLIZvNzqhymd1MqVigNBLdb9RBNJPJ8LAZKnmkJjULDVLGREBbCOOemtokEgmcP38eZ86cYTmVSiXa2tpm7GWLsceIqwaop4JMJuPyzmI4evl8HuPj4xgeHobP54MgCJxic7lcqKurYwLrYkL8zFF0m/bVaz2XRVH+4hOk3vziLkjXeuMkEgns27ePWyBms1kOj1BufSlK/kgm6mNPr4nfL8YxMpkMXn31Vbz33nsYGRnhyAopQiLbLabyp7WeXcd6vR7/7O+mUinEYjF0d3dj//79nOoh4uRitHQWs9fn6scwWw4xLnVu870nzsPSiGAquaJNURAE5hcs1AYlVuh0bFK85IWSVyY2+i+Fy70v5nQIgsAsb7fbjWAwyATQYhsA4hLWcDjMjW1oJGswGIRKpZrR4vVKZboWkCGSTCYRiUR4/HmxkcvluIPfa6+9ht27d3Ma96GHHsK2bdtmTKRb6GeN1j2bzSISiSAcDjPhlyqIiuH5Z7NZvPnmmzh9+jR6enogk8lgNpths9nQ0tKClStXLngnxbkgjh5T0yEySm5o5Q8UV/kRmzmZTHLNJ3k8FouFBwFdS661mLgew+ZSoFAfDYAIh8NM9hErv2JOgLoSXM7DuxoFON/3xWWUNECEZKcI02LJLT4fssSpxFQ8tx3AjDD49ZwXHZPaJ9NsA5VKxWNRF4roSV49dXGjEjDyCIH3U30LZXSS90fd5lKpFA9OobBosY5Lx6Je7B6PB/39/ZiamkI0Gp0xdnk2d2ehIB6qMzvKUuzjTE5OoqenBxMTE0ziJWOX2tmK7+vFwuwqrqvhiV0ORPSjEdKCIHDTJqrGWqqpmeJIWjFkXZKRvteLVCoFr9eLgYEBxGIxJpZptVrce++9aGpqwoYNGxZ9qttCQqw4qc/z5OQkVxPQ5kvtRDs6OphsthSkx9mKfrYnLL6BZ/89+/cIZPRkMhlu8OL3+wG83/ugtLR0RmfEhQIpQWopHYvFOA+fzWZ5dgVZ6zTFjLyn+eS91PHE+dehoSF0dXVhdHQUAGC327FmzZoZnkk
x5afIQi6XQ19fH8bGxphzEg6H4fV6odVq0dDQALPZjJUrV3L3wWJFuqiSh3qH+Hw+BINBjiTSsJdLdU+7UtCxqJnM4OAg3nrrLXR2dvJntmzZAqPRyGWr4h4jCwXqNkdyU7St2GmeTCaDX/ziF9i7dy936CSyY1VVFaqrq2cMslksUCRmoTpY5nI5DA8P49y5cxxJW7duHXbs2IGOjo5F9/gJtM7ULIsi6NdzDW4q5S+uJ/f5fDxIg5pslJSUwOl0orGxcVEIT/Od45X+/0qU31y/T2OLp6ameLAE5V5tNhtcLhdbqYvV4W/2/2ePZybMtlovFS2Y6z3yfILBIE/8AsB172LPd6GUv9gDzmQyPMudKllowl0+n2ejNJfLFcUgIQMgEokgFArxiFtqDmW1Whdkzem+S6fT8Hg86OvrY2+Xat9LSkpgt9vnnCdRrLWgsHc6nUYqlZqR6iMyVLE8QLrWHo8H4+PjGBwcxOjoKAwGA0pLSyEIAjd3WSzPnybLkWIiL1zMuC8GqL5/cnISwIWITklJyYyyvmIYWddyXtlsdka0j7rqFevaJ5NJHjstk8l4GNdSTocVlzAD7++j12N83VTKPx6PIxqN4vDhw3j22We5rzUAVFRUwG63o729HWvWrIHFYln086Oco7ivPtWgT09Ps+IiZU3MXfIIL3UDy2Qynobndrvx/PPP88AcajWpVquxYcMG7Ny5E06nc1FCkIVCgSfp0XkmEgmeuEUbIslnt9u5t7u4uc+l5BYfb3p6GpFIBHv27EF3dzdGRkYAAHfccQfuuOMOfOADH1hQshvJm06nsXfvXpw+fZrHUdtsNjQ1NSGVSmFkZARKpRJ33XUX9Ho99z24Hu5DPp9HLBbjznonTpxAOp1GWVkZVq5cia1bt3Lv8GIpIjo2ten2eDx46aWXcPToUdTW1qKsrIw3JursR5MJxWHhYoHC/R6PB0NDQ1x73tDQgLKyMpjN5qKS7qanp3H27FmcPXsWk5OTyOfzKCsrw+rVq7FixQq4XC6etLgYiiEej6Ovr48JvjabDffddx/q6+uLOtNALpdzVQPtTzt27MDtt9+O9vb2RSntE4Puf5oMOzExwV0Gq6urUVdXd8k28FcLmUw2Yx4H3ddLhUwmg6GhIfT19XE3TYfDcV2trBd8sM/1YDbDmzrInT9/Hrt37+ae2hQGMRgM3D51KWbYk+IXj66lzlDiEDGdM53jlRJmyKsMhUI4d+4cJiYmMD09zRPSDAYDKisr4XK5FpzoJ7ZEE4kEtxslElRvby8EQWBFpNPp2DuncjBq3Xml50lKKJVKYWxsDGfPnkU8Hue20Bs3bkR5efmCe/2pVArRaBQnT57EgQMH+FrU1tZCJpMhm81ibGwMWq0W09PTfF2ux0oXR73i8ThCoRC8Xi8PyqFpYnq9vugeqNgD7uvrw5kzZ9DV1cVRBwpFUnWLwWBYkNyouMqDuC4038FgMFx1I6UrQaFQQCAQwMjICJLJJAqFAnOKaBb7YhHAaA8MBAJM9NPr9WhoaEBTU1PR9zxxPwFqZtTe3r4ojsVsiAl/wWAQ0WiUnQuj0QibzVY0o4/2D0rjGQwGjuAtleefzWZ5KizxPbRa7XUZuwui/Cknk06nuSwmGo3OGGxBSmF2VzLxRSfSHnBh8T0eDw8RIU8auBDyXbt2LWpra3ks6mLlomhDonaQsViMCTmRSAQDAwMoFAocgqfhD9SMhULIs3PiwPvGD3UqJKXa1dXFE9TS6TQUCgXWr1+PhoYGrFy5kpuNLKTMpOAikQh7RTTQY2JiAufPn4dMdmHGt8FgwJ133gmTyQSNRsNEzCtV0mRMJRIJ9Pb2cr4/Ho/DarVCrVajtbUVra2tMJvNC0ayzOVySKVS2L17N/r6+nD48GH09fXxZ8xmMxs309PTPGmN2vpeLcQGFj1P586dw+DgICujkpKSGXXuxVZ+ZHAlk0mcOnUK3d3d3N6WIk4mkwnl5eWorKz
kBkPFUob0LJBRferUKRw5cgTHjh1DMBhEbW0tbrnlFjQ2NnIPjWJFPOiY+XwecrkcWq2WpxUSy32uyXTFhphUOjw8jHfffZcjXjQTo9hhf0G4MCwtEAggEolAqVQyw/xay3av51zEUc/Dhw8jFArBYrHwSPCWlpailXXTnpxMJpHL5XhQ12LW9RNIt/h8Puzfvx/j4+M8IZGaS90wnj/lBslC83g88Pl86O/vn9Fz3mw2s/erUqm4ZpZG21JIQ6VS8QZAPdQHBgZmtHTVaDRobGxkxbfYXj89mD6fD263m6cZDg8P4+TJk5w3olpRIqXpdLqLiCtkCFA+TyaTcbjb7/dzWR/VOCeTSR6TSXO9iz3ZSww6t1wuh+7ubvT39+PUqVMYHR2F0WiEXq/H4OAgjh49CrlcDrvdDpfLhY6ODg59kyF0JZsVbXxkTHV1dWFsbAxTU1NIJBKorKxEZWUlmpqa4HK5FizvSjJHo1Hs3r0bR48eZeOHEA6HodPpWBkYDAaeOngt1rk4mkRjZE+dOoW+vj6MjIzwFEfqcCeeG1BMuam6oq+vDydPnkQ4HAYAbvJCA04sFgufR7Hzz3QNDh06hNdeew2jo6NMsqyrq0NtbW3Rq3vEaTsAPMiGohzi1FUxKjnmwmzjh5Q/9fJQqVQ8HrjY606TUoELjohY+S8WSH4y/kdHR5l02draiurqak4/FVM5k4NDhi8NjruaSGUxQPeg3+/HwYMHEQgEkMlkoNFo4HA4YLfbr1nuoil/mjWcSqUwPDyMyclJVvyRSIRzVACYoCRW0vSAk0VtNpuRyWSg1+sRi8WQSCRw5MgRhMNhDvkYDAa0tLSgvr4eGzduRHV19aI19RGXHNFksQMHDqCnp4e9tWAwiMHBQd6gacpXNptFOBxmT02pVDKZCgCHEekh83g8PDv8/PnzCIVCUKlUsNls2LBhA+x2OzZv3oz29vYF5zqIPcHjx4+jt7cXAwMDCAQCKC8v5/nsFRUVUCqVKC8vR3l5OTPwxddOHPEQ/5s2cDKiwuEwJiYm4PV6meGv0+lgt9vR0dGBxsZG1NbWXlOXyKuRm9abPCKqaXc6nTxilyJPNNddqVTy/TA7jXU5kLeTyWQQDAYRDAZ5TDKRWzs6OtDe3o5Vq1ZdttTyWkAyU/8Iqq7Q6/XcrplGaF9Lx0pgblKsmG1PxkcqlUIwGOTNT0xuNRqNvEbF8P7J6CGOidfr5WoHCrfSVMWFqHMnWUh+t9uNQCCAnp4eVsIGgwEVFRXcUnqhUw9URruQnBqCOMxP7eBHRkbQ19fHeoJ0hTiVWAwQeZP0FT0DizUnhtI72WwWk5OTGBwcxMGDBznkT07FmjVr0Nraek0T/YAiKv9cLofe3l6MjY3hlVdewdGjR7kTE+XoxOFsYmWTkOI5yEQeOnjwIDQaDU6cOIFgMMh5tmAwCABwOBy455570NDQgI6ODhiNxkUPRaXTaZw9exaDg4P4l3/5Fxw6dGjGZ2gzVqvVKCkpgVqtRjqdhtVq5fBwOp3G1NQUDh48CJlMhsrKSmi1Ws4x7dmzB4cOHeJcM3Wxczqd+KM/+iM0NDTg9ttvh81mWxSSHxkvu3fvxsGDB2ds3uT90bheh8MBi8XCLHDK84tbE5NyJGVKeetwOIxwOIyenh7s27cP8Xgcfr8f6XSa83zbt2/Hxo0b4XQ6F7y6gSIQbrcbHo+H5aa59XV1daisrGTmOXni8xk6Yuau+G/6HA0uCYfDOHXqFHw+H8bGxuD1emEymWA0GnHbbbfhgQce4A1AfM8VA8TpCIfDbNRTaZvFYplBshN7wVdzHuLrQsek3P7k5CTS6TQb/TRdjngOFRUVqKiogNls5vuqGJ4p/RaNzB0fH+f1pDa3dE9fa9vqyx07n88jHA4jHo/jzTffRFdXF44fP45EIgGbzYba2lrU1NSgqqoKNpttwT1y4nYsRhdDej4SiQTi8Tg6Ozuxb98+9Pf3I5fLsb6gSDH
tGcUARSipokKc/in28zUXBOFCrwG/34+XX34Zv/zlL7nBlEajgcvlQmNjI3bt2oXm5uZrJnpe9yrSpp1MJjE4OMhjFslSJ8tVJpNx728A7NXRcAJx4wbyjj0eD5RKJT/QFAKfHSankJBKpbqiUcDFAG0MkUgE58+fx/DwMKamppiBqlar+cahUiS1Ws2duSYnJ7nNsUqlgs/nw9DQEAAgGo3OCOGOj48jEAgwl0Ame3+kK93wFKJa6JIjavsZj8eRSqVYPormmM1mVpLizSIWi0Gr1TJRhRQ+5dGphKlQKPDAo3A4jFAohJGREQSDQW4oI5PJUFpaysZTPB7n/1Mt7EI0PaF0Ft2D4t4C4g2I7l/KG2o0GkxPT3MDILFiEc9fIMVJbVuj0SimpqZY8cbjcSiVSo6KkUcSCoV4wAkNkCqm/OK2tXRMhULBXAx6vsljAd5v9Uv/JtDzKv6/OLRL15jusf7+fl53qqqQyWTcape8ccqNXi/JWAwiatJ50nUQ85SoydBsxTNXOet8EEc6aE+MRCJIp9MYGxtDIBDAwMDAjDQTcaOIZ7MY7axpvK24gdr1Rj3EynU2MTqXy2F0dBRerxdnzpzB+Pg4fD4fO4/19fWoqKiYUVlyvZDL5TCbzbBYLAiFQkilUpicnERnZyfa2tpgNptZz1zv9aY9hZ4pmgSbzWZ5Zs3Q0BBCoRCmp6e5Zf0HPvABVFVVsfG5ZB3+stksJiYmMDU1hddeew1nzpxBNBrlFoSJRAIajYaHjTQ0NKBQKOD06dOIRCJcTkF1uwqFAhqNBvl8HgMDA5DL5XA4HHA4HDM8RplMxqVPXq8Xvb29sFgsaGxsLGrJy3yYnp7G+Pg4xsbG8MILL6Crq4vL3SwWC8rLy7n5i1Kp5I15amqKuQEajQaTk5Nwu92sMAFcNNqXNj162DQaDcrKymC1WrnToc/nQ6FQYNKZeOMqttxutxtDQ0M8qpi4Gw0NDWhtbeUQsUwm4zKhwcFBTExMwOfzcWOmQqGAWCwGj8eDZDKJiYkJzq1RRQTNJyCDitJFNpsNGo0GgUAA586dYwVRrOles1EoFLjUlNaJyjTtdjvKysr4nIghDoA5GXR+JDeRQcmrpT4AcrkcY2NjmJyc5HSZWq3mVs0NDQ1wOp2IxWLc3/6dd95BXV0dM9FdLlfR1p6IbkRyI5mo6oBIh5SmCQaDM+53MTlMJpNx6F7cipeUXyAQgM/nw8TEBEZGRhCJRNDb2wuVSoWVK1dCp9MhHA5DoVCwZ1RbW8tdPqnvfDEMgNnNcyh6R56+UqlENBrF+Pg4DAYDD5qhz87uxDY70iNWHGS8hMNhjI2NYWJiAm+//TZCoRDOnj2LcDiMSCSCVCo1o5MiEaMtFguMRuOCk3wPHTqETCaDtWvX4t577+Xqqmt1tmjdqYKByMORSISN3aNHj2JkZITTX7QOlZWV2L59OxobG1FRUXHNvJrZUCgUqK+vRywWw9GjR5FKpfDrX/8ae/fuxYMPPojPfvazsNvtqKysvO4+B9lsFvF4HPF4HCMjI/D5fNi3bx8bOxTlSqfTMJlMqKurw4YNG/ClL30Jdrudm7hdqxFy3VeL8jKpVAqBQIDruwVBgNFohN1uh06ng8lkgtlsRk1NDW9+Xq+XQ3zi0CflScmLIIt+dvg0l8shHo9DrVZjcHCQQ8zUW34hiX/isjOq+wYuKG4a2ZvJZJBIJKBUKmEymSCTyZBMJpHNZrk8zufzcR51NsjLEMtN14GiKhSS9fl8Mxp/UEhsIXKR5J2REUYGCeWBxaFQ+ptqdIPBIAKBAOdTo9EoRkZGkEqlOKdF0SLK5RGjnTrm0XmQwshmsygpKWEOxUIx/oH3lYI4xE2e4OzZ8pR/JqY0yUV8EJ/Ph1QqBb/fP2PI1djYGIaHh5FIJBCJRKDRaDiyQ+tL3kIsFoNMJoPVai2650vykgxkwIvbGYs/R2FaUvhiL5l+J5FIIBAIMMeFIkcAMDo
6Cp/Px+kN4nqoVCrU1NQAAEcN1Go15z6pDIuOV6y1p7Umvg4pd4o2Ud4duGDwizfi2UaPuMcH/TZ9ljZ4n8+HgYEBjI+PY3R0FH6/Hx6PB4lEYkYLY4VCAYfDgerqaia6FTvaR2Rdp9PJ3ihVbYVCIUSjUQBgpTv7uovlnt3qmkAGcTgcxuTkJAKBAPr7+xGLxTA+Po5YLMZcB41Gw9UWdrsdLS0tqK2thc1mKyrRUy6Xo6ysDJWVlTh79iyA95v+uN1uDA8P834jvu6zSZ+zG/IAM8mb5EwEg0E2cv1+Pw+qm5ycRCgUYt1ZUVGBVatWob6+nscoXy+xtihh/3Q6zfm4QCDA5WwPP/wwHnroIc5fa7VaOJ1O5PN5HDlyBCMjI9i7dy+OHDnCCoSsWVIW+XwegUAAsViMPS1i4EYiEbz33ntQq9X4/e9/D41Ggy1btqC+vh47d+7EihUrrle8eUEbg06nQ21tLZf45fN57Nq1C3/0R3/ECpjC/vl8HkNDQ/D7/di9ezfeffddxOPxOX9fLpejvLwcpaWlCIfDvMnL5XKkUimcOnUKer0egnCh9/Rbb72FVCqFzZs347bbbkN9fT2ampqKrgTFHhH932QywWazob6+Hu3t7Sw38H5b2K6uLvh8PkxNTTFBkDxAMlpoc6SHhvggpCBIGVJ1iFKpxJEjRxCLxXD77bdj7dq1uP322+FyuYoqMwAu0zQYDLBYLDNKq2hjp/OnDY0UPhmp6XSaFWk8HueIx+DgINLpNG+kw8PD8Hq9rGTT6TSSySRisRiam5sBAAMDA3C73RyipD/F7G0PzLzP29vboVareYqi2NAThAuTBsfHx9kDp/QUbc5KpRI+nw+9vb2IxWIYHh5GKpWC0WhkuScnJ1nRUKi/tLQUmUwGarWa04ltbW2oqanBhg0bsHnzZi53K9ZcAzJe9Ho91qxZAwAYHh5GNBrFwMAAgAstlX0+HywWC9+PyWSSe2+IIwHhcBh9fX3M28nn82wwnD17Ft3d3bzOtJ9SOoH4PWazmft5rFmzBvfffz9zL4rNd1Gr1fjgBz+I1tZWJjITp8rv9+Pw4cPQ6XQcaXS5XLwfAeDeK9FolEnQBoOB0yW5XA7vvfceuru7EQ6H4fF4mBtG614oFKDRaFBeXo5Vq1ahsbERK1aswAc+8AEYDAbm+RSznbVarcZDDz2E2267jfcqQmdnJ771rW/BZrOhoaEBpaWlqK+vh8lkgsvl4nuwtLSUFTpw4T6Ry+WcyhkYGMDExATrzEQiwfJHIhHeS1QqFTo6OrBhwwasXr0a27dv59RqMYydoiTHyYsjSy6Xy3E5X01NDSt0Cv8XCgVUVlYin89zV6bZ4TLy3mnzob/FbNN8Ps99p6nZS1lZGbLZLDo6Oooh2mUhbi2aSCQgl8thsVi44x6FclUqFee1SRbyBGeDNh6TyQSHwwFgZl6Qak+np6e5xW9/fz8ikQicTicqKyths9mK7gUSZvMKaF0oLEqMf1LkZCjQQ00kHnGZHMktXm+xR015UzIGKEXk8XgQCARQX1+P6upqJJPJBZGb1kRc7kVGKHmxFI2hyA5t9BTey2QyrARpMBFFQ8izJ8NG3L4UAD9ftDHEYjH2Pqm0s9htTglksFgsFjgcDkSjUQ7BUhSIcvAUiaE1IO4KMbP9fj/cbjdisRjOnDmDVCrFrPnx8XH4/f6LwuLinDA9SzabDdXV1aiurobRaCx6X306B4rkuVwuTlklEgl4vV7mnmSzWfbEaC3FVQAymQxerxc9PT1IJpNs9DocDmi1Whw5cgRdXV18XLrmCoWCZ0LQtbdYLLDZbGhsbGRy6ULM75DL5RylJSI38ZiowyIZc5TqMhgMvO6kCH0+H7q7u/l9qmzK5XI4c+YM3n33XTbqxSA2P0WMy8vLUVdXh1WrVqGhoWFG3r3YEQ8yymw2GwwGA3Mx4vE4zp8/j5KSEvj9fphMJqRSKVgsFkQiEW6vbTab4fP
5cP78eQBAWVkZp6qSySR6e3vR19eHcDjMzwvtW+J+MNS9kMrYqYKqWFGOoih/ceieyDjT09PYu3cvotEorFYr6urqWDkkEgm8/fbbGB0d5d7RRPagC6DX67F582bOdZhMJvYER0dHcfToUS6FEpOmJicnoVAo5vWoiwXxph+LxRCNRhGLxbgNY1dXFxKJBEKhED8QpKwTiQSGh4dnDGegDY42WZPJhAcffBDNzc3w+XwIhUJMCgyFQhgeHoZMJkMkEuGqClIQ4kE/xYZSqYTD4UAmk2Hintfr5Xyd0WhkY5C6b1F9MqVo6uvrUVlZybXKBFL8VquVc+kWi4UHx1BnP0EQmN1M6YCVK1di+/btqK+vXxCuAxl5RqMRDQ0NiMfj6O7u5vrbgYEBVFRUoL6+nrkM2WwWPp+Pp8+lUinua0EzAWhTIZCXV15efpEhRBsn1bdbLBbs2LEDd9xxB1paWnim+UJ5/hRJ6+vrg9vtxp49e3D27FmYTCY4nU5OdwiCwAqJjCAKz4dCIZw8eZLJshT+JAIhGVYUWaHoAUWQmpqasHLlSqxbtw7r1q2b0X52oeTesmULqqqq+FmlLpZ+vx+nTp3iaaJkiCkUCiQSCTb0ATAvhyJdcrkcjY2NsFgsHEIH3icWUu0+9S5xuVwwGAzczKqsrGxBujkSVCoVWlpaUF5ejvPnz2NsbIxJuB6PB8eOHQMANsaogRftdaWlpSgpKUE8Hsfg4CCn7ChlVSgU4Ha7mcwmhl6vR0dHB8rKyrBq1SpUVFSgubkZFRUVMBqNM9Z7IQxd4hl95jOfwf3334/Dhw/j6NGjmJyc5PHxZKhTJ08qeSXCLXnzMpmMOVjE7aAeLaS/qP+L0WjExo0bYbVasWbNGk7r2O32Gf0ziiVzUZQ/LTidGCmy7u5uBAIB1NXVIRqNQi6Xcwj0rbfewuTkJHcmE+cRqY6ztrYWdrsdt9xyC1veNC1tYGCAm9yQByaTyRCLxbgcbKFBchIZhUJ2Pp8Po6OjzI4l7yifz3OdcjQaZS+QvDXykimktXr1aqxatYotxP7+fr5ZqG8CRQ8o5Ez/niuiUAzQIBF6CAEgkUggkUhgZGQELpeLCX9qtRoOh4PDnkRaJCU1m6BE3g41QmpoaEB5eTlGRkZw9uxZhEIhRCIR5gMQwz2dTsPpdKK2tvaaa14vB/ICtVotbDYbysvLMTAwwIRXIqlNTExAoVDwOni93hmsdIoeiNMdwIXoCa0hhXLFhDOqFKD7TKVScdixo6ODm1sVe0MUGx7k9QPgGQPDw8M87U08TplC8B6PB7FYjElS9GyIGf90vpTWoyZfs1NM+XweNpsNNpuNG7zQprhQikChUKCiogIajQanTp2CxWJhoiPlqsU8G5fLBY1Gwxs8QZzOAsAcCgBMIBV7/cShcblcMJvNcDqdMBgMaGpq4j72CyU3nYPJZOJ+GlRhkslk4PF4WKHPBzJeKOp1uWicWAaNRsMOwqZNm7iUkcLdCyk38ZSUSiVaWlrQ0NAAjUbD5eVUakgRZ+p2SR476bG5MNdniMBosVjgcrmwevVqVFdX47bbbuO0xkINULpu5a9QKOB0OpkEoVAomHxDbQmJ0UxWH4VGxd3QiKBXV1eHXbt2weFwcEtYi8XC40Knp6exevVqrFy5En19ffj5z3+ObDbLZKSHHnoI7e3taG1tLcb1uaTcdrsdhUIBFRUVCIVCHJrs7e1lUh55N7TokUiEy+DIC6Q8TmNjI0wmEzo6OuB0OrF27VpYrVY4nU5MT0+jsbERa9aswejoKFwuF3d4ozCRRqNBR0cH1q1bx5tnsUEbk8Viwb333ouGhgYcOnSIQ7Y0ZplY6g6HAwqFgodR0NrX1taiqqqK56SXlJRgxYoVMJlMWLt2LRwOB5fv6fV6lJWVIRAIwOVyIZ/Ps3K5/fbbIZPJsHHjRg6jLtSmQB7Zvffei/Xr16OqqgrDw8Po7+/H0NAQ5HI5R7LIEKMcMFW
p0HrTumm1Wl73trY23uRLSkq4FIjKIQGw3JRHb25u5hTTQioClUqFhoYG7rOxYsUKbnbk9/sxPj7OXisZe+IooEajgdPpnDF1kbxXGvZEPfopVUByy+UXhsxQ8ygifZlMpgXzAOn3yOgpLS3F9u3bUV5ejng8jlgshtHRUezbt49TPvR8U/8S4P1UDBm7ZNyXlpaio6MDDocDq1atQjweZ8OHDAm9Xo/Vq1czx4SiYQspt1h+Wvddu3ahpaWF+26Mjo7i5MmT8Pv96OnpuSiCR9eNZKB5G/S7tMZk7FELcOIQGAwGbNq0CSaTib3e2WmUhYaYsLp69WqYzWa43W7ccccdCAaDOH36NHdAnJ6e5nWh5nYUwZPL5Xx9KI3pdDo5ukmzOaqqqmCxWLB+/XqYTCaeyrqQFRzXrfwpTJ3JZJiwRItFRLVAIIDh4WH+DnmPNIhDXMLW2tqKD33oQygrK0NTU9OcrQtramqwcuVKnDp1Cs899xxyuRxKSkpQXl7OG/NC92GmfJwgCHA6nfxgpFIpjI6OYmBggENA9CAIgsBkH6fTyTe/TqdDY2Mjbr/9dtTW1uLOO+/k0JM4hF1RUYGmpiYEAgEoFBdGqVK5HU0Ya2xs5IdtocLfarUaRqMRd999N1paWnjIDLXfTSQS8Pl87OWLQ32kDMnQm56e5kqNzZs3o6amBm1tbbDb7ewtmM1mVFZWIpFIoKysjMvilEolKioquHVyscNiYpDy12q1WL16NXK5HKxWK3p6enDw4EFEo1Guy7+Ul0PNnjQaDW9uZOxt3bp1Rg01pcKoKoKUIHFoyCNYyM2QNluVSsUhyPvuuw/t7e084ra3txfd3d0zvHkCKT+dTgeLxcIMfTLmjUYj2tvbYbVaWcmJCZTxeJwVBvEtrqY99PXKTnnn0tJSrF+/HrW1tcx47+zs5NI0sfKn75ISp32RSibr6+thNBqxbt06zqvn83lYrVaeHFcoFKBWq7lrpljmxWqvSwZLa2srWlpakEwmEY1GMTo6Cr1ez6z02fl6AHzOJpMJK1as4HOWy+Xc4IxSAxS1Ky0thcvl4g6KSzEyWAw6PnFLkskkNm7ciKmpKbz44otcsp1MJpmFbzabkUwm4XA4sGbNGiZoU7WGTCZDc3MzamtrYTabeRiX0+mEVqvlWSWLgetW/hS2MJvNuPvuu9HY2IhYLMaMVZq+RexlQRCg0Wiwbt062Gw2JohRNUBTUxPPTp7v4aZNuKamBn/2Z3+GRCLBG0t1dfWiDPYhJUhWKrXYHBkZYa8+Ho8jEAhw5EMmk6G1tRVGo3FGyYbRaGQPgOrU58rliUuu1q1bN4MYZrVaORy/UAqQQIrQ5XJBp9Nh586dqK2t5fep/plyXJTHJ69cJpOhqakJ7e3tiMViWLVqFfR6PTZu3Aij0TjjwScOCHmVVVVVnBYi40I8JGoxvAK6t1wuF0esXC4XYrEYfD4fp3kAzBhrS0qMStPIA2xubkZJSQlvirTJE9tbLB91SRR7UosFkpsMELPZjIaGBlRXV8NisXBvD1ozmUzGHlt1dTWam5uZK0OKjXqU0/Q4tVrN6S/6LgBuZrKY6yyGXC7niE1paSnn+KkxC5UvitM49B2K1tBeR1UZK1as4LbEgiCwcUD/p54n9FtLITfw/j0mbil85513wu/3c5pPDFp/8vwdDgffr3K5nAmaVBUmTiNSv4KFNuyuBeKS7e3bt3MTsmw2C4fDAYPBALfbjdWrV7MukslknPIlg7CyspINYLoGs1tkLwZkwmWSMVVVVRgfH7/kj1CIMxaLIZlMoqurCwMDA5z3HB4exv79+7mTm9VqxeOPP46WlhYOZ5FXQYoFwCVvADH7HXj/Bi1WOOxq5KYc4KFDh3Du3Dm4XC64XC4MDAygs7OTW/eqVCo89thjaGxsRHt7OxsM1JdbHNaa7/zFLVDF/xd/73pkvxK5xbJTeoP6HcRiMYTDYa7XnZiY4NAZEfh0Oh0zl+m
BoL+By6+7GCTrYqz37PMg+cUleRQGJG+IjDmSkeqVaWOfvW6XW/diyUu4FrnFvTaoAoPSWURwJRkpQkEjnYH3jYjZMs8l0+xWqkslN50L5e4pJUNpLIroEchr1uv1XH45u/Z/rtr42Vjq9RZDvObia3Gpz4mfa+BiOWbLV2x5Cdcjtxiz5Z79XBIPjBw1en2+53cp5S4K4Y8WmKxUKkmgzZHyl5T7LC0tRUtLC4d4yPK5mnAHPUCLPcFv9jlQBEAmk6G6uhoAOP+jVquRzWZRWVnJoT9q1FBWVsY8BQpXX4nVJzZylhLizYs8U3H6Ri6XI5lMsoJvbGzk8h8KfYtLOa/muDcCZhst4kgNebYA2LKn+5U8IjFx6UqPdyOA5BArZeo1QZuemO1Pe4KY2Ho1UYsbRW7gYgNbnBIhTg+B7gdq6y1OUxTDSF8KXKmCmq38bzY5LwWx4TYXaJ0XMz1zrShaE3wKSQqCgNbW1hkNdshKFmN2Y4Yb/ULNB5Jbo9Fg7dq1WLNmDd8cra2t2LJly4wuV+KNYClDecUAPQDErxDX9tfX1wN4P0Ix2+uZy/O52UDyU4iW8rpiz3j25ifeQG9m2cXnTnltQRC41nsuOW92mQlksIo3eLr3xZ+hvxcicnEjY6G82JsBN9OeVtQJOLQZ3qyK/FqxXOUm3Ij5ucXEct7sgD8cpX61WK5yS7g0bpZ7Ynnv2hIkSJAgQcIyhKT8JUiQIEGChGUGSflLkCBBggQJywyS8pcgQYIECRKWGSTlL0GCBAkSJCwzSMpfggQJEiRIWGaQlL8ECRIkSJCwzCApfwkSJEiQIGGZQVL+EiRIkCBBwjLDZQf70NSt5Qa3243KysqlPo1FhyT38oIk9/KCJPfygs/nQyaTmfO9yyp/CRIkSJAgQcIfFqSwvwQJEiRIkLDMICl/CRIkSJAgYZlBUv4SJEiQIEHCMoOk/CVIkCBBgoRlBkn5S5AgQYIECcsMkvKXIEGCBAkSlhn+H/6SOlNVxHAZAAAAAElFTkSuQmCC", + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAf8AAABnCAYAAAAOs0skAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy88F64QAAAACXBIWXMAAAxOAAAMTgF/d4wjAABf6klEQVR4nO29eXRcZ5km/tS+SLWqqlSL9l2yLUe27MTYie3YzkYyTWAySXpokkDT0BBgepgZZqbnADNAN1v3NH0GGDgTaBJIp0kCiUkCNE5sx453xfKufSmppNr3fbu/P/x7P9+SJa8lyUb3OcfHdm33vve793u3531fEcdxHAQIECBAgAABKwbi5T4BAQIECBAgQMDSQlD+AgQIECBAwAqDoPwFCBAgQICAFQZB+QsQIECAAAErDILyFyBAgAABAlYYBOUvQIAAAQIErDAIyl+AAAECBAhYYRCU/wrG8PAwnn32WXR1daGiogJKpRI1NTXYsGEDnn32Wbz66quXfWfbtm0QiUTYt29fWc7hq1/9KkQiEb761a9e83eefvppiEQi/NM//VNZzuFWxUqRc7HQ0NAAkUiEiYmJJTnevn37IBKJsG3btiU5ngABNwNB+a9Q/OpXv8KaNWvw/e9/H16vF5s3b8ZHPvIRdHd3w+Vy4fvf/z4+9alPLfdpCigjBOV0cxCJRBCJRMt9GgIElAXS5T4BAUsPj8eDp556CplMBl/84hfx9a9/HUqlsuQzfX19eOWVVy777vPPP49kMom6urqlOt0Vi7/927/Ff/2v/xU2m225T0XANWDjxo24cOEC1Gr1cp+KAAFXhaD8VyDeeOMNxONx2O12fPe73533M+vXr8f69esve11Q+ksHm80mKP7bCGq1Gh0dHct9GgIEXBOEsP8KhMfjAQCYzebr/u5COX9+fnp8fBx/9md/BqvVCoVCgebmZvyP//E/kMlkrutYJ06cgM1mg0Qiwd/93d/N+5kbOdZLL72EHTt2wGg0QqFQoL6+Hh//+McxNDQ07+f5uePXX38d9957L4xGY8l14IeEX331VWzZsgVarRYVFRXYvHkz3nrrreuSHVg458/nSfh8Pnz2s59FbW0t5HI5amtr8bnPfQ7hcLjkO9u2bcP27dsBAPv372fnKxKJ0NDQcNmx3377bXz4wx+GzWaDXC6HxWLBo48+isOHD897rnz5f/rTn2LTpk3Q6XQlOXf+vbN//37cd999MBqNUKvV2LhxI1544YUFr0U+n8f//b//Fx/4wAeg0+mgVCrR2tqKz3/+83C5XNd2Qf9/TE5O4lvf+hbuvfde1NXVQaFQQK/XY8uWLfjRj36EYrFY8nm63nNlpT8k39XSKgMDA3jmmWdQX18PhUIBo9GIHTt24Je//OW8n7+RdRYg4JrBCVhxeOGFFzgAnEQi4fbs2XNd3926dSsHgNu7d2/J60899RQHgPvCF77AabVarr6+nvt3/+7fcTt37uRUKhUHgPvQhz502e995Stf4QBwX/nKV0pef/311zm1Ws2pVCru1VdfLcuxisUi97GPfYwDwEmlUu7ee+/lnnjiCa6trY0DwKnVau63v/3tZd+rr6/nAHDPPvssB4Dr7e3lnnzySW7r1q3cu+++y3EcxwHgAHBf/vKXOZFIxG3evJl7/PHHubVr13IAOJFIxP3qV7+6rmtNcv70pz+d95p9/OMf52pqarjq6mruwx/+MPfQQw9xOp2OA8Bt2LCBy2az7Dt/+7d/y91///0cAK66upp76qmn2J8vfvGLJb//xS9+kQPAicVibuPGjdxjjz3G3XnnnZxIJOIkEgn3k5/85LJzJfmfffZZTiwWc1u2bOGefPJJ7s477+QmJiY4jrt073z+85/nxGIx19XVxT3xxBPcPffcw4nFYg4A9x//43+87LfT6TS3c+dODgCnVCq5Bx98kHv88ce52tpaDgBnMpm4vr6+BddtfHy85PWvfe1rHACusbGR27FjB/fEE09
wW7du5eRyOQeA+/CHP8wVi0X2+V//+tdsLQCUXLunnnqK8/l8HMdx3N69ezkA3NatWy87lzfeeINTKpUcAK69vZ174oknuHvvvZeTSCRsLefiRtZZgIBrhaD8VyBisRjncDiYUtq2bRv3ta99jXvzzTc5r9d7xe9eTfkD4P76r/+ay+fz7L0zZ85wFRUVHADu0KFDJd+bT/n/4z/+IycWizmz2cwdPnz4snO40WP98Ic/ZMri5MmT7PViscjOQ6/XX3YNSIlIJBLu9ddfn/e60Pno9XruyJEj88rY1tY273cXwtWUPwDu6aef5tLpNHvP6XSytX3xxRdLvncl5UT48Y9/zAHgWlpauFOnTpW8t3//fk6j0XByuZwbGhoqeY/OR6vVzrtmHHfp3gHA/c3f/E3Je/v27WOG2+9+97uS9770pS9xALjm5uYSRZ7NZrlPfOITTJFnMpmS7y2k/I8dO8adOXPmsvNzuVzMWPvlL3952ft07gthoevrdruZsv76179eYlgcP36cMxgMHADuxz/+ccn3bnSdBQi4FgjKf4ViYGCAu/POO9nmwv9zxx13cD/84Q9LlCrhasp//fr1JZsb4dOf/jQHgPtf/+t/lbzOV/6FQoH7D//hPzBFOTIyMu+53+ixmpubOQDcP/7jP172nWKxyHV3d3MAuG984xsl75ESmc87I9C1m++30+k02/ydTueCvzEXV1P+NTU1XCKRuOx73/zmN+c936sp/0KhwNntdg4Ad+LEiXk/8+1vf5sDcFm0gOSfe835oHunp6dn3vcp4rBr1y72WiqV4iorKzkA3O7duy/7TiKR4KqrqzkA3C9+8YuS9xZS/lfC73//ew4A99hjj1323o0qf4o0rF+/ft7vffe73+UAcK2trSWv3+g6CxBwLRBy/isU7e3tOHLkCI4ePYovf/nLuP/++xkHoL+/H3/5l3+JBx54ANls9rp+9+GHH563HKqzsxMAFszPJpNJfOQjH8E//MM/YMuWLTh8+DCam5vLdqzp6WmMjo4CAJ566qnLviMSifDMM88AAPbu3Tvv8f7tv/23VzwfAHjkkUcue02hUKCpqemyc7pZ7NixY15m+dWu9UI4efIkZmZm0NzcPC/ZEwDLZx86dGje96/lGn3sYx+b93Val4MHD6JQKAC4yPuIx+MwGo3zXlu1Wo0nnngCwMLrNh8ymQx+85vf4Mtf/jI+/elP45lnnsHTTz+NH/3oRwCAwcHBa/6tq4F4IfPddwDwiU98AsDFvhszMzOXvV/udRYgABDY/iseGzduxMaNGwEAHMfh5MmT+M53voOXXnoJe/bswfe+9z385//8n6/59xaqBtBqtQCAdDo97/v/+3//b+TzeaxevRp79uyBQqEo67Fog6yqqmLvzwUZGwttpvMR427mnG4W5T7W2NgYAGB0dPSq9ew+n2/e16/lGjU2Nl7x9VQqhUAgAIvFwtZioe8AV1+3uThy5Agef/xxOJ3OBT8TjUav6beuBVeTQa/Xw2g0IhgMYnp6Gna7veT9pbynBKwcCMpfAINIJMK6devwz//8z0gmk9i9ezdee+2161L+YvGNBZM++MEP4uDBgzh79iy++c1v4itf+cqiHetGoVKprvqZpTynch+LWO5WqxX333//FT9rMpnmff1artG1gOO4svzOXCSTSXzoQx+Cx+PBM888g7/8y79ES0sLtFotJBIJhoaG0N7evmjHvxEs9X0uYGVAUP4C5sV9992H3bt3w+/3L8nx7rjjDnzjG9/Arl278NWvfhWxWGzBHgQ3AofDAQAIBAKIRqPzev/k+dJnVxpqa2sBXIyOLGZL4fHx8Xlfp5I5pVKJqqoqAJfWYqHvANe3bu+++y48Hg/WrVuHn/zkJ5e9Pzw8fNXfuF44HA4MDAyw85yLSCSCYDDIPitAwFJAMClXIK7Fq6GQaE1NzWKfDsOqVatw4MABNDQ04O/+7u/w6U9/+rKa6xtFTU0NCw/Pp9g
4jmOvUz38HxvkcjmAizXz82HDhg0wmUw4f/48zp07t2jn8fOf/3ze159//nkAwJYtWyCVXvRLent7UVlZiWAwiN27d1/2nVQqhZdeegnAta0bKdmFQukLnRsAyGQyAAtfv4VAPImf/exn875PRkhra6ug/AUsGQTlvwLxgx/8AE899dS8pC2O4/CrX/0K/+f//B8AYGSqpUJzczMOHDiA9vZ2/OhHP8LHPvax695sF8J/+k//CQDwta99DadOnWKvcxyHr3/96+jv74der8cnP/nJshzvVgMZcsPDw8jlcpe9L5PJ8JWvfAUcx+HRRx/FwYMHL/tMoVDAO++8gyNHjtzwefT19eHb3/52yWsHDx7E97//fQDAX/3VX7HXlUolPvvZzwIAvvjFL2JycpK9l8vl8IUvfAFutxuNjY3XRDYkktzbb7+N8+fPl7z34x//GP/yL/+y4Hfp+l2vYfTJT34SWq0W77//Pv7mb/6mxPg+efIkvv71rwPAdaXXBAi4WQhh/xWIXC6H559/Hs8//zzMZjN6enpgMpkQDodx/vx5Fn796Ec/ypjIS4mamhq8++67uO+++/CLX/wCiUQCL7300jWRAK+ET33qUzh06BBeeOEF9Pb2YuvWrbBYLHj//fcxODgIlUqFF1988YY6H94OqKurQ29vL06cOIE1a9agt7cXSqUSJpMJ3/zmNwEAzz77LJxOJ77zne/g7rvvxqpVq9DS0gKVSgW3243+/n6Ew2H88Ic/xF133XVD5/H5z38e/+2//Tc8//zz6O7uxszMDA4cOIBisYgvfOELeOihh0o+/z//5//EiRMn8Pbbb6OzsxPbt2+HRqPB4cOH4XQ6UVVVhZdffplFNq6Enp4e/Mmf/Alef/119PT0YNu2bTAajejv78fg4CD++3//7/jGN74x73c/8pGP4Lvf/S527tyJe++9FxqNBgDwrW99i6Up5kN1dTV+8Ytf4LHHHsNf//Vf44UXXkBPTw+8Xi/279+PfD6PZ5555o/W6BRwa0Lw/FcgPvGJT+C1117D5z73OTQ2NuL8+fN4+eWXsXfvXkgkEjz55JP47W9/ixdeeGHZyEYWiwX79u3Dpk2b8Nprr+GRRx5BMpm8qd8UiUR4/vnn8eKLL2LLli1seFEymcTTTz+NkydP4sEHHyyTBLcmXn31Vfzpn/4potEo/uVf/gXPPfccC5sTvv3tb+O9997Dv//3/x7xeBy/+93v8Oabb2JmZgbbtm3D//t//w+PP/74DZ/Do48+ij/84Q+wWq146623cOzYMaxbtw7/9E//hH/4h3+47PMKhQK/+93v8IMf/ABr167FgQMH8Otf/xoymQyf+9zncOrUqQVLE+fDyy+/jO985ztob2/HwYMH8a//+q+oq6vD73//e/z5n//5gt/72te+hv/yX/4L9Ho9XnvtNTz33HN47rnnEIvFrnrMhx9+GO+//z6eeuopxONxvPLKK+jr68Pdd9+Nl156aV7+gQABiwkRdyvRWgUIEPBHi23btmH//v3Yu3evMFZYgIBlhuD5CxAgQIAAASsMgvIXIECAAAECVhgE5S9AgAABAgSsMAg5fwECBAgQIGCFQfD8BQgQIECAgBUGQfkLECBAgAABKwyC8hcgQIAAAQJWGATlL0CAAAECBKwwXLW9r0Kh+KNtd3ol+Hw+Qe4VBEHulQVB7pWFlSx3JpOZ972rKn+z2Yzp6emyn9StjpqaGkHuFQRB7pUFQe6VhZUs90IQwv4CBAgQIEDACoOg/AUIECBAgIAVBkH5CxAgQIAAASsMgvIXIECAAAECVhiuSvi7FcBxHDiOQzQaRSgUQjabRSqVgk6ng8PhgFQqXba584sJktvn88HtdqNYLCKfz0On06GmpgZSqfSPUnZ+x2mRSLSMZyJAgAABf5y4LZR/sVhEsVjE8PAw3n33Xfj9fszMzGDt2rX45Cc/CZFIxP78MYHjOBSLRfT19eHll19GJpNBIpHAmjV
r8KlPfQparRYVFRV/NMqflP7ccRN/bOsqQIAAAcuNW1b5cxyHbDaLYrGIVCqFTCaDiYkJTE5OIhwOw+12w2Qywe12Q6fTQafTMS/4dlYWxWIR6XQahUIBuVwO2WwW4+Pj8Hg8yGQySKfT8Hq9mJmZQTqdhsPhAAAm9+0oOxk5+Xye/ZvjOCZ/oVBAPp+HTCZDRUUFJBIJ5HL5bS0zgaI7AJj8c/8QxGIx5HI5JBLJcp1uWVEsFkv+P1degkgkYgbu7b7WfNzOsgi4/XHLKv98Po9gMIh4PI4zZ85genoax44dw/Hjx5FKpRCPxxEKhWA0GtHQ0ICdO3dCp9NBJpPdtpsjGTxjY2OIRCKYmppCKBTCgQMH0N/fD7FYDIlEgmKxiFdeeQUOhwOPPvoojEYjFAoFpNKLy3k7bSrFYpEp90gkgmw2i2w2i3w+j6GhIQwNDSESicDv96O6uhrbt2+HTqdDU1MTk/l2Xm+O45DP51EsFhGPx5FOp5HL5ZDP55HJZJBKpSASiSCVSqFSqVBXVwelUgng9lrnuSC5OY5jBlw+n0ehUGDvA5cUPxl79NrtBjJq6d8AIJFIbktZBPxx4JZR/hTaJ483k8lgdnYW0WgUo6OjmJmZwezsLFMQmUwGkUgEbrcbUqkU4XAYEokEGo3mtlIGJHc+n0c2m0UikcD09DR8Ph9cLhcikQgCgQAymQxTAtFoFF6vF2KxGH6/H2KxGHq9HsDFDeV2kJ/WulAoIJPJIJvNwuv1Ip1OI5PJIJ/PY2xsDE6nE5FIhL03NDSE6upqGI1GaLVapghvl4gPKXxa82KxiGw2i1wuh0gkglQqhXQ6zXgtgUAAUqkUarUaGo0GFouF3Qe3U+SDL3ehUECxWEQulwPHcUwOeu7pPQBMVorsSSSS20JufgqLv6/xjRqZTMYMGjKCFgIZCreyzHzwI1dk0JGstH78SA/JNff//H/fTrLT37fymt0yyj+VSiEUCsHtdmPv3r3w+/04d+4cAoEAvF4vUqkUkskkUqkUgIs3UCgUwokTJ+B0OpHJZGC32/Hwww/DarUuszRXB934qVQKsVgMTqcTu3fvRiAQwODgIBKJBPMUPB4Pstlsiaf4/vvvw2AwIJfLwWw2Y9u2baipqYHBYEBlZeUyS3dlcByHZDKJQCCAUCjE1vnkyZOIx+PMI3S73QgGg0gmk0gkEpBKpThw4ACsViseeeQR1NXVobe3FxaLBQqFghkCtyr4qYx4PA6n04lkMonh4WEkEgmkUin2XiqVgs/nw+joKDQaDerr62E0GhEOh2GxWFBXVweNRgOZTAaZTHbLbjD8FE4ul0MikWDPazgcBsdxMBgMkMvlCAaDCAaD8Hq9cLlcTGnqdDo8+OCDMJlMqKqqglKpZHLfqqBzJyMuFArh9OnTyOfzUKvVUCgUqKmpQUVFBTOCicBLzzgAFu0jufkpkFsRtEdR9CqdTsPn80EsFqOyspKl7qRSKfL5PPL5PFOQZCiIRCJm7PD/psjmrQxaS7oOIpEIcrn8lnw+l/Vq8q3DTCaDUCgEp9OJs2fPwuv14uTJk4jFYsjlcmwT4TiOebeFQgFerxeZTAYmkwmpVAqJRAKFQuGW9QTJKiSPnyIY09PTOHXqFNxuN9sclUolJBIJ0uk0+z55ilT14HQ6EY1G0dnZyQiA81nPyw2+3GT0BINBuN1uDA0NIRAI4MyZMwiFQuw7iUQCyWSShcA5joPf70cwGMSqVauQy+XQ1NQEtVoNiURyVe9puTDX20+n08zgC4VCGBoaQiqVQi6XQ6FQQDweZ8YBKf9isYhkMonx8XFks1mmMMVi8S2b7iGZyZNPJBIIh8NwOp1IJBIIBoMoFouwWq2oqKjAzMwM/H4/vF4vJiYmWDTMbDajq6sLHMexjZQiAsCtKTcpNjLufT4fBgYGUCgUUFlZyQwYnU7HFKVMJoNCoSiJEkgkEshkMqhUKrbv3aqeMClvWm+S3ev1QiQSoVg
sQi6XM6MtnU4jnU5DLBZDLBaz74lEIigUCojF4pLoCEU0bzW5gcuNXLr3SQ+RjIRrqWha7HVeFuVPF6lQKMDtdjNFf/ToUfj9fpw9exbpdBrRaBQcx8Fut0Oj0cDn8yEUCqGyshJ6vR75fB6BQACRSAQSiQRmsxnvv/8+isUiLBYLdDrdcog3L/gWcaFQgM/nQyAQwNDQEE6dOgWXy4WTJ08yL5cMA7FYDIPBALvdztIdhHw+j6mpKQSDQRw9ehRerxfr169nOVKVSrXsDwrJTXn82dlZ+Hw+nDt3Dv39/QiFQrhw4QISiQT8fj/y+Ty0Wi0UCgXbQCorK1FdXc3Ijul0GqdPn4bb7YbP54PJZMJDDz2ENWvWlCjD5QZ/M/T5fPD7/XC73RgZGcHMzAz6+voQi8UQCoVQKBSYJx8IBBCLxRjRtVgs4vz585iYmEAoFILBYMDg4CDq6+uxbt06NDQ0MCVxK4CUX6FQwOzsLLxeL4aGhnDmzBmEw2GMj48zPoNEIoHD4YBKpYLH44HP50MymUQsFmP3jkwmg9frhV6vR0tLCywWC7Zv347Ozk5IpdJbxrPiP9+Dg4MYGBiAz+eD1+uFx+PB+++/z5S8XC5HXV0dKioqEI/HkUgkmHdL9w2lRGQyGTo6OmCz2bBx40a0tLRALpezSMByg7/eMzMzcLvdOHv2LPr6+piRr1Kp0NDQAI1Gg8bGRuh0OjidTng8HkilUigUCmQyGQSDQYjFYlRXV0OtVsNms0Gv16Ourg5WqxUymYzJfSvIDlyU3+v1wuv1or+/H++99x6kUikqKiqg0WiwZs0a6PV6Jj9F9vhGAe1ZlCKhPVAul0OhUCzKeS/bLknW7czMDE6fPo2jR4/ijTfeYOVs5MXJZDIYDAZUV1cjmUwiHo9DpVLBZDIhHA5jZmYGHMdhenoa8Xgco6OjMJlMqKiouKWUP3DpIcnlcszjPX36NI4dOwav1wu3280WHwALiVVXV8NsNrNrk8vlkE6nwXEcQqEQ4vE4pqenkc/nYbfbYbFYoNVqoVKpllHaiyALmAwXl8uFs2fP4vTp0zh48CDjbVCKQywWo6KiAsCl0KlcLofFYkE4HEYgEEA+n8fMzAyCwSA8Hg80Gg26urrQ1dW1nKLOC5LB5/NhcHAQw8PDGBgYwNTUFA4fPsxy22KxGCaTCUqlEqFQCLFYjP1GoVBAOp2GRCJBNBpl6+rz+eBwONjwDqlUestsiETi9Hg8OHPmDN5//3288847iMVi8Hg8Jes9OTkJtVrNDPn54HK5IJFI0NnZidraWjgcDjQ1NQEA5HL5ksm1EPj57Vwuh5GREbz33nsIBAKYnZ2F2+3G8PBwSf57ZGRk3vWeC6lUirVr16KhoQEVFRWw2WwAcEuluUju2dlZnDp1CocOHcJrr73G0lxqtRpr1qyB0WiEz+eDTqfD6OgoxsbGoFQqoVarWUpIIpGgra0NWq0W9fX1qKqqglQqRWVlJSoqKqBQKG6Z+xy4uPbBYBCDg4PYu3cvfvazn0GlUsFsNsNsNiMQCKC6uhpKpZI5AmTkEEGd1jKbzbIoSWVlJXPkFkPeZVH+uVwO586dg9PpxOHDhzE2Nobh4WEkk0kW6gLALF/yEh544AE0NDRAoVCgoqICw8PD+OlPf4pYLIZEIoF8Po8zZ84gkUhAo9HAbDaznNlygpR+NptFX18fxsbGcPbsWUxPT2NmZgYTExMlOX6CSqWCXC7HnXfeiZ6eHvaQBAIBnD59GplMBplMBoVCAaFQCOFwGA6HA3V1dZBIJKwHwHLlCCmMl06n8dZbb2FwcBBTU1MIBAJwOp0sZTM3BKZUKmEwGLBu3TqW57Xb7QiFQjh//jySySTC4TDS6TRLHRw9ehRWq5XJv5x5UX7479SpU5iamsLJkyfhdDrh9/sZkZNv6AEXN3mlUonOzk5UVlYyT5C8nUwmw3gRZ8+ehcvlQmVlJYuO1dfXXxYaXkrwiav8Cp3
x8XFMTEzA7/dftt7AxXCuQqGAw+FAbW0ttFotzGYzU6bZbBYulwvpdBqJRAIDAwPo6+tDdXU1ampq0NjYWEIEXGrww/zT09MIBoM4cuQIjh07xkL6FOWg6ySRSFBbWwubzcYMeAr78/csyiFHo1GcPn0aNpsNKpUKbW1t6OzsXHa5Kao3MDAAj8eD/v5+TE1NYWpqCtlsFlKpFAaDATKZDD6fD7FYjOX/A4EA4zrodLoSxU6Kb3R0FENDQyxFYLfb0dTUxKIFy7m35fN5uFwuhMNh7N+/HxcuXMCFCxcAXFTi4XAYmUwGR48ehU6ng8vlgl6vRzgcRiwWY883PeP0PQDQaDRQq9Xo7u5GW1sb9Ho9TCYT+0451ntZlH8+n8c777yDvr4+HDt2DBMTE+w9sVjMwhzk+WYyGSSTSdx99924//772eZx9uxZvP7664wMmEgkcOTIEUxOTqK7uxurV68GgFtC+RcKBSSTSezevRtHjhzB6OgovF7vFWubVSoVKioq0NXVhZ07d0Kv18NoNLLv0U2UTqeZMmhpaUEgEIBKpUKxWGREmuXcFCORCJ5//nn87ne/u+p3RCIR1Go1dDodNm7ciA984APQ6XQwGo2IRqOoq6uDz+fDqVOn4PP5MDExAY/Hg+PHj0MikeDee++Fw+FY1rAgGT2xWAy//vWvceLECQwMDMDtdi/4HYpyqdVqdHR0oL6+nm1wcrkcBoMBgUAAo6OjCAaDzIsUiURIJBL4wAc+AJvNtqylrmT0xGIxvPnmm+jv70d/fz9cLteC3yG5Sfnr9XpYLBbU1tYCuLgHJJNJDA4OIhgMor+/H16vFydOnIBUKsXmzZtZ5GO5FAE935lMBsPDwxgcHMSxY8dw7NgxaDQaGAwGZDIZFtrlOA5SqRQNDQ2ora1FMplEJpOBSqVi/BWZTMbuo0wmg9///vcYHh6GVqtl1SAtLS3LKjcAZpwdOHAAZ8+ehd/vh9/vx+TkJHK5HGQyGfR6PYtSEt+H+nWQZ0vRPq1Wy6KFZDzPzs6yEtjVq1ezyO5yRXz459fX14dz587h+PHjOH36NEtV075HkU2RSIR3330XEokEmUwGuVyuJGUlFouZIcXXCR/84AexZcsW9PT0QKPRsD4ft53yJws4HA5jamoKY2NjSKVSkEqljBgElJZKcBzHvHi9Xg+FQsHeNxqN6OnpgdFoZKSpfD6PUCgEl8sFv98PjUYDrVa7rGEiInEFg0HWqjeVSpUQeihMSuWKCoUCjY2NMJvNaG5uhslkglqthlwuh1qthtVqhVQqZb9DDYBcLheOHTuGQqGA6upqtrEupfx8foPf74fH40E+n4dKpWK1+ZTLIguaNgG1Wo1169bBYrGgs7MTVquVRTwAoLW1FRaLBfl8Hn6/H6FQiKVSZmdn4XK5WJ58qXPBtCmkUikMDAxgdnYWExMTcLlcrEqFPHOSm9ZbrVbjzjvvhNVqRUtLCxobG5lHQN+xWCzYtm0b3G43+vr64PF4EAwGMTIyAqvVilgshoqKiiU3APjKb2pqCl6vF5OTk6yaAQDjoORyOSSTSRbhUSqVWLt2LUtXmUwmGI1G2O12li6iSo9wOIxkMgmxWIxkMomRkRFUV1dj48aNUKvVS572oGc2nU5jfHwc4XAYhw4dgsvlwszMDACwyBv1MSBiGBm5Wq0WJpOJVS9QPpsIrNlsFslkElqtFgBYBYjVaoXT6YROp4PZbF4y2fmKjyIygUAA58+fx8jICItipNNp9vzN5TBVVFTAYDBAoVBAJpOxCA5dp2KxyJyaqqoqxONxVgVVKBRgMplgNpvR3t6+pN1d6ZnNZDKMrHv69Gm4XC5Eo1FmzBDo3+SE6XQ6qNVqZryp1WpUVlYy44fSmdTHJpvNYnZ2FmNjYygWi5DJZEwflMPwWVLln8vlMDo6iqmpKRw7dgwnT55kmwLVegOXmJPE2rdYLHA4HDCbzSV5bJvNhh07dsDpdLIQDEUAKJ/
e3NwMjUazrMqfiHlOpxPnz5/H+Pg4M3QUCgV0Oh2y2SzL5zY1NaGqqgo9PT1wOBxYv349bDYb2xR0Oh1aW1uh1+vh9XoRjUYZSergwYMYGBhAIBBAY2MjtFrtklvI/EjH6dOnGXPbYDDAbDYzT55Im5TP1ul0qKysxObNm+FwOFBfXw+DwcAebqVSiYqKCmSzWdhsNhb6F4vFSKfT6Ovrg9VqRTAYhEajWfIGQLQper1e/PCHP8TIyAjOnDmDSCTC1psMHCrpk8vlqK+vh81mwwc/+EF0dXXBarVCr9czucm7ymQyjPvw05/+FPv378fs7CyGh4ehUqmwfft2ZiQupdx0fsFgEL/5zW/gdDpx4MABTE1NMUO9srKSGSi0ZnQfbN26FT09PUxuYoST8stms2hubkYsFoNIJIJWq8XAwABOnToFuVyOrVu3wmQyQaVSLakXnM/nEY/H4XK58KMf/QhTU1M4ceIEgsFgSW5fJpOx3hV0H4hEIuj1ethsNrS0tMBsNgO4vD8ART727dsHAJienmb7XEVFBTo7O7Fr164lIbnSuVF0Z2ZmBj/5yU/gcrnw3nvvwe/3M8OfUrPkANDrcrkcRqMRNTU1UCqVUCgUqKurw6pVqxgRMpPJYGxsDNFoFB6PB8lkklVD1dXVweVyYdWqVaitrWVkuaXY32ktpqen8b3vfQ8ulwuzs7MlBi49d+TM0J4gkUhQV1cHu92OVCqFVCrFUppqtRomkwmZTAZ9fX1wu92YmJhAOp3G6OgowuEwDAYDjh07hu7ubvzVX/1VWcp7l0T580t9hoeHMTY2xhjO84W8ydMxGo2orKxER0cHGhsb2YbITv7/zyeFw2F289OxqE52vt9fKlC+OxwOY2BgAJOTkwiFQiW5fXrAabOQSCQsvF9bW8uMF35jE7FYzLwGqvWnVAnVTxMxkGrmlwrk8VMd/9DQEFwuV4mHTl4i3xtWKBQwGo3Q6XSwWCwwGo2s1JEwX8kM/RaVBVK9vFwuXzK555arzszMwOPxwO12sygEQaPRoLa2lpV4VlZWoru7G2azGQ0NDeyepw1tbmtbnU4HsVgMs9kMm82G6elphMPhkoZXVqt1SZrC0PVNp9OYnZ3F7OwsJicn4Xa7mbdH3k51dTXq6uqQTCah1+shlUpht9uh1+vR1NQEs9nMSKr0/NPvU45YJBLBZrMhHo/D7/djamoKsViMGZcmkwnA4hMfab3T6TSLYFLUiYi4KpUKMpkMVqsVzc3NiMfj0Gg0KBQKkMlkjMzW2NgIk8nEmnTR79M9k8/noVQq0draypj0Pp8PkUiEESVTqdRlXRAXA0RejcfjGBsbw/j4OGZmZuByuSASiVBRUcEcN4PBAIvFwoxcek7lcjlaW1tRU1MDtVoNlUoFu90Om83G8tkUCYzFYkgmk9DpdDh//jxSqRSrIDEYDMyooCqZxVpzikonk0lMTk5ifHwcs7OzzEvnOI7xFRQKBTQaDQAwwz2RSLCKDYpoEaGP9rmqqirGh4lEIqirq4PH42ERlmw2i+npaVgsFoRCIeYM3YzRtyTKP5PJsHDV3//93zPlD1zy8glkyRmNRpa//ehHP4qamprLPFgqEyJGOLGiM5kMJBIJC6ksNWhzCAaDOHz4MIaGhvDzn/8cHo+npI4dAAsB0UYtl8tRW1uL2tpa7Nq1Cw6H47JOYGQYqVQq9PT0wOv14ty5cxgbG2MVEcFgELFYDEqlcsmVYDgcxrFjxzAwMICf/OQn8Hq9iMfjrO41n8/D6/WiUCjAYDCwWv3Vq1ezzZLfqpk2Qz4BjNo7UydEUvrUCZLjOBiNxiWRm8K/TqcTL7/8MpxOJ44fP84MHj5WrVqFXbt2IZfLIR6Pw2g04v7774der4fZbGYb+NyaYJpnQOSou+66C0qlEocOHYLb7cbY2Bh++9vfoqOjAzU1NezeX0wlSMpgZGQEf//3fw+3241Tp06xZlxisRjr169HR0cHLBYL7HY
75HI5KisrmbGnUqlQX18PtVp9GXGNCL+0H2g0GmzduhVr1qyBVCrF9PQ0nE4nfv7zn6OlpQU6nQ5VVVXQ6XSL+twTuW9oaAjf+973MDMzg/7+fhbqJsVeXV2NVatWobe3l0X2iMxWWVmJjRs3wmg0zkva4/dAKRaL+Iu/+As8/PDD+OUvf4l//ud/hsvlwssvv4ze3l5s2LABNpuNpcgWC2TU9/X14Rvf+AY8Hg9mZ2chFovR0tICrVbLSnq7urqwatUq5gxQSlImk+GjH/0oGhsbWa+Dub0qOI5DZ2cnisUi7rnnHqRSKbz55pt49dVX4fV6ceTIEczMzMBqtaKxsZG1dp8bdi8XstksKy/+wQ9+gJmZGZw5cwa5XA4WiwWVlZVoamqCwWCA0WiE2WxmTl8mk4HX64VCocAnPvEJNDc3M8OQn7KgtX/44YdRLBZZ+mTfvn149913Wf+bZDKJN954A62trbjrrruYoXEjWFTlz2eDBgIBxgL1er0ln+EbAHK5HCaTibGXa2trYTQaGSGED1KWFF6ZG1ZbLqb7XM/A5XJhenr6MsVPZCeSoaKiAhaLhT3IGo3msppWyhdSMxCVSsW8JVKOpIz4PIqlkply3uPj45iamioxePgGDqUwDAYDi3RUV1fDYDCwh4O817nGC39t+TXGlI/jR36WAlTmRN6Yy+VCPB5n4T6K1FBXt7q6OuYRUP6SyDwL1erTfUwkQIPBgKqqKtbNkToCmkwmNgxpsY0+iuQQAZE6MlIjG7VajdraWtTV1cFsNrN6fopskIFCVS3886V/01qTktDpdBCJRDAYDIwcNjk5CY1Gw9JmN7MhXgn8sHcikYDH48Ho6Cg8Hg8zbi0WCzQaDZqbm1k0h9ab+ApqtRpKpZLVcvMVP/855z9TlP6yWq3Q6XSMO+XxeOD1eqFSqWCxWBZNbtrPAoEAxsbGMDIygmg0inQ6DaVSCZvNBrPZzPpa1NfXo6WlhSl/4nbJZDLWg4WY7vz9jS8/cPG+p1RoU1MTxGIxxsbGkEgkMDs7C6lUypyccrP/6RwymQz8fj/Gx8cZ8TSZTEIqlcJsNrM9u6qqiqWoU6kU/H4/0uk04zpZLJaStNaVjktcoObmZoyMjCCZTLKqj8nJSYhEIqxZs4ZxqG5E7kVV/lTuNDIygueeew6zs7NIpVKQyWQsHE0bN13o9vZ2fPzjH0dtbS3WrVvHajsXAimHaDSKSCQCmUzG2kgud9mP1+tFX18fpqamSprzAGA3QFtbG9auXYvq6mr09PTAbDajtbWV9XKfu6ikNGmGAfV/j8Vi7Bh85bgUxg+/u1U+n8fo6Cief/55eL3ekvplkUiExsZGtLa2orGxEV1dXdBqtbBYLFCpVKyJx1xGK8nDL++xWCyQy+XQ6/VQq9WsXp66oS0VyZHuPZfLhcOHD+Ptt99mm6JEIoHdbodOp8OuXbvQ1dWFjo4ONDU1lSg1IqtdqWSLXxdPJKm2tjacPn0aABAMBnHs2DEAF9tBA1hUzgM1Njl37hwOHjzISnVJ8f/Zn/0ZmpqacOedd7KonVKpLMnR0rnxn1Uybsmw43d3I5a8wWBAT08PJicnMTU1hffffx9isRinT59GS0sLC6WWG2RYDwwMYM+ePbhw4QIGBweRTqeRz+eh1+vx8Y9/HKtXr0ZjYyMMBgPUajXUanVJKJ+ey7neHx/8e5/4AWq1Gvfccw9isRiGhoawZ88eeL1e7NmzB42NjaitrWXE2HKB0ln5fB779+/Hiy++iImJCQQCAQAX01hWqxV/+qd/itWrVzNDjt9um2Sn+SQUmZlP6c+VXyaTQSqV4q677kJbWxuOHDmCeDyOTCaD/v5+TE5OQqFQoKGhAXfeeSeqq6vLJjsZMgcOHMBzzz2H6elpjIyMAADbtz7zmc9g1apVMBqNUKvV7HzJEePPqeB35bwSqNJLoVBg06ZN6Orqwr59+3Du3DlkMhm88sorsNls0Ol06OzsRFN
T0w31tFlU5U/eeDAYxOnTp+H1ehlrkd/Bit8C0eFwMGVIPdsXAv+GIY+PQqO0eSwl+GQdsvSpgyF5onTDq1QqKJVK1NbWoqmpCQ0NDejp6UFlZSVj/y6kCKgOmF8ORJ7+3FDSUjKA+YS3wcFBxONxdg50viaTCRaLBS0tLVi1ahWUSiUL8VP5z1zMZwRQBcBcy5euzVIYPfycN1U1eDwepNNpVtFQVVXFFHVvby9MJhPz4kgJXmudNj//r1KpWPULcPH+DwQC8Pv9rHRsse5/8gRjsRiGh4cxMzODWCyGfD7PPPqWlhbccccdrEkL3xidK+98Gz/94XuABEp7GY1G+P1+luYKBAIwGAyLEu2ie5xaip87dw6jo6OIxWKsVp2aTa1evZp1qOPX7M9dj6utD8lP9wo1gmpoaEA4HAZw8d6bnp6GVCplhOlyg1JtIyMj2L9/PzN2iLxaVVWF1tZW1NfXs1D+QikM4PqGcJHRp9FoUFFRgWAwCKvVyqqmwuEwRkZGkEqlsGbNmrLKzU9r7dmzh+XeiXhstVrR1dWF1tZWKJXKBdNs/Gj09chNTl5FRQVaWlpgMBjg9XpZpcHw8DDrgnjLKX/arKiTXyqVgl6vR7FYhNfrRS6Xw65du1hOkBjeHR0dJSNqr/T7ZGHyG2hQGJZPpFtK0DmEw2EMDQ0hHA6zjZGG7+zatQurV69GTU0NrFYr1Go1y/9dibDEf4hEIhFMJhNyuRwLdfI9jKt5k+UCHTOdTiMUCiESibA1oeYd69evR1VVFbZt24bW1lZotVqWpyNlvZDC5stL66xQKFj7Yn6Yn8L/S7HuZPCEw2FcuHABgUAAarUaNTU1ePDBB1FVVcU80dbW1pKSrisZaHylMB/pj8g+5B3Q63TfkIe6mHIXCgVWZhiJRJgy3rlzJ2w2G3bu3FmSzriSl0cKbr73CKQI6TrQdaTwKaWbqFHYYiAej7MJk8AlA8xiseDBBx+Ew+FAT08P69R4pbTVlf4/H+gZMZvNWL9+PUKhEKRSKcvBU3ldOUGeK6UWUqkUurq6EIvFEAwGUVdXhwceeIBxlBQKxYI16DezB/Hvf71ej9WrV2N6ehrj4+Pw+/149913YbFYcO+996KxsfGGj8NHsVhkBqXT6UQ6nWb7qtVqxTPPPIPm5mbU1dWxtOuV7l3+39cD2r/tdjsee+wxjI2N4ZVXXkEikcAbb7yB06dPs9bH14tFV/4ikYg1PKAaTQCsu1NHRwfuu+8+1NfXw+FwlIR2r8V7m9vbm8LuczkASwk6h3g8Dq/Xi0QiAeBiqN9ut6OmpgY7d+5ET08Pa+Iyl/RyLccQiUTQaDRsCBD/PWDpxtzyc4LE0KW0hsViYRtWU1MT1q9fD4vFUrK+1yovfZZCprTx0+An+hxtWosd+SGjJx6PY2ZmBvF4nEVzduzYAbvdDrvdzljN10PA4ytE4JLnSzJR/wb+sBO6piT/YoGMHhotHY1GUVFRgaamJuzYsYMZ8bQpXs2oo39fTWnwjSWKJPG9aup4uRjrznEcqzKIRqMAwPq3NzY24v7774fNZmNtXPn3N//eXUi2q4Hk1Gg0LL9O/Q6mpqYgkUhYeLkcoGe6UCggEAhgeHgY2WwWDocDyWQSCoUCTU1NzMi7EsnyZuTmf4f4EvX19ayvCRHjaPR3OUCyh0IhZmDwOUTkxNTU1FwTufRmDR9Kldx1111QqVT4zW9+g3A4jBMnTuDcuXP4i7/4ixv67UVV/vSAdnV14Utf+hJyuRyUSiUr18hms9i1axfa29tLyC/XqrQoBDc9Pc3q5amUDMANEyFuFGTlS6VSqFQqrF69Gs8++yyy2SzbrKmcq6WlpSQseD3W4VzCG39zIUXDL7tZbNB6UT+FzZs3sxI3rVYLtVqNnp4e6HQ6aLXaeQlOV8J8IWIK/ZvNZjQ1NbEWyXQ9l6rFrUh0kYR17733wufzobu7GyaTCe3t7aisrGRlSNd7Plf7LPUDJwN
IrVbDbrfDarWykrnFlr+xsREPPfQQUqkUIpEILBYLOjo6GJHvWnKb1wu69ysqKlBdXQ2v18tCo0R+XSy5iU9CZEuDwYDm5mbU1NQwRTCXwEbnfKOY+10qi62srGSNf6jOvNygvYPSeSqVClVVVbDZbFizZg0cDgcjnV6PIX+joEhvQ0MD43eJxWJoNJqyVXhQBCmTyeDQoUM4evQozp49CwCora3Fpk2b0NbWhrq6OhgMhiUbIkadb4ksSry5m8GiK3+JRIKGhgZ89rOfRaFQYF34otEocrkc6urq2M1zvYqamgY5nU4oFAro9XoEg0E2MWmpm7wAKDluS0sLPvOZz7CbViKRsEYk/HO7EaVAI42BSyVRlCOqrq5mY2CXwgAgOSgHr9Pp0NzczI5Lcwb44e7rxVwvmCJEDocDLS0tiEajmJmZYe8thdFHshCLlwwxUswikaikTPNquJ51om5wfOXf3NwMm83GRjsv9jWoqalhDHO+0cs34Pl563KAfqeiooLVQut0OtZPYDEm3fGjLdSQpbq6GlVVVaz/iM1mu4yoWk65CWKxmFUKUAq13OF+Psj7pw6dVqsVdrsdnZ2d0Gg0MBqN12zcluNa0DNPaQ+qAKmuri6b8g+HwwgGg/jDH/6AV155hUXROjo68Pjjj6Ourg42m23Rpu3NB6p6ICOXuqPeDJbEbOErRJFIxAZbUH3+jYanM5kMJiYmMD09jUgkwkJ+FI6rqqpalsl2fAVN3jCFP/k5bv5nbwTFYpENRaJJaVTuBqCknnQpQF4Zf0oVvX6jTWfm20D59f4+nw/T09MlodjrDbHfKPghaH6p2tzo1UJK/UY2SzIwqNSLvD66JiKRiJFeF0t+fuSJmM30Ot/YpM+V2wAALtZeUwieenssNtGPZowkEgnWcTGVSkGtVrNWzUsxVpk88cUkdlJTm0QigQsXLuDs2bOstFMqlaKrq6tkL1uq9CIZO8TtEIlErLyzHI5eoVDA9PQ0JiYm4PP5wHEcq0CyWq1oaGhgBNalBP+Zo+g2X6/cCJZE+fNPkHrz87sg3eiNk0gksH//ftYCMZvNsvAI5daXo+SPZKJ8Pr3Gf78cyGQybFDQ1NQUezAo90Ue4FIqf1rruZvgjSr++fKlHMchmUwiEong/PnzOHDgAIuCqFQq1hBmseXms9ep0mI+Gfh/E650bgu9x8/D+nw+DA8Ps5IrynlzHMeMn8XaoPgKHbjEAQAuTSUjr4xv9N8M+NeR4zjG8na5XAgGg1Cr1YvGdSBCaTgchs/nYyWsIpGIGbl2ux1AaYvXxbj/aP0TiQQCgQAbf15u5PN51sFv9+7d2LNnDzNuHnnkEWzbtq1kIt1iP2u07plMhhHxCoUCJBIJqyAqh+efzWbx5ptv4vTp0xgcHIRIdLHMsqqqCu3t7ejs7Fz0TorzgfQJVXWk02lmlNzSyh8or/IjNjN1s6P6UZp+RoOAbiTXWk7cjGFzJfAt/1gshkgkwjZdGodLHIqlHHZyNQ/vehTgQt/nl1HG43GkUilGdOJ74Uux7vwmLHOHUvHLewCUhMFv5rzomNQ+mUK+MpmMVVYsVqkjefXUxY2iL+QRApdSfdSOt9yg6033fyqVYoYXhUXLdVy618jT9Hg8GBsbYx0rqZcEGThLcc/xJ/3xhwSV2/suFouYnZ3F4OAgZmZmGImXjF1qZ8u/r5cKc6t5rocndjUQ0Y/fqrmyshL19fUwGo3Lkkom8Ne5HLIuy0jfm0UqlYLX62V1tkQsUyqVeOCBB9DS0oL169cv+VS3xQRfcVKf55mZGbYREfmjra0NGzduRG9vLyObLQfp8Uo1zbRhzSX9zbdW/Ndoylsmk2ENXvx+P4BL9f1EslvsJj+kBPmdBenfNACkWCwyA4ymmPFDw9dzfnzDh7yy06dPw+l0AgBMJhPWrFmD9vb2m0qlLQRq0pLP5zE8PIypqSnWqyMcDsPr9UKpVKKpqQl6vR6dnZ2s+2A5zoP
uK7q+NCEzGAyySCINeylH6J2OlclkcOrUKUxMTOAPf/gDTp06xci73d3dWLt2LeMc8HuMLBaoy144HGZcHyKClXO9M5kMfvGLX2Dfvn2sQ6darYbBYEBNTQ1qa2tLohxLBY7jWNh7sSIeExMTOH/+PIuk9fT0YMeOHejt7V1yj5/Ad2yom+F8++z14LZS/vycj8/nw+zsLNt0Kc9vsVjQ3Ny8JISna8W1LNC1Pri0KQWDQXi9XmadUn8Ak8kEm83GmKhL1eFv7v/nesKE+ZjQVyrzmvseeT7BYJBN/ALANkCaaLeYeUi+B0zTKKmbJTWbIg4CGaX5fL4sBgkZAJFIpGTELTWHMplMi7LmdN+l02m43W4MDw8zb5dq3ysqKliN+81GeRYC5d/T6TRSqVRJqo/IUOXyAOlaU0XRxMQEJiYmWE8DjuNYc5el8vyJNE28HiLTUhVNuUD1/bOzsyXH4Zf1LQW/Yb7zoucNuNQivZwRzmQyycZOi0QiNoxrOafDzo0w8vlGN4rbSvnH43FEo1EcOXIEL7zwAmZnZ1krU7vdDpPJhO7ubqxZswYGg2HJz28+b5cWjCzV+chQ9PeVykZEIhFTLjTUw+PxIBaLQavVorq6GjKZDOvXr8fOnTthsViWJARZLBbZRD26IROJBJu4RSE52hzNZjPr7c5v7rPQuc6n+CORCN555x02KREAtmzZgs2bN+Oee+5Z1FQHyZtKpbB//36cOXOGNSyiYUO03mq1Gtu3b4darWbzG26G+1AoFBCLxVhnvZMnTyKdTqO6uhqdnZ3YunUr6x1eLkVEx6Y23W63G6+++iqOHz/OhtfQfU6d/Wgy4dy+9eUAhfvdbjfGx8dZ7XlTUxOqq6vZxMByIZ/P4+zZs7hw4QKmpqaQTqdhNBrR3d2NlpYW1NbWQq/XLxnpLR6PY2RkBH6/H2KxmDUYqq+vL+tMA7FYzHrVU8Rqx44d+MAHPoDu7u4lKe3jg891oK6S1GWwtrYWDQ0NV2wDf70QiUQl8zjovl4uZDIZjI+PY3h4mHXTNJvNN9XKetEH+9wM5pK8qKnDhQsXsGfPHqTTaRQKBRYG0Wg0rH3qck7zI5ByJGuVNkngEgmS2lfS31d6mIhxHA6Hcf78eRb5oAlpGo0GDocDVqt10XKufFn5yp8mlonFYgSDQQwNDTGmLDHwqS0mlYNdi8xzj0lDQqampnD+/HnEYjGIxRfbQq9fvx42m23Rvf5UKoVoNIr+/n4cPHiQddhKJBLw+XwALuXg8/k8uy43Y6Xzo17UzpYGutBoULPZzCIf5ZSfNl7y+M+ePYszZ86wqAOFIiUSCbRaLTQazaLkRuk8MpkM4vE4wuEwm++g0WgWpcqDOr1NTU0hkUgwo468f76Rs9ggwpvf70cikYBIdHGMbkNDA5qbm8u+5/GjaUqlEnV1deju7l4Sx2IuaG/NZrMIBoOIRqNs39Rqtaiqqiqb0Uf7B5XTajQaFsFbLs8/m82yIWnZbBbFYhFKpfKmjN1FUf6Uk0mn00gkEshkMohGoyWDLcg7mduVjH/RibQHXFx8t9vNhojwu7oplUqsXbsW9fX1bCzqUuWiaEOiTSIajbIe0NR/meYZUK2mTCZj9dgGg4FFKWgR5zLcqVNhMBjE4OAgzpw5g6mpKQQCAWQyGUilUvT29qKxsRGdnZ3Q6/WLKj89hFNTU4hEIjh37hxmZmZYdcPs7CwuXLjANieNRoMtW7ZAp9NBoVAwIua1KmnK8yYSCQwNDbHyzlgsBoPBAKVSiY6ODnR1dUGv1y8ayTKfzyOVSmHPnj0YHh7GkSNHMDIyUpJ/JPloUqHVamVtfa8XcyNH6XQa58+fZ70tkskkKioqSurcy638yOBKJpM4deoUBgYGWHtbGm6i0+lgs9ngcDhYg6FyKUO6rsR1OHXqFI4dO4YTJ04gGAyivr4ed9xxB5qbm1lnvXJFPCjSRKOoKyoqUCgUoNPpmMMxH3el3ODvMU6
nk1X3AGDTQMtt7HPcxYFVgUAAkUgEUqmUMcxvtGz3Zs6FH/U8cuQIQqEQ2zubmprQ3t5etrJuSuslk0nk83nEYjGkUqklresn0Lr7fD4cOHAA09PTbEKiw+FAbW3treP5U26QLDS32w2fz8c2SeCi16vX61m+TCaTIZ1Os3nYlZWVLKQhk8nYBuB2u9HX14fR0VE2FRC4uOE2NzczxbfUXj9tTC6XC+Pj46y//fj4OPbt2weJRILq6mo2ipWUFY295OfryMKlkLlIJGKbkN/vx9GjRzE+Ps7KjdLpNBQKBcxmM1avXg2bzVb2yV580Lnl83lcuHABo6OjOHXqFJxOJ+u0NjY2huPHj0MsvjiIxGq1Yt26dSz0TVyEa1EQtPFls1lEIhFm+Hg8HiQSCdTU1LBGP1arddHyriRzNBrFnj17cPz4cWb8UOe3yspK6PV6lvum5j8VFRU3ZJ3za8xpjGx/fz9GRkYwOTmJdDrNcpLURKncZVd0/VOpFIaHh9Hf38+GylCTFxpwYjAY2HmUO/9M1+Dw4cPYvXs3nE4nYrEYZDIZGhoaUF9fX/bqHqoqon2GuAzk7fOJm+Wo5JgPc42f8fFxHDp0iFX3UOOhcpf0kvKnsmGRSFSi/JcKJD8Z/06nE319fQAuNt2pra1l6adyKmdycPj9FHK53HVFKssBcjr8fj8OHTrEHD7a800m0w3LXTblT7OGU6kUJiYmMDs7yxR/JBJhTWgAMIISX0nTA051o3q9HplMBmq1GrFYDIlEAseOHUM4HGYhH41Gg/b2djQ2NmLDhg2ora1dsqY+fM+AJosdOnQIFy5cQCqVQjqdhsfjQTAYZI0ZqBkFhYHpdZoMxi9Z5HMA3G433G43BgYG2AAZmpDX2NgIi8WCTZs2obu7e9G5DnxP8OTJkxgcHMTY2BgCgQDrcqZSqWC32yGVSmGz2djUKbpJ6drx0yT8f9MGTiS6cDjMKhumpqbg8/mgUqlgNpvR29uL5uZm1NfX31CXyOuRm9abPCLagK1WKzo7OxmTv7Kyks1zl0qlzHufm8a6GsjbodpmYrjHYjFGbu3t7UV3dzdWrVp11VLLGwHJTC18qbpCrVZDr9dDq9WyEdo3Wt8+H1eGz7Yn4yOVSiEYDLLNr6qqClarFUajEVqtlq1RObx//noHg0HMzMywKIBUKmU8jytNcyvH8amUcnZ2FoFAAIODg0gkEuz5t9vtsFgsZSf8zQeao7IU5cP8MD+1g5+cnMTw8DDTE6QrqKV5ueQnfhLpK34EaClAKe5sNovZ2VmMjY3h0KFDLOSv0Wig1+uxZs0adHR03NBEP6CMyj+fz2NoaAhTU1N47bXXcPz4cTZ8gXJ0/HA21SOTkPw5yEQeOnToEBQKBU6ePIlgMAiz2YyqqioEg0EAgNlsxv3334+mpib09vZCq9UueSgqnU6z8Z4vv/wyDh8+fJlCE4lE8Pv9bAgI3bhSqRSpVAqBQABerxcHDx4EADgcDhZWFIvF2Lt3Lw4fPoxEIoF4PM4GXFitVvzJn/wJmpqa8IEPfAAmk2lJSH7ZbBbhcBh79uzBoUOHSjZv8nS7urqYgtbr9YwFTvMI+ARIPi8CAJtvHwqFEAqFMDg4iAMHDrDBKul0GlqtFkajEdu3b8eGDRtgsVgWvbqBIhAulwtut5utbXNzMzZt2sTuc6PRiFWrVjEveCFDZ24vAPqbPpfJZBAKhRAOh3Hq1Cn4fD5MTU3B6/WyOQmbNm3CBz/4QbYB8O+5coA4HeFwmBn1pPgMBkMJyY6fyrme8+BfFzomGcOzs7NIp9PM6KfpcsRzoOFJ1F6ZGr/cLPjVDZOTk3A6naytKnldcyMd5ewvQB5fOBxGPB7HW2+9hbNnz6Kvrw/RaBRmsxkNDQ1wOByw2+1sIuhigrgdS9HPnq4B7Xl9fX3Yv38/RkZGkM/nmb6gSDFxXcoBilBSqR9/Umi5n6/5wHEXew34/X78+te/xi9/+UuEw2F
4PB4oFApYrVY0Nzdj165daG1tvWGi502vIm3ayWQSY2NjGB0dhd/vZ5Y6We7kyZJnTl4d1afzGzeIxWJks1m43W5IpVL2QFMInK9s6CEla/haRgGXA5SDjUQiuHDhAsbHx+HxeNiNSb2Xqe0nbRrJZBLRaBQul4tN41OpVPD5fBgbG4NIJEI0GmXdmwCw/H4ul0M2my0h4tCmQ++Vq6PaQqC2n9Rgp1AoMI+7srISBoOBKUn+5LlYLAalUsmIKrTBUx6dGvcUi0UWJvd6vfD5fHA6nQgGg8wLAsBCr7lcDvF4HJWVlWyK3GJ4YrSWRLahtAx5CeQNkWFG60ET0HK5XMkgJrp/+PMXaC2JMxKNRhnJZ3x8HIlEAlKplEXFyCMJhUJswAnNPCin/CQfXYdcLgeJRMK4GPR8k8cCXGr1S/8m0PPK/z8/tEvXmO6xkZERtu5UVikSiVirXeo9sBiTHGk9+REFMjBJ/lQqxYitc7/L//eVzosf6aCGSdFoFJlMBlNTUwgGgxgdHcX09DSr76f9lF/3vdiGP4235TdQu9k0E1+5ki6hZyOfz8PpdMLr9eLs2bOYnp6Gz+djzmNjYyPsdntZSZdisRh6vR4GgwGhUAipVAqzs7Po6+tjnCLa1272etOeQs9UoVBAPB5HNptlvB5KI+dyOday/p577kFNTU2JQ3UjuGktmc1mMTMzA4/Hg927d+Ps2bOM9EaEP4VCwVrNNjU1oVgs4vTp04hEIqycgup2SVEWCgWMjo5CLL44w9psNpd4jCKRiJU+eb1eDA0NsSlb5Sx5WQi5XA5TU1OYmprCyy+/jDNnziCVSgEADAYDbDYbUqkU4vE46/EvkUjg8Xjg8/lw/vx5AEAkEkE0GmUKE7i8YxVtevyBNhaLBUajkVnHPp8PxWKRdXmjjWsx5CZuA40qJsOmsbERHR0dLEQsEolYbnRsbAzT09MIBAKsMVOxWEQsFmMdxNxud0nnMlKC1LGRppkpFAqW6woEAqwhRy6XWzTOR7FYZKWm/G6K/OE61GAIAPNUk8kkZDIZU2Bk9ESjUYyOjjKvlt8HYHp6GrOzsyxdJpfLodPpUFlZiaamJpjNZrZJ0DzzhoYGFItFaDQaWK3Wsq09DZKhaAxwsQ6aqg6IdEgGaDAYhFQqLWH88/PhFLrnt+Il5RcIBODz+TAzM4PJyUlEIhEMDQ1BJpOhs7MTKpUK4XAYEomEeUb19fWsyyf1nS+HAUA11PzzJyIyGQDxeBwulwsajYaluei6z+3ENjfSw1ccZLxEIhHWvGvv3r0Ih8MYGBhgbYUTiQSrb6cW2sT4Lldv+4XAcRwOHz6MTCaDtWvX4oEHHmDVVTfqbNG6U+MiIg9HIhFMTEwgHo/j+PHjmJycZOkvSos6HA5s374dzc3NsNvtN8yrmQuJRILGxkbEYjEcP34cqVQKb731Fvbt24eHH34Yn/3sZ2EymeBwOG66z0E2m0U8Hkc8Hsfk5CR8Ph/279/PjB2KcqXTaeh0OjQ0NGD9+vX40pe+BJPJxJq43agRctNXizZpCl9TfTfHcdBqtTCZTFCpVNDpdNDr9airq0OxWGSNQWgz5Ic+KU9KXgQpg7nh03w+j3g8DrlcjrGxMRaKowdjMYl//LIzCs3Reel0OjQ2NrIwMHV4E4lESCaTyOVymJmZQSQSQTweX3AONW0m/EoIuh5kLMViMcjl8hLFyTcSFiMXSV4tP0cvlUqhVqtZHpQeDAqJUt6W+nKTlxONRjE5Ocneo2gQn+FcKBTYiGJ+9UexWGS594qKCsZ2XyzGP1CqFPgGGp/MSt3wgEsTCCn9xa8M8fl8SKVSbF44eS9TU1NsRHEkEoFCoWDkIypvIm+BesyTIVhOz5fkJRnIgCci3FzvlgxRUph8Yif9DvWkp5A6RY4AwOl0wufzsfQGcT1kMhnq6uoAgEUN5HI5y31SGRYdr1xrT2tN4X6RSMSijPwGW2Sg8vsa8OXmk3b5qU8
6T4qQ+v1+5uFTtI+qOkjx03NmMBhYvp+ia+W854msa7FYmKFJVVuhUIg1sSKlO/e68+WmZ2Pu+ZFBHA6HGadhZGQEsViMVfO4XC4EAgEoFAoolUrWzKq9vR319fWoqqoqK9FTLBajuroaDocD586dA3Cp6Y/L5cLExATbb/jXfW777rkNeYBS8iY5E8FgkBm5fr+fVTLNzs4iFAox3Wm327Fq1So0NjayMco3S6wtS9g/nU6zfFwgEIBarYZMJsOjjz6KRx55hHlISqUSFosFhUIBx44dw+TkJPbt24djx46xUApZs9TQoVAoMG+RHkJSDpFIBEePHoVcLsc777wDhUKBLVu2oLGxETt37kRbW9vNindViEQiFoKnMO7mzZvx2GOPsfCgXC6HVqtFsXhxCp/P58Pu3bsxNjbGUhp8zwC4dBNqNBqEw2G2yZMB0d/fD7VajUQiAa1WizfeeAPZbBYbN27Ehg0b0NHRgY6OjrJ7A3O9FgrBabVa1NTUoLu7m4UjgUtpoTNnzsDn88HtdmN0dJQR2Ci3S0YFx3GMD0HXg5QcKUOZTMbKXY4fP45YLIa77roLq1atwj333AObzVZWmQGwcL5Go4HBYCiZYU4yUB1wsVhkg6YoLB2Px5FOp9k9kUgk4Ha7WbosnU6zjXRiYgJer5cpWeotH4vF0NraCgAYHR2Fy+ViIUqDwQC9Xl/2ki++Auzu7oZcLmdTNKlihypyUqkUpqenmZIiL42fn/X5fBgcHEQsFsPExARSqRS0Wi2Te3Z2tuS6ZTIZVFZWIpPJQC6Xs3RiV1cX6urqsH79etx1112s3K1ccw3IeFEoFKirq8Ps7CxLQ+3Zswd9fX2wWCxoaGhg7YzVajXb4OkeIWfF7/djZGSEKVCO41iUanh4mEWBEokESyeSgcRXJHa7nXFMnnzySeZUlbvKQy6X40Mf+hA6Ojpw8OBBDA4OMk6V3+/HkSNHoFKpoNfrGf+ILz+lIqLRKAYHB1lUihyBfD6Po0ePssgGOS/UqpwcGYVCAZvNhlWrVqG5uRltbW245557oNFoGM+nnO2s5XI5HnnkEWzatAkejwejo6Psvb6+PnzrW99CVVUVmpqaUFlZicbGRuh0OlitVnYPVlZWMoUOgHXdjEQiSKfTGB0dxczMDNOZtBdkMhlEIhGWEpfJZOjt7cX69euxevVqbN++nVUUlcPYKUtynMpwyJKjPDd5+qTQKfxfLBbhcDiYRwdcUip8y5luaBKUFCltLIVCgfWdJiJcdXU1stksent7yyHaVUE9Cyi0WyxenDNAzWaIkUqeKzXnoM/PVx5Esup0OphMJubRkcVIZV+5XA5ut5vNso/FYsxaXwwFSODngOka0ENItf7E7eDngekeIRLP3IgHrTvxNmjsM3ldfPYvKX+3241AIICamhrY7fZFm3LGz7FSqopSULQetKaZTIZt4rFYjIX3MpkMu3aUEkokEozBns1mIZfLkU6nS9qX8q8dbQzU6Y+eLQCLxsSm9TUYDDCbzSwfTZ4wyUzDhvhRIVpHujf8fj9cLhdisRjOnj2LVCoFvV4PhUKB6elp+P3+y54Jfk6Y9oWqqirU1taitrYWWq227H316fjEsbDZbIhEIojFYohGo2ztgIubOzXCISVNaRpS3H6/HwMDA2wuSbFYRE1NDZRKJY4dO8YUBR2b3/WT1lQkEsFisaCmpgb19fWw2+3MYSp3ik8sFrMoLRG5aWw1peikUil8Ph/jN2k0GrbupAh9Ph8GBgbY+1KplOmIs2fP4r333mNGPR8kNxk3NpsNDQ0NWLVqFZqamkry7uWOeBgMBnaPaTQaxsOIx+O4cOECKioq4Pf7odPpkEqlYDAYEIlEYDKZYDQaodfr4fP5cOHCBQBAdXU1S1Ulk0kMDQ1heHgY4XCYPS80opjuYYpi1tbWsjJ2Si2VK8pRFuXPD90TGSeXy2Hfvn2IRqMwGo1oaGhg5JREIoG3334bTqeT9Y7
md8Gj0NZdd93Fch06nY5VBjidThw/frxkuhUpyNnZWUgkEsTj8XKIdkWQkiZOAt3UdMPT2Fny4HK5HDweD+LxOCYmJhhZQyKRsPI52mx0Oh3+zb/5N2hpaYHf72dhYiKAOJ1Odvy557OYtbhUvkeGG8dxrMf84OAguw5ECtNqtWxYBqVoiKVMufO5v19dXc287IqKClZal0wmWXMTYjfrdDoUCgWsXbsWDz74IBoaGhaF60BGiVarRWNjI+LxOAYGBuD3+/Hee+9hcnISWq0WZrMZwCWF5fP5WNiWvFeZTIZMJoNYLMY2FeCSwiHjjW/4kgKmVAmVw+7YsQObN29Ge3s7y/suludPkbTh4WG4XC7s3bsX586dg06ng8ViAcddGitMhjuFTA0GA3Q6HUKhEPr7+xkvhMKfRKAjJUqRFZFIxJRqJpNBS0sLOjs70dPTg56enkULe5Pi1Wg0ePLJJ3Hvvffi8OHDcDqdbG2j0SjrsskfoywSiTA0NIRUKsXWLhQKYWxsjPFYSLGRQqVjikQido8ZDAZ0dnay9IZarWY57qqqKlYxtRgpLplMhvb2dthsNtbaWCKRMC/9xIkTAC5xD6iUl2SprKxERUUF4vE4i3BSWoT2a5fLxchsfKjVavT29qK6uhqrVq2C3W5Ha2sr7HY7m1a6WI2G+I3YPvOZz+Chhx7CkSNHcPz4cczOzrLx8eSZT01NQalUspJXihaRN0+RYTKUC4UC/H4/otEo019yuRxWqxVarRYbNmyA0WjEmjVrUFtbi+rqaphMpkWpKimL8qcFpxOjh2NgYACBQAANDQ2IRqMQi8UsBPqv//qvmJ2dZZ3J+HlEquOsr6+HyWTCHXfcAavVCp/Ph1AoxMiARKiiNIBIJEIsFmPlYIsN2iCoyxrxHYjEFolEMDMzwxrUUOkW5bdJSZMXTRu8RqNBdXU1urq6sGbNGoRCIZYPoxpn6rLGj5iQIbGYdbg0OU+n07H8O4Vip6enMT4+zkhscrmclR+Gw2FkMhmIxWLodDqWNwdKp/xJpVLU1NSwPJfJZILT6cTg4CAj0ZHhQRUG1N+eQrCLITs/DEzDk0ZHR1mnw+npadb2lYxXjrvYlZLy/fl8nkUP+OkOuq7E+DUajTAYDJdVEVBonZQHKYne3l4WQi73hsg3PMjrB8AiDxMTEywHTbLRPSKRSOB2u1lEyuFwwOfzYXJysoTxT+dLaT1q8kWGBz0fhUIBVVVVqKqqYg1eaFNcDLnJkG5tbWXGakVFBSthdrlcrOGY3+9nNf8AMDIyAo/Hw+7zZDIJv9/PIgNUSjbfMVUqFRobG2E0GtHb2wur1Qqz2czY3lTTv5iDdeg5ValUMJlMrMIkk8nA7XYzhb4QVCoV1Go1i3pdLRrHXzsiDzscDmzcuBE1NTWoqqpi4e7FWG/+eVCaqr29HU1NTVAoFKy8nEoNKeJM+zCfn7XQdZnvM5QaMxgMsFqtWL16NWpra7Fp0yaW1lisdb5p5S+RSGCxWNiDIZFIGPmG2hKSMiCrj0KcKpWKbW5E0GtoaMCuXbtgNptx9913s3piGheay+WwevVqdHZ2Ynh4GD/72c+QzWYZGemRRx5Bd3c3Ojo6ynF9rii3wWBgJRgUCpbJZBgcHGReEL+BD3kL/GYhlC+vrKyE3W6HTqdjVu/atWthMBhgsViQy+XQ0tKC1atXY3p6GlarFZlMhlmDlI/duHEj1q9fD6PRuGgPB5HqHnzwQTQ1NeHw4cOMyU9GGbHUzWYzK/XjT72rr6+Hw+FAKpWCx+OBWq1GW1sb9Ho91q1bx4iiCoUCOp0ODocDwWAQVqsVhUKBKZdNmzZBIpFg48aNqKqqWrQWnLQxq9VqPPDAA1i3bh1qamowMTGBqakpptCoyoEqGojgSSkCagJESpU/Brerqwtms5ndy1QKRCQzMnZIIUqlUrS2tsJisSyK4ieQomlqamJ9Ntra2lhExu/3Y3p6mpERydjjRwGpQoX
2CErRqdVqmEwmlsskBrNMJmOldGLxxSEz1DyKSF9kRC6W3ACYMpZKpaxHRzQaZVUfCoUCmUwGw8PDAC616CaOB0UDyGikyh+tVovu7m6m2EOhEOsjr1ar2X3Q3d3NvGgivZWL13Al8FOvu3btQnt7O8LhMMLhMJxOJ/r7++H3+zE4OHiZEUPXgYxgSoHS79Ia0/NNLcCJQ6DRaLBx40aW9iTZF1Ppzyc/KevVq1dDr9fD5XJh8+bNCAaDOH36NOuAmMvl2P1Ize3UajUsFgvEYnEJ+VcsFrNKLYVCwTqD1tTUwGAwYN26ddDpdMyJWMwKjptW/pQLzGQyjLBEi0VEtUAggImJCfYdsVjMBryQp0A5nI6ODnz4wx9GdXU1Wlpa5t3M6+rq0NnZiVOnTuHFF19EPp9HRUUFbDYb25gXuw8zhZzp2PwblKYvUdki8QIoKpDP51lIu7KyElarFTU1NVi/fj3q6upwzz33sNAT/yG32+1oaWlBIBBgBBLKO7a1tcFqtaKlpYU9bIsV/pbJZNBoNNi1axfa2trYeF16KGjNyQMko4/yhblcjhl60WgUY2NjMJlM2LRpE+rq6tDe3g6TycS8haqqKtTU1CCRSKC6upox46VSKex2OzQaDQunlzMsxgcpf6VSidWrVyOfz8NoNGJwcBDvv/8+isUiK+O8Uicw6kimVCrZ5tbb2wuLxYKtW7eW1FBTKozqvkkJUp6f3152sUCbrUwmYyHIBx98EN3d3RgbG4PT6cTQ0BAGBgZKvHm+vNTfw2AwsBA2GT2k4CjaQXly4jbE43GmMIhvcT3tocshO3n0NpuNkdgikQhCoRDr3eFyuUrk57dapnuTKmEaGhqg0WjQ2dmJuro6ZhjSPAgALExutVpL5qAsheInUNSio6MD7e3trEeJ0+mEWq0uKUmdC+Jg6HQ6tLW1MSUmFothNBqZ0qNOlfX19WwvpPLF5RgZzAcdn7glyWQSGzZsgMfjwSuvvAKfzweXy4VkMslY+Hq9HslkEmazGWvWrIFYLEYqlWLVWiKRCK2traivr4der2fDuCwWC5RKJYxG45K1p79p5c/3BO+77z40NzeznvNEjAmHw4y9zHEcFAoFenp6UFVVxR4sqgagMZnEDJ0PtAnX1dXhz//8z5FIJNjGUltbuySDfUiha7VabNy4kXU6Gx8fZ+RGSkHQJD6RSIS2tjbGjKfBKJTDa25uZuSl+XJ5/JKrnp4eFkEBLipItVrNQoKLrRAkEgmsVitUKhV27tyJhoYG9n40GsX09DTLcUkkEmbp0rm1tLRgzZo1iMfjrBvehg0boNVqSx584oCQV1lTU8PSQtTYhr85LoVXQPeW1WplZE673c7awBIJDkCJzKTEqDSNvLy2tjZUVFSwTZEUW7FYZGQ5km+u57cU8s6VmwwQvV6PpqYm1NbWwmAwsN4etGYi0aU+D7W1tWhtbWXNbPjKlDZAIjlRVIzfP5/Pj1mqdeaDqlrob0pzUK8Kr9d7mfIn45e4GNQ5zmg0oqKigj3vANi+SHM55q79YjbuuhbZSSbqaXD33XfD7/fDarWy/iYEfsUORTH4/Q9oj6OqMIp+EqdmKQ2c6wEZMyKRCNu3b0c4HGYtd81mMzQaDVwuF1avXs10kUgkYiRliqY4HA5mANM1mNsieykg4q6SjKmpqcH09PQVf4SY6LFYDMlkEmfOnMHo6CgjuU1MTODAgQNIpVLMY3r66afR3t7OmraQV8EnsV3pBuB3xQIu3aDlCgNej9zU7OTQoUM4f/48rFYrrFYrRkdH0dfXB4/Hg8OHD0Mmk+FjH/sYY2+azWZYLBZUVVVdJvdC589vgcr/P8l8s7Jfi9x82YvFIsv5JxIJxGIxhMNhVq87MzPDQmc0bpnyiJSa4Fd4kPxXOi4fJOtSrPfc8+D3IeCPW87lcswbIlIXycgP3fLZyldbO/46l0Newo3IPbdVMXFaaJIlx3FMRopQ0Ehn4JIRMVfm+WS
a20p1ueSmc+H3G+F3HaT0Hn2Gz9omfsrc+5yv0BdqGbvc680Hf8351+JKn+PLC1wux1z5yi0v4Wbk5mOu3HOfS+K7kaPGX1/+524FuctC+KMFJhYmlSTQ5kj5S8p9VlZWor29nYV4yPK5nnAHbRpLPcFv7jlQBEAkErFGJJT/kcvlyGazcDgcjAlKjRqsVisrB7qeXBbfyFlO8JU23zuh9I1YLGYsb5FIxOpiaZ35cl+PLMvl/czFXKOFNnJKBdF9SZY9rS95RNfbGvVWklskKh0gxHEXJ93RpkfcBv6ewC9Zu56oxa0iN1BqrPCNUJKL32SJvFcqfZ0r91LlrsuJa1VQc5X/7SbnlUDyLOSgkJFHe8KtjLI1waeQJMdx6OjoKGmwQ8QlPuY2ZrjVL9RCILkVCgXWrl2LNWvWsBuks7MTd999d0mXK5J7Pk/gdgOdN/ErVCoV2wAbGxsBXIpQzG3zyleetytIfkpJSKVS9gzM18mN/j/f67cb+OdOPB+O41it93xy3u4yE/j3MO1bKpWqxCBYSOY/BvmvhsXyYm8H3E57Wlkn4PDDWSsJK1VuAt94WYnXYCVvdsAfj1K/XqxUuQVcGbfLPXF7upwCBAgQIECAgBuGoPwFCBAgQICAFQZB+QsQIECAAAErDILyFyBAgAABAlYYBOUvQIAAAQIErDAIyl+AAAECBAhYYRCUvwABAgQIELDCICh/AQIECBAgYIVBUP4CBAgQIEDACsNVB/vQ1K2VBpfLBYfDsdynseQQ5F5ZEOReWRDkXlnw+XzIZDLzvndV5S9AgAABAgQI+OOCEPYXIECAAAECVhgE5S9AgAABAgSsMAjKX4AAAQIECFhhEJS/AAECBAgQsMIgKH8BAgQIECBghUFQ/gIECBAgQMAKw/8HWSznzZ39pGIAAAAASUVORK5CYII=", "text/plain": [ "
" ] @@ -384,10 +383,10 @@ "in the meta distribution $\\mathcal{D}$ during training\n", "\n", "The following instantiates\n", - "{class}`~ott.neural.models.MetaInitializer`,\n", + "{class}`~ott.neural.initializers.meta_initializer.MetaInitializer`,\n", "which provides an implementation for training and deploying Meta OT models.\n", "The default meta potential model for $f_\\theta$ is a standard multi-layer MLP\n", - "defined in {class}`~ott.neural.models.MLP`\n", + "defined by the ``MetaMLP`` below\n", "and it is optimized with {func}`~optax.adam` by default.\n", "\n", "**Custom model and optimizers**.\n", @@ -438,7 +437,9 @@ "outputs": [], "source": [ "meta_mlp = MetaMLP(potential_size=geom.shape[0])\n", - "meta_initializer = models.MetaInitializer(geom=geom, meta_model=meta_mlp)" + "meta_initializer = meta_initializer.MetaInitializer(\n", + " geom=geom, meta_model=meta_mlp\n", + ")" ] }, { @@ -451,7 +452,8 @@ "Meta OT models have a preliminary training phase where they are\n", "given samples of OT problems from the meta distribution.\n", "The Meta OTT initializer internally stores the training state\n", - "of the model, and {meth}`~ott.neural.models.MetaInitializer.update` will update the initialization\n", + "of the model, and {meth}`~ott.neural.initializers.meta_initializer.MetaInitializer.update`\n", + "will update the initialization\n", "on a batch of problems to improve the next prediction.\n", "While we show here a separate training phase, the update\n", "can also be done in-tandem with deployment where the\n", @@ -501,7 +503,7 @@ "Now that we have trained the model, we can next deploy it anytime we\n", "want to make a rough prediction for new instances of the problems.\n", "While in practice, the model can be continued to be updated in deployment\n", - "by calling {meth}`~ott.neural.models.MetaInitializer.update`,\n", + "by calling {meth}`~ott.neural.initializers.meta_initializer.MetaInitializer.update`,\n", "here we will keep the model fixed so we can 
evaluate it on test instances." ] }, @@ -516,7 +518,7 @@ "prediction of the solution to the transport problems from above,\n", "which are sampled from testing pairs of MNIST digits that\n", "the model was not trained on.\n", - "The initializer uses the Meta OT model in {meth}`~ott.neural.models.MetaInitializer.init_dual_a`.\n", + "The initializer uses the Meta OT model in {meth}`~ott.neural.initializers.meta_initializer.MetaInitializer.init_dual_a`.\n", "This shows that the initialization is extremely close to the ground-truth coupling." ] }, diff --git a/docs/tutorials/neural_dual.ipynb b/docs/tutorials/neural_dual.ipynb index e268485c1..2021eebfb 100644 --- a/docs/tutorials/neural_dual.ipynb +++ b/docs/tutorials/neural_dual.ipynb @@ -7,12 +7,12 @@ "# Neural Dual Solver \n", "\n", "This tutorial shows how to use `OTT` to compute the Wasserstein-2 optimal transport map between continuous measures in Euclidean space that are accessible via sampling.\n", - "{class}`~ott.neural.solvers.neuraldual.W2NeuralDual` solves this\n", + "{class}`~ott.neural.methods.neuraldual.W2NeuralDual` solves this\n", "problem by optimizing parameterized Kantorovich dual potential functions\n", "and returning a {class}`~ott.problems.linear.potentials.DualPotentials`\n", "object that can be used to transport unseen source data samples to its target distribution (or vice-versa) or compute the corresponding distance between new source and target distribution.\n", "\n", - "The dual potentials can be specified as non-convex neural networks ({class}`~ott.neural.models.MLP`) or an input-convex neural network ({class}`~ott.neural.models.ICNN`) {cite}`amos:17`. {class}`~ott.neural.solvers.neuraldual.W2NeuralDual` implements the method developed by {cite}`makkuva:20` along with the improvements and fine-tuning of the conjugate computation from {cite}`amos:23`. For more insights on the approach itself, we refer the user to the original sources." 
+ "The dual potentials can be specified as non-convex neural networks {class}`~ott.neural.networks.potentials.PotentialMLP` or an input-convex neural network {class}`~ott.neural.networks.icnn.ICNN` {cite}`amos:17`. {class}`~ott.neural.methods.neuraldual.W2NeuralDual` implements the method developed by {cite}`makkuva:20` along with the improvements and fine-tuning of the conjugate computation from {cite}`amos:23`. For more insights on the approach itself, we refer the user to the original sources." ] }, { @@ -24,7 +24,7 @@ "import sys\n", "\n", "if \"google.colab\" in sys.modules:\n", - " !pip install -q git+https://github.com/ott-jax/ott@main" + " %pip install -q git+https://github.com/ott-jax/ott@main" ] }, { @@ -47,8 +47,8 @@ "\n", "from ott import datasets\n", "from ott.geometry import pointcloud\n", - "from ott.neural import models\n", - "from ott.neural.solvers import neuraldual\n", + "from ott.neural.methods import neuraldual\n", + "from ott.neural.networks import potentials\n", "from ott.tools import sinkhorn_divergence" ] }, @@ -58,7 +58,7 @@ "source": [ "## Setup training and validation datasets\n", "\n", - "We apply the {class}`~ott.neural.solvers.neuraldual.W2NeuralDual` to compute the transport between toy datasets.\n", + "We apply the {class}`~ott.neural.methods.neuraldual.W2NeuralDual` to compute the transport between toy datasets.\n", "Here, we aim at computing the map between two toy datasets representing both, source and target distribution using the\n", "datasets `simple` (data clustered in one center) and `circle` (two-dimensional Gaussians arranged on a circle) from {class}`~ott.datasets.create_gaussian_mixture_samplers`.\n", "\n", @@ -95,18 +95,7 @@ "outputs": [ { "data": { - "text/plain": [ - "(
,\n", - " )" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAoAAAAFeCAYAAAAVEa7hAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABj/0lEQVR4nO3deXydZZ3//9d9nz0nJ3vSNEnXpKWlhbJXoJa9FQsiigiugDA4Oo7Oz2V05uHCVx2GUdxnFEZFRpGlLAJ2EFDQshQQKaWUQps2XZI0+0nOSU7Odt/X74+QM0mbLpQ2aXPez8fDh+Tufc657/vc574/93Vdn89lGWMMIiIiIpI37IneABEREREZXwoARURERPKMAkARERGRPKMAUERERCTPKAAUERERyTMKAEVERETyjAJAERERkTyjAFBEREQkzygAFBEREckzCgBFJqmrrrqKmTNnTvRmiIi8Ldu2bcOyLH71q19N9KZMKnkbAK5fv57LLruMGTNmEAwGqa2t5YILLuDHP/7xRG+aiMhBsyzrgP735z//eaI3dZRnn32Wb3zjG/T29k70pojkBe9Eb8BEePbZZznnnHOYPn061113HdXV1ezcuZPnnnuOH/7wh3zmM5+Z6E0UETkov/71r0f9/T//8z88/vjjeyyfP3/+eG7Wfj377LPccMMNXHXVVZSUlEz05ohMenkZAH7729+muLiYv/71r3tcaDo6OsZ9ewYGBgiHw+P+uflAx1byzUc+8pFRfz/33HM8/vjjeyw/GMYYkskkoVDobb+XvDU69nKo5WUX8JYtW1iwYMGYT5lVVVWj/s5ms3zzm9+kvr6eQCDAzJkz+Zd/+RdSqdSo9SzL4hvf+MYe7zdz5kyuuuqq3N+/+tWvsCyLv/zlL3zqU5+iqqqKurq63L8/8sgjnHXWWUQiEYqKijj11FP57W9/O+o9n3/+ed71rndRXFxMQUEBZ511Fs8888x+9/vPf/4zlmVxzz33cMMNN1BbW0skEuGyyy6jr6+PVCrF5z73OaqqqigsLOTqq6/eYz8BfvOb33DyyScTCoUoKyvjiiuuYOfOnaPWeeqpp/jABz7A9OnTCQQCTJs2jX/6p39icHBw1HptbW1cffXV1NXVEQgEmDp1Kpdccgnbtm07LMf2ne98J+FwmEgkwooVK9iwYcN+j1smk+GGG25gzpw5BINBysvLWbJkCY8//nhunVdeeYWrrrqK2bNnEwwGqa6u5pprrqG7u3vUe33jG9/Asiw2bdrERz7yEYqLi6msrOSrX/0qxhh27tzJJZdcQlFREdXV1dx8882jXj/8Hd599938y7/8C9XV1YTDYd7znvfs8R2MxXVdfvCDH7BgwQKCwSBTpkzh+uuvJxqNjlrvxRdfZPny5VRUVBAKhZg1axbXXHPNft9fjg633XYb5557LlVVVQQCAY499lh++tOf7rHezJkzueiii3j00Uc55ZRTCIVC3HLLLQBs376d97znPYTDYaqqqvinf/onHn300TG7l/d3zfrGN77BF7/4RQBmzZqV66YeeR3Y3dlnn83ChQt55ZVXOOussygoKKChoYF7770XgL/85S8sXryYUCjEMcccwx//+Mc93qOlpYVrrrmGKVOmEAgEWLBgAb/85S9HrZNOp/na177GySefTHFxMeFwmHe+8508+eSTe7zfXXfdxcknn5y7fh933HH88Ic/HLWflmXt8brha9fI/d3Xse/t7eVzn/sc06ZNIxAI0NDQwE033YTruns9XsMO5Lf93e9+lzPOOIPy8nJCoRAnn3xy7riOZFkW//AP/8DKlSs59thjCYVCnH766axfvx6AW265hYaGBoLBIGefffYe3
+fwd/i3v/2NM844I7c9P/vZz/a7HwCvv/46l112GWVlZQSDQU455RQeeuihUescyPU7X+VlC+CMGTNYs2YNr776KgsXLtznutdeey233347l112GZ///Od5/vnnufHGG9m4cSMPPPDAQW/Dpz71KSorK/na177GwMAAMHQRuOaaa1iwYAFf+cpXKCkpYe3atfzhD3/gQx/6EABPPPEEF154ISeffDJf//rXsW07dzF/6qmnOO200/b72TfeeCOhUIgvf/nLNDY28uMf/xifz4dt20SjUb7xjW/w3HPP8atf/YpZs2bxta99Lffab3/723z1q1/l8ssv59prr6Wzs5Mf//jHLF26lLVr1+aC6pUrV5JIJPj7v/97ysvLeeGFF/jxj39Mc3MzK1euzL3f+9//fjZs2MBnPvMZZs6cSUdHB48//jg7duw46ASGsY7tr3/9az7+8Y+zfPlybrrpJhKJBD/96U9ZsmQJa9eu3ednfeMb3+DGG2/k2muv5bTTTiMWi/Hiiy/y0ksvccEFFwDw+OOPs3XrVq6++mqqq6vZsGEDt956Kxs2bOC5557b46L/wQ9+kPnz5/Pv//7vrFq1im9961uUlZVxyy23cO6553LTTTdxxx138IUvfIFTTz2VpUuXjnr9t7/9bSzL4p//+Z/p6OjgBz/4Aeeffz4vv/zyPlsIrr/+en71q19x9dVX84//+I80NTXxk5/8hLVr1/LMM8/g8/no6Ohg2bJlVFZW8uUvf5mSkhK2bdvG/ffff1Dfhxx5fvrTn7JgwQLe85734PV6efjhh/nUpz6F67p8+tOfHrXuG2+8wZVXXsn111/PddddxzHHHMPAwADnnnsuu3bt4rOf/SzV1dX89re/HTMoOpBr1vve9z42bdrEnXfeyfe//30qKioAqKys3Od+RKNRLrroIq644go+8IEP8NOf/pQrrriCO+64g8997nN88pOf5EMf+hDf+c53uOyyy9i5cyeRSASA9vZ23vGOd+SCmMrKSh555BE+8YlPEIvF+NznPgdALBbj5z//OVdeeSXXXXcd8XicX/ziFyxfvpwXXniBE044ARi6Blx55ZWcd9553HTTTQBs3LiRZ555hs9+9rMH9T2NdewTiQRnnXUWLS0tXH/99UyfPp1nn32Wr3zlK+zatYsf/OAHe32/A/1t//CHP+Q973kPH/7wh0mn09x111184AMf4Pe//z0rVqwYte5TTz3FQw89lDtvbrzxRi666CK+9KUv8V//9V986lOfIhqN8h//8R9cc801PPHEE3t8h+9+97u5/PLLufLKK7nnnnv4+7//e/x+/z4fOjds2MCZZ55JbW0tX/7ylwmHw9xzzz28973v5b777uPSSy8FDuz6nbdMHnrssceMx+MxHo/HnH766eZLX/qSefTRR006nR613ssvv2wAc+21145a/oUvfMEA5oknnsgtA8zXv/71PT5rxowZ5uMf/3ju79tuu80AZsmSJSabzeaW9/b2mkgkYhYvXmwGBwdHvYfrurn/nzNnjlm+fHlumTHGJBIJM2vWLHPBBRfsc7+ffPJJA5iFCxeO2tcrr7zSWJZlLrzwwlHrn3766WbGjBm5v7dt22Y8Ho/59re/PWq99evXG6/XO2p5IpHY4/NvvPFGY1mW2b59uzHGmGg0agDzne98Z5/b/XaPbTweNyUlJea6664b9fq2tjZTXFy8x/LdLVq0yKxYsWKf64y1v3feeacBzOrVq3PLvv71rxvA/N3f/V1uWTabNXV1dcayLPPv//7vueXRaNSEQqFR+zj8HdbW1ppYLJZbfs899xjA/PCHP8wt+/jHPz7q+3vqqacMYO64445R2/mHP/xh1PIHHnjAAOavf/3rPvdZjg6f/vSnze6X+rHO1+XLl5vZs2ePWjZjxgwDmD/84Q+jlt98880GML/73e9yywYHB828efMMYJ588kljzFu7Zn3nO98xgGlqajqg/TrrrLMMYH7729/ml
r3++usGMLZtm+eeey63/NFHHzWAue2223LLPvGJT5ipU6earq6uUe97xRVXmOLi4twxymazJpVKjVonGo2aKVOmmGuuuSa37LOf/awpKioade3Z3fDvf3fD166R+763Y//Nb37ThMNhs2nTplHLv/zlLxuPx2N27Nix188/0N/27udHOp02CxcuNOeee+6o5YAJBAKjtvuWW24xgKmurh51jfrKV76yxz4Of4c333xzblkqlTInnHCCqaqqyt2nmpqa9vj+zjvvPHPccceZZDKZW+a6rjnjjDPMnDlzcssO5Pqdr/KyC/iCCy5gzZo1vOc972HdunX8x3/8B8uXL6e2tnZU8/H//u//AvD//X//36jXf/7znwdg1apVB70N1113HR6PJ/f3448/Tjwe58tf/jLBYHDUusOtRy+//DKbN2/mQx/6EN3d3XR1ddHV1cXAwADnnXceq1evPqAugI997GP4fL7c34sXL8YYs8fT1uLFi9m5cyfZbBaA+++/H9d1ufzyy3Of3dXVRXV1NXPmzBn19D+yFWpgYICuri7OOOMMjDGsXbs2t47f7+fPf/7zHl2Qb8dYx7a3t5crr7xy1HZ7PB4WL148ZqvFSCUlJWzYsIHNmzfvdZ2R+5tMJunq6uId73gHAC+99NIe61977bW5//Z4PJxyyikYY/jEJz4x6nOPOeYYtm7dusfrP/axj+VaMgAuu+wypk6dmjtnx7Jy5UqKi4u54IILRh2Hk08+mcLCwtxxGG7F/f3vf08mk9nr+8nRa+T52tfXR1dXF2eddRZbt26lr69v1LqzZs1i+fLlo5b94Q9/oLa2lve85z25ZcFgkOuuu27UeofqmrU3hYWFXHHFFbm/jznmGEpKSpg/fz6LFy/OLR/+7+HfkjGG++67j4svvhhjzKjfw/Lly+nr68v9bj0eD36/HxgaQtHT00M2m+WUU04Z9dsuKSlhYGDgkHYtjnXsV65cyTvf+U5KS0tHbff555+P4zisXr16r+93oL/tkedHNBqlr6+Pd77znWNey84777xRPSjDx/r973//qGvU7t/BMK/Xy/XXX5/72+/3c/3119PR0cHf/va3Mbevp6eHJ554gssvv5x4PJ47Bt3d3SxfvpzNmzfT0tKS2+f9Xb/zVV52AQOceuqp3H///aTTadatW8cDDzzA97//fS677DJefvlljj32WLZv345t2zQ0NIx6bXV1NSUlJWzfvv2gP3/WrFmj/t6yZQvAPrukh0/gj3/843tdp6+vj9LS0n1+9vTp00f9XVxcDMC0adP2WO66Ln19fZSXl7N582aMMcyZM2fM9x0ZVO7YsYOvfe1rPPTQQ3sEd8M3mEAgwE033cTnP/95pkyZwjve8Q4uuugiPvaxj1FdXb3PfdiX3Y/t8HE799xzx1y/qKhon+/3//7f/+OSSy5h7ty5LFy4kHe961189KMf5fjjj8+t09PTww033MBdd921RyLR7jdUGPs7CAaDua6vkct3H0cI7PEdWJZFQ0PDPsdMbd68mb6+vj3GuQ4b3u6zzjqL97///dxwww18//vf5+yzz+a9730vH/rQhwgEAnt9fzl6PPPMM3z9619nzZo1JBKJUf/W19eXuybAnr8nGBr/V19fv8fQht2vlYfqmrU3dXV1e2xDcXHxmNcyIHct6uzspLe3l1tvvZVbb711zPce+Tu+/fbbufnmm3n99ddHBU4jj82nPvUp7rnnHi688EJqa2tZtmwZl19+Oe9617sOat92f/9hmzdv5pVXXtlr9/i+EhkP9Lf9+9//nm9961u8/PLLo8aBjzV+8a3cT4A97gc1NTV7JOrNnTsXGKr/N/wgPVJjYyPGGL761a/y1a9+dcx97ejooLa29oCu3/kqbwPAYX6/n1NPPZVTTz2VuXPncvXVV7Ny5Uq+/vWv59YZ66Q/UI7jjLn8YDK5hp+Uv/Od7+TGneyusLBwv+8zsnXsQJYbY3Kfb1kWjzzyyJjrD
n+24zhccMEF9PT08M///M/MmzePcDhMS0sLV1111agn/s997nNcfPHF/O53v+PRRx/lq1/9KjfeeCNPPPEEJ5544j7340CP7fDn/frXvx4zsPR69/0zWLp0KVu2bOHBBx/kscce4+c//znf//73+dnPfpZrybv88st59tln+eIXv8gJJ5xAYWEhruvyrne9a8wWjrGO3/6O/9vlui5VVVXccccdY/778A3FsizuvfdennvuOR5++GEeffRRrrnmGm6++Waee+65AzrH5Mi1ZcsWzjvvPObNm8f3vvc9pk2bht/v53//93/5/ve/v8f5+nayTg/VNWtv3s61DIYypvcWnA4HCL/5zW+46qqreO9738sXv/hFqqqq8Hg83HjjjbkHdxhKIHz55Zd59NFHeeSRR3jkkUe47bbb+NjHPsbtt98O7P1e8lbuE67rcsEFF/ClL31pzNcMB09jOZDf9lNPPcV73vMeli5dyn/9138xdepUfD4ft9122x4JiXDw38HbMfz9feELX9ijhXTY8MPIgVy/81XeB4AjnXLKKQDs2rULGEoWcV2XzZs3j6qZ1d7eTm9vLzNmzMgtKy0t3aOAaTqdzr3X/tTX1wPw6quv7vEUvfs6RUVFnH/++Qe2U4dQfX09xhhmzZq1z4vM+vXr2bRpE7fffjsf+9jHcsv31jVSX1/P5z//eT7/+c+zefNmTjjhBG6++WZ+85vfAIfu2FZVVR30cSsrK+Pqq6/m6quvpr+/n6VLl/KNb3yDa6+9lmg0yp/+9CduuOGGUQkzh7PLYff3NsbQ2Ni4z6fa+vp6/vjHP3LmmWce0E39He94B+94xzv49re/zW9/+1s+/OEPc9ddd+X9RfNo9/DDD5NKpXjooYdGtd7sbyjESDNmzOC1117DGDMqqGlsbBy13lu5Zr2dB+23qrKykkgkguM4+92ue++9l9mzZ3P//feP2saRjQTD/H4/F198MRdffDGu6/KpT32KW265ha9+9as0NDTkWjp7e3tHVaF4K71J9fX19Pf3v617wL5+2/fddx/BYJBHH310VKvgbbfddtCfty+tra17lOvatGkTwF6T82bPng0M9TodyHHY1/U7n+XlGMAnn3xyzKeQ4fFTxxxzDADvfve7AfbIqvre974HMCobqr6+fo+xF7feeuten+x2t2zZMiKRCDfeeCPJZHLUvw1v68knn0x9fT3f/e536e/v3+M9Ojs7D+izDtb73vc+PB4PN9xwwx7HzxiT66ocfvIbuY4xZlQ5BIBEIrHHvtbX1xOJREZ1O7zdY7t8+XKKior4t3/7tzHHvezvuO3eBVtYWEhDQ0NuG8faX9jzvDmU/ud//od4PJ77+95772XXrl1ceOGFe33N5ZdfjuM4fPOb39zj37LZbC7Ijkaje+zLcOvNWGWB5Ogy1vna19f3lm7wy5cvp6WlZdSY6WQyyX//93+PWu+tXLOGA4DxmAnE4/Hw/ve/n/vuu49XX311n9s11vF6/vnnWbNmzajX7H6dsG0790A2/LsZDohHXs8GBgZyLYQH4vLLL2fNmjU8+uije/xbb29vbsz2WA7kt+3xeLAsa9T1ddu2bfzud7874G18K7LZbK68DQw93N9yyy1UVlZy8sknj/maqqoqzj77bG655ZYxGwJGfn/7u37ns7xsAfzMZz5DIpHg0ksvZd68eaTTaZ599lnuvvtuZs6cydVXXw3AokWL+PjHP86tt95Kb28vZ511Fi+88AK33347733veznnnHNy73nttdfyyU9+kve///1ccMEFrFu3jkcffXSPMV17U1RUxPe//32uvfZaTj31VD70oQ9RWlrKunXrSCQS3H777di2zc9//nMuvPBCFixYwNVXX01tbS0tLS08+eSTFBUV8fDDDx+WYwZDF69vfetbfOUrX2Hbtm28973vJRKJ0NTUxAMPPMDf/d3f8YUvfIF58+ZRX1/PF77wBVpaWigqKuK+++7bY+zHpk2bO
O+887j88ss59thj8Xq9PPDAA7S3t48a2H0oju1Pf/pTPvrRj3LSSSdxxRVXUFlZyY4dO1i1ahVnnnkmP/nJT/b6+mOPPZazzz6bk08+mbKyMl588UXuvfde/uEf/iH3/kuXLuU//uM/yGQy1NbW8thjj9HU1HQQR/nAlJWVsWTJEq6++mra29v5wQ9+QENDwx6D8Ec666yzuP7667nxxht5+eWXWbZsGT6fj82bN7Ny5Up++MMfctlll3H77bfzX//1X1x66aXU19cTj8f57//+b4qKinIPRXL0WrZsWa6l6vrrr6e/v5///u//pqqq6oBb1a+//np+8pOfcOWVV/LZz36WqVOncscdd+QS2IZbyt7KNWv4Zv+v//qvXHHFFfh8Pi6++OLDVsj93//933nyySdZvHgx1113Hcceeyw9PT289NJL/PGPf6SnpweAiy66iPvvv59LL72UFStW0NTUxM9+9jOOPfbYUUHttddeS09PD+eeey51dXVs376dH//4x5xwwgm5HqRly5Yxffp0PvGJT/DFL34Rj8fDL3/5y9z16EB88Ytf5KGHHuKiiy7iqquu4uSTT2ZgYID169dz7733sm3btr1eGw/kt71ixQq+973v8a53vYsPfehDdHR08J//+Z80NDTwyiuvvJ1DPqaamhpuuukmtm3bxty5c7n77rt5+eWXufXWW0eNK9/df/7nf7JkyRKOO+44rrvuOmbPnk17eztr1qyhubmZdevWAfu/fue1ccs3PoI88sgj5pprrjHz5s0zhYWFxu/3m4aGBvOZz3zGtLe3j1o3k8mYG264wcyaNcv4fD4zbdo085WvfGVU6rkxxjiOY/75n//ZVFRUmIKCArN8+XLT2Ni411Ile0vDf+ihh8wZZ5xhQqGQKSoqMqeddpq58847R62zdu1a8773vc+Ul5ebQCBgZsyYYS6//HLzpz/9aZ/7PVxCZOXKlaOW722bhksWdHZ2jlp+3333mSVLlphwOGzC4bCZN2+e+fSnP23eeOON3DqvvfaaOf/8801hYaGpqKgw1113nVm3bt2oVP6uri7z6U9/2sybN8+Ew2FTXFxsFi9ebO65557DcmyffPJJs3z5clNcXGyCwaCpr683V111lXnxxRf3edy+9a1vmdNOO82UlJSYUChk5s2bZ7797W+PKqXT3NxsLr30UlNSUmKKi4vNBz7wAdPa2rpHCZu9HdOPf/zjJhwO7/HZZ511llmwYMGofQDMnXfeab7yla+YqqoqEwqFzIoVK3LldUa+58gyMMNuvfVWc/LJJ5tQKGQikYg57rjjzJe+9CXT2tpqjDHmpZdeMldeeaWZPn26CQQCpqqqylx00UX7PU5yZBqrDMxDDz1kjj/+eBMMBs3MmTPNTTfdZH75y1+OWYpkbyU0tm7dalasWGFCoZCprKw0n//85819991ngFElWIw58GvWN7/5TVNbW2ts295vSZjdfxv722bAfPrTnx61rL293Xz6058206ZNMz6fz1RXV5vzzjvP3Hrrrbl1XNc1//Zv/2ZmzJhhAoGAOfHEE83vf//7PX5f9957r1m2bJmpqqoyfr/fTJ8+3Vx//fVm165doz7zb3/7m1m8eHFune9973t7LQOzt2Mfj8fNV77yFdPQ0GD8fr+pqKgwZ5xxhvnud7+7RzmzkQ70t/2LX/zCzJkzxwQCATNv3jxz2223jVnCZqxjOlyyZffyXmPdf4a/wxdffNGcfvrpJhgMmhkzZpif/OQnY77nyDIwxhizZcsW87GPfcxUV1cbn89namtrzUUXXWTuvffe3DoHcv3OV5Yxh2iEuYgcdn/+858555xzWLlyJZdddtlEb47IKD/4wQ/4p3/6J5qbm6mtrZ3ozZEj3Nlnn01XV9eY3fBy+OXlGEAREXl7dp/WMZlMcssttzBnzhwFfyJHgbwcAygiIm/P+973PqZPn84JJ5xAX18fv/nNb3j99df3WmZIRI4sCgBFROQtW
758OT//+c+54447cByHY489lrvuuosPfvCDE71pInIANAZQREREJM9oDKCIiIhInlEAKCIiIpJnDmgMoOu6tLa2EolExnW6HhGRw8VxHBobG2loaNjrvKUiIkcTYwzxeJyamhpse99tfAcUALa2tjJt2rRDsnEiIiIicvjs3LmTurq6fa5zQAFgJBLJvWFRUdHb3zIRkQnW3NzMggULdF0TkUkjFosxbdq0XNy2LwcUAA53+xYVFelCKSKTwvC1TNc1EZlsDmS4npJARERERPKMAkARERGRPKMAUERERCTPKAAUERERyTMKAEVERETyjAJAERERkTyjAFBEREQkzygAFBEREckzB1QIWkRERN6e1rYuotEYpaVF1FRXTPTmSJ5TACgiInKQDjSoW/3MWh5ctZqOriiO43D6qcfx/veeq0BQJowCQBERkYOw+pm13LnyMaK9cUpLIlz5gWUsPfPEPdZrbeviwVWr2dXWxbbtu+js6uWvf9vIn596ic9+6oNjvkbkcFMAKCIi8ha1tnXxs1/cz46d7Xg8Ns0tHXR03o1rDHMbpgPkWgaj0RgdXVF2NLfRE+3DH/DiZB12NLdx58rHaKifppZAGXcKAEVERN6iTZu3s6WphaLCMIWFBexq7+KVDY388D/vIhQKYFkW4XCISGEBS05fRGdnlJbWTlzHxePx4A/48Pt8RHvjRKMxBYAy7pQFLCIi8lZZ1pv/b0hnMsRi/WCgtCTCjp3tbN/RRnVVGcYY7rj7D7S0duJkXbKOS9ZxcBwXx3EpLYlQWlo0sfsieUkBoIiIyFs0t2E6DbPrGBxM093dSzqTpbS0kK6ePvr7E2QyGVLpDIXhEFu3teL1eZgxvZpAwI9xDdlslqrKEq78wDIANmzcSmtb1wTvleQTdQGLiIi8RTXVFVx/zaXcee9j7NjRRl9sgK7uXna19eC6LrZtsfGNJsrLSrBti8JwmHQ6QyDgw3EcwqEg7734bABu/tEdxPsTRAoLuGTFUiWFyLhQC6CIiMhb1NrWRXlZMaeedCyuMSQGk6RSWVzXxbLAGMOG15oYHEwxY/pUbI9FW0c3/f2DAHi8HlY9+gx3rnwMYwwNs+owxvDgqtVqCZRxoRZAERGRt2D1M2uHWv52ttHS2kkmk8Xjsclmnf8bGmhZOK5LKBigtz3Otu27SKUy2LZFRUUJZcXFNG1rxXVcTj/tODwem+qqchqbmpUUIuNCAaCIiMgBWvvKJm7+0R1s39HGQGKQwWQaCwgE/AAYM7SeZVnYlsVfX9qIz2cTDoVIpzMYA4mBFMFAEtu2CIUCtHV0U11VTltHN5HCAiWFyLhQF7CIiMgBWP3MWr7zg1+z/rUtRPvi2B4br9fGNQbHdfB6Pbl1XXco2zfaG6Ozs5feWBxjhrqGY/EB2jt6qKwo5QOXnodlWTQ2NWNZFpesWAooKUQOP7UAioiI7MfwbB7GGDweD9msSzbrEAz4cd0UxhgKw0Hi/YO4rsEYg+O4ABjAuCb3XpZlURAKUloSYfGpC1l86sJc0ejGLTuVFCLjQi2AIiIi+xGNxoj3J2iYPY3S4gi2ZZHJZHFcQ0FBkPrZdVz87qUUFhZg21ZuLOBYbMsiEPCRTKZz4/0WzJ8NkAsylRQih5sCQBERkTG0tnXlumJLS4uIFBYwkBjk1JOPpaS4ENu2yaQzpFIZOjqiPPfCqzhZB6/Xi8dj47HHvsUaDL29/WzbsWvUZwwHmdVV5bmkkHh/gmg0Ns57LvlAXcAiIiK7Wf3MWh5ctXpUV+wlK5by4KrVZLJZTlg0lx3N7fT2xvB4PAwMDLJ95y7S6ez+39yA6zrE4wP8+Gf3UFJcSGVFKUtOX0SksEBJITIu1AIoIiIywsjxfiO7Yhvqp/H5f/wwn/37D3LVh1dQVVFKKBhkcDCFa8Bx3FGJIHtj2RbpdJbEYIpNjTvY1dbNrrYunl6zjgXzZ9PX18+rG7fkkkJUEkYOB7UAioiIjJAb7
zerbo/6fAvmz6amumKoW7gkwoaNW4j3J4ChzF+f14tt22AMrjFjvr95898sC7weD9lsls6uXvpiA0R742SyWXxeL0tOX6QEEDls1AIoIiIywvB4v7aObhzHHbMrtqa6gmXnLcbj8QwFdK6L9WZiiGWx1+APyI0RDBeEyDoOPp+XaDRGZ1cUv8/LcfPrKSku5Ok165QAIoeNAkAREZERaqoruGTF0j3q8+3eFTu3YTrHL2zgmIbphEIBbNvGALY9dgqwx7YpKS6kOBKmrLSIYNCPbdv09MSwPTYVFSXMnlmrBBAZF+oCFhER2c3SM0+koX5arj7fWOPwSkuLCIdDxAcGKS8tHir8HB1KCnGyyVwroGWBbdtMr61i0XFz+du6N+jvT5DJZKmsLGX6tCksmD+b5uYOJYDIuFEAKCIiMoaa6op9JmDUVFdQWVZCd08fFuDzeamtqaKltQOvz0s6ncmtGy4IkXUNA4NJTjhuDpFIAR2dUXp74/i9vqGp5QYG6Yv10z8wmMs8VgKIHC4KAEVERA5Ca1sXnd29lJcW4/N68HhtOrv6cFyXcDgExrxZFNqioryYgoIgyWSaY4+ZhcdjEwoGeH3TdsrKimmYVUdbRzeDyRQffN/5zJ0zQ8GfHFYaAygiInIQotEYjuty8onzCBUEicUHiMX7Ma4hmxlK7rBtD5WVpcyeVcvsWbWUlkRyySU7WzoAqKupyo37cxyXyopSBX9y2CkAFBEROQjD2cKBgI+F82fjuga/30dd3RQ8HhvHdTHGpSgSpqK8hCsvW8aVH1iWSy4JBv3Uz6plIDG412xjkcNFXcAiIiIHYThb+MFVq2nZ1YnX66GqopSCUICiwqns6uhi9owaPvP3H2Ruw/Rcq97I5JLGLTt5cNVqGpuaNe5PxpUCQBERkYM0nC28afN27r7/j/T19dPZ1Ut3Tx/hghAfueJCzl5y0qjXjEwuqamu2G+2scjhoABQRGSSaG3rUiAxAYYDOtu2eXDVarxeD9PrpnDRhUu49OKz91h/9+9pf9nGIoeDAkARkUlg9TNreXDVauL9iVxXoqYRG18HUjtQ35McKZQEIiJylGtt6+LBVasxxtAwqw5jDA+uWq1pxCZATXVFbr7g3el7kiOJAkARkaNcNBoj3p+guqpc04gdwfQ9yZFEAaCIyFFuuBzJcH05lRM5Mul7Ojxa27rYsHGrWlLfIgWAIiJHueFyJMP15SzLUjmRI5C+p0Nv9TNruflHd/DDn97NzT+6g9XPrJ3oTTpqKAlERGQSOJAEBJl4+p4Ond3HVLZ1dPPgqtVEImH8Pq+O734oABQRmSRUTuTooO/p0BgeU9kwqy43pnLNX9fzk5/dg8frUZb1fqgLWERERI46u4+p3Lqthc6uXnw+r7KsD4ACQBERETnq7D6mMp3JUlVRyuyZtcqyPgDqAhYREZGj0sgxlelMlt/c9QhtHd1UV5Ury3o/FACKiOSpA5k6TtPLyZFu5JjKeHyAB1etprGpOTcGUOf22BQAiojkof1NSdba1sUfn3yBNS+sx3FcDaiXI9bIQO5As6w1JZ8CQBGRvLO38hkN9dOoqa5g9TNruXPlY7y07g18Xi8nHD8nN6B+eB2Ridba1sWfnnyBZ59fj+OOfkjZ1zm6v/M/XygJRETkKPdWZkJobevixZc20tEVHXNKsuGbYzKVJhjwEwr52dS4k8JwSAPq5Yix+pm1fPOmX/CTW+9l/YYtZDMOPdEYd658bL+/A03JN0QtgCIiR7G30pU1vG5nV5TNjTvp642z6Li5DCQGc4Plh2+OZSURthhDJuNgTJadLR2UlRZpQL1MuNa2Lu5c+RidXVG8Hhsn6/D8i69SUhwh6zj88ckX+NiV797r60eWj8nnZBG1AIqIHKV278oa7qZd+8qmPVoER64bKQyTSWfYuGk7jzz+LG3t3bnB8qWlRQwMDPL8i6+RGEyyq62L3r5+gkH/qAH1mn9VJsofn3yBl9a9w
a62bnqiMbqjfTiOg2Vb+Lxe1rywfp/npabkG6IWQBGRo9RYMyH85Zm1/Nt3biMUClBZUZprERxed0plGa++tpWy0mKwoLCwANcYGuqn5d7XGANAuCCE1+OlqrKEv7v6Uk48fi6gAfQycVrbuljzwnp8Xi+hkJ9kKkhHZy9+vxcLmDNnGtFonE2NO/YZ0GlKPgWAIiJHrd27sp5/8VUat+yksLCAstIislknN7i9tLQIj8dm4xtNDAwMMpAYJNobp7snxo6dbfy/G3/OZz55OZ1dUQoLCzhn6clkMhl8Ph/tnT34fUO3Cw2gl4kUjcZwHJcTjp/DpsaduK6LhcHn9ZBKpXn5lc34fF7uvvdxotEYcxum7zXAy/cp+RQAiogcpYa7sh5ctZpXN25hZ0sHgaCfKVVlxPsTtO7qwuv1EI3G2NS4g6Ztrexs6SCRGCSTyebexxh4+JGn2fD6Vupn1ZFOZygMhygMh9jZ0kEw6M+Njxqr1bGxqZloNJbXN1MZH8MPPcYYjjt2Nk8+1UukKEwoGKC7J4Zt27zzjEX0xfq5+ce/ZfbMmlEt4SOpDqCIiBy1hruyXnxpI7++6xGaWzrYvmMXAKl0huKiMM+/uIH/vGUlsXgCy7YwZijosywACxjq8m1p7SSTdigtjZBKZ2hu6QCgflYtjVt2UlNdQTqTJZt12Lqthdkza/N2AL1MjFEPPa9tId6fIOj3EYsN4Loutm3T1tHD4GCSwcEUjuPSuquTO1c+NqqVevdhDEtOX7TP1sLJSAGgiMhRrqa6glNOms8jjz3Llq3NGAOu6+KxbRxjWPXoM/TFBygIBcGycB2XTCb7ZhBoRr2Xz+ehsyvKjGlTOXbeLOpqqhhIDPLgqtV09/Tx9Jp19PbG6eiK0t7Zw+yZtXk5gF4mztIzTyQSCfPdH/ya7p4+EokUtscim3RwXJdNm7fjOC5YFi++tBHHNXg9FnV1VfzrF67eYxjDy+s37be1cDJSFrCIyCRQU13BGYuPw+fzUlpSSFVlKUtOX0RhQYj+/gS2ZeE4Dh7bwn7zfzDUEjjMcVzSmSyZTJZ4f4K6mipKigupriqnoyvK7x95GmMM7zh1IQvnz6aspIiPXHFhXtws5cji93kJFxYwb+5MXONivRnOWBZkHZdMdug8zmazJJNJ4v2D3PY/v+ffvvurUXUAE4NJOrt6yaSzTKkqy2XS50N2uwJAEZFJ4rxzTuOkE45h7pwZnP3Ok/D5vG8GehaO6xLvT9DdE8NxXWbUTSESKSAQ8OVen806dHT2kEymaO/o5vEnnmfrtlbaOrrxeb1kstlc8dzZM2vxeD255BCR8TQ8FrAwHKK6qpxsNovrGpwRrdvGGJKpDJZl4fXYuMbwu9//hda2rqGEqE3b6OyKEosPUFQUJlwQyqui0AoARUQmiZrqCq68bBllpUWsf20Lr27cSl9sgM6uKKFggFAwgM/rIRQMcOHyJbzjlIXMnFFDIODDsiwsCxzHkHWGuo+jvXGeXvMyg4MpLr5wCZUVpbR1dOM4rsb+yYQaHgsYDofw+byk0kOBHli82bidY1sWtsemIBQgnc7w0suv09fXz2uvN/HcixvIZh0qK0ooCAXz6rzWo5uIyCSy9MwTSaUz3PrLB6ifVUtVZSldXb0URcLUz66jry/Oth27WL+hkWw2S2dXFMuyKCkuBAtiff14PD4qK8soSqWIxROcf+5pXHrx2ZSXFfPgqtU0NjXn6v9p7J8cLrtn6e7+93AC1O8e/gs//fl9BPw+unp6sSyLTCaLz+sdSlpyXGxjcByHwsICNr6xjeop5dTPqqO5tYNYbIDi4sK8O68VAIqITCKrn1nLnSsfY+u2VspKi/D5vBQVhYnFBwj6fbzR3kMwEGBuwzR2tnTg9XioqiwlGPAD0NfXj4WF1+PB9frw+byUv9kaouK5Ml52z9KdNaOGDa9vJRqNU1oa4crLluXGnk6ZUkYg4Md1HUKhIJlMBtv2jxrf6
rqGRCLFiYuOwe/35YYyRAoLaGxq5oPvO5/KitK8Oq8VAIqITBLD2Y1+n5ey0iIGB5Ns39FGIODDjRne2LKDxGCSE46fQ3FRIf0Dg1iWRVGkENd1iUZj+P0+gsEA/QMJHMelflYtc+fMyH1GvhfPlcNv9yzdrdtauO3XD+Pz+/DYFluammlu6eCV9ZvZ8HoTO5rb6R9IkEymsWBozJ/Xg+O6FEXCZLMOxhgCQR+pVBpgjzJGc+fMyLvzWgGgiMgkMbJIs8/n5fVN22lt6yJcECQU8BPtiZFOZ9i0eSfx+CC72rpwHIe+WP9QK8vMGpbVTyPaGyfaG6e0JMKVH1iWdzdGmVi7Fxu3bZveWD8lxYUMZl36+xO0tffQuKUZv99HYjCJcV18Xg+VlaWUlRZjjEtPT4zO7l6Ma8hks7jGZVPjTmqmVrCjuX1UGSOADRu3qgVQRESOPiOnhptWO4VYbIC+WD+zZtbQ0xPDdV38AT/JVIq1696guCjMgvmz6IkO1fUrKRkqAL3svMV5VxRXjhy7T3EY7Y1hAQOJ5FA5I3eoRS+VTpN1HJKDqTcLnBu6u/soKAgyo64ay7LY1daFawyWBa5jSCbTLJg/m0QiSSaT5SNXXEg8PsDNP7oj7+a2VhawiMgkMZwZaVkWjU3NeLwe6mqqmF43hUwmS0V5KQG/j6qKUrKOw8Bgir+tfYPGLc3E4wkCPi/GGJ5es07Bn0yY3c/jkuIIdbVVpFIZBhJJMhkHy3qzzEsyhWuGyr+4rmFwMEVfbIAzT1/ElMoyQqEgPp8H27ZxjYsxLp1dvbkyRn2x/lHdzflUB1AtgCIik0hD/TQufvc7wRiKiyP85q5H6OiM4jgObe1dhEIBdrR04DouicQgMDRAvrAwRMuuLmbNrKW9s0dz+8qE2j3h6A+Pr+HG795Oxhi8Hhvb4wEMrjt6JhvXuIRDQRafsoATFx3DYDLF5sadeLweentjGAPNLe2EC4JECgvAmLyd21oBoIjIJLF75uQlK5Yya0YNz7+4gd7eOJlMFtcYUqk0RZECBhJJXOMChoDfh+u6NLd2UFZalBd10OTINjLhaPEpC5haXU57Rw/pdAbjung8HjweG2MMtm1j2xbGNVRWlORasC+84HS2NLUQ8PuGxgZiiMUTpDNZPrRiKQ3100Z1N6sOoIiIHFV2z5xs6+jmzpWPgQUL588mUlhAR2eUaF+cdCZDSSSCZfeRTmfIZLJ4PB6SqTTBgD9v6qDJ0SP95sNLaUmEoqIwPdEY2ayDZVm4riGTyZBKZ/D6PFy47AxgKKlj4YIGTjrhGJLJNNNqq+jojJLJZPmHT17OicfPBeCSFUvzsr6lAkARkUlg98zJ6qpyXlr3BgAnLToGj8emsqKUVzduATOFnmicQMBHOp0hEing+IUNnHvWKZx/zml5cfOTo4vf56WyooT+/kHS6SylJUUUFoY4bn49Tzz1N/r7ExQWFvDei86iYXbdqKSOBfNm07S9lbaOnlyANxz8Qf7Wt1QAKCIyCeyeOdnW0U1pSQQsRi2rrCjl0ovP5rE/PU+0N04w4Oe8s0/hPAV+cgQrLS1i9sxaBgYGiRQWEO9PEA6H+OR17+fSS85hZ3M70+qmMKWqjJt/dMeolvCm7a185IoL8fu8ew3w8rG+pQJAEZFJYDhzcmRX1pUfWAawR/fW0jNPZPGpC/OuxUOOXiPP7774wKiu2prqilyL3oaNW8dM6vD7vCyYP3uC9+LIogBQRGSS2FtXViQSzrWQDN8o87HFQ45Ow3MAN9RP4/P/+OF9PriM1RKeL0kdb5UCQBGRSWT3wG73zOB4fOCgi9wO34jVaijjZazM9n2dv2O1hOdLUsdbpQBQRGSSGisz+MFVq2mon/aWb4hv9UYs8nYd7Pmbr0kdb5VmAhERmaSGM4Orq8pz46Hi/Qmi0dhbep/db8T5NFuCTJy3c/7WVFewYP5sBX/7oABQRGSSGjkeynHcPcZDtbZ1sWHj1
v0GcocqkBR5K/Z3/srbowBQRGSS2n1OVcuycuOhVj+zlpt/dAc//Ond3PyjO1j9zNq9vo9uxDIR9nX+ytunMYAiIpPYWOOh3urYKg2sl4mi8XyHjwJAEZFJbvfM4LFmDWlsaiYaje31BqsbsUwUlSw6PBQAiojkmYOtlaYbscjkoTGAIiJ5RmOrREQtgCIieUhduiL5TQGgiEieUpeuSP5SF7CIiIhInlEAKCIiIpJnFACKiIiI5BkFgCIiIiJ5RgGgiIiISJ5RACgiIiKSZxQAioiIiOQZBYAiIiIieUYBoIiIiEieUQAoIiIikmcUAIqIiIjkGQWAIiIiInlGAaCIiIhInlEAKCIiIpJnFACKiIiI5BkFgCIiIiJ5RgGgiIiISJ5RACgiIiKSZxQAioiIiOQZBYAiIiIieUYBoIiIiEieUQAoIiIikmcUAIqIiIjkGQWAIiIiInlGAaCIiIhInlEAKCIiIpJnFACKiIiI5BkFgCIiIiJ5RgGgiIiISJ5RACgiIiKSZxQAioiIiOQZBYAiIiIieUYBoIiIiEieUQAoIiIikmcUAIqIiIjkGQWAIiIiInlGAaCIiIhInlEAKCIiIpJnFACKiIiI5BkFgCIiIiJ5RgGgiIiISJ5RACgiIiKSZxQAioiIiOQZBYAiIiIieUYBoIiIiEieUQAoIiIikmcUAIqIiIjkGe9Eb4DI/iSSGVLZLAGvl4Kgb6I3R0RE5KinAFCOaLu64+zo6CPjuPg8NtOriplaHpnozZI81drWRTQao7S0iJrqioneHBGRg6YAUI5YiWSGHR19ZB0Xv9cmk3XZ0dFHcThIQdCnlkEZV6ufWcuDq1YT708QKSzgkhVLWXrmiRO9WSIiB0UBoBxRRgZ1qWyWWCJF1nFxjcG2LLwem1Q2S193Ui2DMm5a27p4cNVqjDE0zKqjraObB1etpqF+mloCReSopABQjhi7d/eWhIMMpjIYA0G/h0zWxXFcEoMZmrtie20ZFDnUotEY8f4EDbPq8HhsqqvKaWxqJhqNKQAUkaOSAkAZN8Ote65jsD3WqK7b4e5egJDfw0AyS2NrD5msi+u6ZBwHv8dDQdBH1rjEEykyWQeXoVR2n9dDKpulAAWAcuiVlhYRKSygraOb6qpy2jq6iRQWUFpaNNGbJiJyUBQAyrjY1R1n664o8USKVMYh4PMQKQgwe2opU8sj9A0kiSdSWBakMg7pjEMy4wDgscFr22CB3+ch1p+ifzCNbVuEAl5SaQcnPRRYihwONdUVXLJiKQ+uWk1jU3NuDOCBtv4peUREjjQKAOWwGNnal3Yc1je1k0hlSKaHgrrBdJZ01iGTdYgNpGhqi9I7kALAAkaGco4LxnXweGziAyl64oOksg4W4BpDwOfF67GxPda476fkj6VnnkhD/bS3HMgpeUQkv418AASOmIdBBYByyA2P5YsnUiTTWbKOm2vNGymVydITd+mJD5LJurnlY7XjuYDruBjj4vV68NgWxoBtWxQEfAT9XgJenc5yeNVUV4y6aO+vZU/JIyL5beQD4MDAIMYYPF4PPq+Xiy9cwqUXnz1h26Y7phxSw2P5Uuksg+lsbvzeWBwXHNfFZuygbyxZF7LpodY/y4JM1sWyLKZXFSsBRMbVgbTsKXlEJL/s3to3/ABYXVXGE3/5G4ODSSKRMIPJFJsadwBMWBCoAFAOqVQ2m2v5S6azWBa4+4nu3H3/85gMYAz4LJhSEh6zBEx3X4L+VJrCgJ/y4oKD+BSRsR1oy97uySNbt7XgZB1a27py/65AUGRyGH4o7OyK4vN6OWHR3NwDYHe0D9e4DCSShAuCTJ1Swa62Ln7/yNPMnFGD3+cd9+uBAkA5pFzHkExn36zbB1nHHHDr3sEI+X30DiRJJDOjWgA3bOugaVeUrGvw2hazppayYGbVYdwSySf7a9lrbeti0+btYFksOX0RT69Zx3N/fZWOrig+n5cbbvw5VRWlzJpZozGBIpPA8EPhr
rYuOrt6icUG2PB6E7U1lRSGQ1hAIpEkk3UoLAyTGBykqChMW0c3P/nZPXi8nnEfI6wAUA4p22MR8HtJpjJk3cMb/NkWFAT9ZBx3VAmY7r4EW1p7MGaopEwm69K0K0p1aaFaAuWQ2FdZmNXPrOVnv7ifLU0tADTMruOUk+azY2cbtdWV9PTGyKQzuTFBGhMocvSLRmN0dkXp7OrFsmBqdQW72rvIpDNs39lGc0sHjuMChubWDqZUlVEUKaC9M0pFeQllZUXE44lxvR4oAJRDKuD1UhDwMZBMH/jAvoPkGujs7ac4HByVANLcFSOZzmLbFm7KDBWLdgz9qTTlKACUt29vZWEA7lz5GDt2tlNUGAbLsPGNbWx8Yxuu6xIpLCCVylAztZLevjiRSAF9sQGNCRQ5ypWWFpHNOnR391JdXUEiMUhRJAyWRSaTpX5mLQ31dWzYuJWdLR0UhALYto1tWbTu6mL7jjZ8Pi+RwoJxux4oAJRDYuQUblWlBXT09mPYs6TLoZZxDPFEmo5oP1WlhfQNJOmOJbAta+izXZdEyiXg81AY8B/GLZF8M1ZZmA0btxLtjePx2BQWFpDOZOjvT+DzeSgqKiSTydKfGKStvYtIJEw8nlBBaZFJoHHLTlLpDLH4AL19/RQVhSkrLWJXezeZdIbKilIqKkpYfMpCkqm1+Pw+0ukM7R09hMMhptdV09UdZTCZIp3Jjss2KwCUt233KdymlIQpCPpJZ5LYbyaBHK4gcLjy36aWbnZ09pFMZxlMZSkIehlMOThm6JNrKtT9K4fe7mVhSkuLKC2J0NzSQX9/gp5oH4PJFI7jxWMPEgz5sd5sEbBtC8u2ci2HGzZuVVKIyFFg9/JPw+P/An4foVCQ3r54bhzgrJm12LbF4GCSV15tZGtTC21tXSw8tp6qGVNp2r6LZCpNW3vXUAtgURi/b3xCMwWA8raMnMKtqMDPYCpLU1sv5s3U38M9OYcBUlmHbL9DwufBtizSWQdjbKaUhUkkM3hsi1lTyg7vhoi86YzFx9HRGWXbzl10R/swBtKZLJlYP3a/RXVlOTNn1gytbOCVVxtVKFrkKDFW+afysmI6u6Ls2NlGOpOlsDBEOpXFGEMylaJ2aiV9ff00t3ZijAEMr73eRG8sTrggSF+sH9cYEskUBtjUuIMF82cf9n2xD/snyKSWymbJOC6hgBfLsvB6LKL9SVIZB793/E4vx0A64+D12AS8HjKOS6w/RTrj4BpobO1hV3d83LZH8s/qZ9Zy84/u4Kk164hECjj+2AYKCkL4fF4wBsdxyWQc2jq7yWQynLToGIwx3HXf4wwMDNIwqw5jDA+uWp0rEyMiR47dyz8N/17TmSx9ff3sausmOZgkmczg9XowxtC6q5PGLTvp7I6SyWTxemyMgV3tXaxdt4ld7d3E4wkGEknKSoqYXjeFp9esG5drgAJAeVsCXi8+j83gm087sUQax3XxeixCgfEtzOwaSKazeDw2Po+N12tTGgkxpTQMwI6OPrr7EkT7B0kkM+O6bTK5tbZ1cee9j9ETjZHNZmnc0sxf175GPD5AOp0ZVQvTGENHZ5TEYJJIYQGDgyk8HpvuaB+F4RDx/gTRaGzidkZExjRc/qm6qjxX/inen6Av1o/X58X22GBZuK5LMjlU8sXJOvTG+nPDPgrDQ/cjY8BxXGzLwuOxKYoUsOi4OZxw3NxxuwaoC1jeloKgj+lVxezo6COWGMr89do2tm29mfI+vjKOi8d2CQd9Q+MpCobGXIUCXtqjA7y2oxPLsvB5bKZXFY9ZQFrkrfrTky/w0stv4PXY9Pb14/N6SSRSuGNUQc9mXXa1ddPc0kEmkwVj+NvLbxDw+3AclxnTq5UUInIE2lv5J4yhvKyYpWecwPN/20BfXz+ua7AsKCsrwTWGnp6hgC4+MJB7P9uysD02jusSiw1gwaiSUoebWgDlbZtaHuG4WVNYMLOS42dPoaQwiOMYsuYwDwAcg8e2CAe9l
EaGCm8Ot0zGE2lS6Sy2ZVFUMJQNvKOjTy2B8ra1tnXx7PPr8Xm9WLZNOp2lpzdGOp3e62uSyRRrXljPG4078Pm8OI5DNuuQzWbp70/Q3tEzjnsgIgdiuPyTZVk0NjVjWUNJXHPnzCBSWIDP56WyvISS4gjBgJ9AwE9vX5xUMo0xBtc1pFKZXOOIYWhoiGVZpNNZtu1sy72nysDIUaMg6MsVYp43vYKtu6Kksg42MJAan5R2y4LicIBEKkt/Mg7GEPR7yTgBXHfov0e2CMYS6VEFpEUORjQaw3FdFh03h9deb8J9c7yfx2PhjJEFZVng9XpYMH82sfgAW5paKC2JkEqlsWyL5tZOfvyze7jqwyuUDCJyhBmr/BPAJSuW8qs7VhGLJygvKyYUCtDd04fjuKRGPAwGg36SyaG/PR4Pruvi9/uYOX0qH73iQk45af64VQJQACiH3NTyCMXhIC3dfXREPWR7+0llDn93cNDnYTA11MoXCnhIpR3SWYe68giRggCNrT0MprKEAl4GU1l8HntUAWmRt2K4FEQ6kyVSWIAxhiWnL2L1My+xpamVvTWA+3w+QqEAFeUldHRGcV2XdDrDwEASgLraSvw+r2YIETlC7V7+CYYCw0gkzE9+dg8+n5ftO3fRuqsTY8C4DgDBgI+qihJ6ojESgymKi8KUl5YQ6x9get2UcQ3+QAGgHEY9sSSO45J13MNeEBrA47FJZ1wiBV5syybgh4FkFstjUV5cQDrr5MYqDo8BHDl/sMiB2r0UxKwZNTRtb6W9s4dZM2tpa+8mk3EwxuSKulpAIOjHtm2m100hHA7S2d2Lk83S3dOH6xo8tkUoFGT2zNpRcwuLyJGrta2LTY07wBguunAJj/3pedrau7HtoVF2w12+yVSGbNYBLPx+H0VFYRLJJKFQgIsuXDLuv3UFgHJYDJeHSaQzjEcuiAV4LAvbhlTaIeAf+n+vbeVmABlumRyesUTBnxyM3UtBtHV007S9lY9ccSF+n5cn//Iia9dtwjVDLXpFkTA+v5ey0mL8fh+WBZHCAl5+ZTPx2ACuMdj2m49IlkUsNsDWbS2aIUTkKLD6mbXc8ssHaNzaDED9rFpOX3wcO1s6iMUGSKZGjzNv74xSVlrE1OpyIoVhjDFcuOwMLr347HHfdgWAclgEvF6SqQx9/alx+TwDWLZN0GeRTGfJJg1e22LW1NJRM4CMHKsocjCGS0E0zKrLlYJobGrG7/NSWlrEhtebiBQWECkMkU5nyGazeL1eZs+sYfbMWto6uumJxggGA/gDfpLJZG6soG1ZxOIDpDNZPjROA8FF5OAMl3/avqONokgBGIsdO9vx+30MDCT2CP4AQsEA//DJy7Eti/997Fksy2LDxq088PCfmdswfVxnA1IAKIfNQDJ92Lt9hwW8NpXFIbKOIZnOUlMRoawwpOnf5JDbWymI0tKiXEJIdXU5G17bQjqTxXUcysu9VFWW5gLG1l1duI6D12vj8XiwbRsnm8Xj8TCnYRqf+eTlnHj83IneVRHZh2g0RjQ6NPf3UGsepDMZ+mL9ZDPOmK/x+320t/fwyOPPkslkKYqE6enp5alnX2bm9KmUlxdz+mnHcf45px32QFABoBwWOzv6SO3lB3CoWQxlVboGQgEvGceloriA0sLQuHy+5JfhUhAPrlpNY1Nzbjqo4Yu1x7bZ2tTy5uBvQ9YxdHVHeexPL3DKSfPw+32Ulkbw2BZ+31Br9OBgCsu2CRUEuGTFUgV/IkeB0tIiSkuH5v6O9w+AGap/WxAMUFZWRHtXD4OJFO6bGWG2bVEztZLXNm4hk84ytbqCltYOOrt7sS0Lx3Fo3NrMSy+/wZrn13PlB5Yd1koACgDlkNvVHWd7Ry9j1MA9LAzgugbbQtm9Mi72VgqiprqCBfNm8cc///XNxCdDMODHGJfBZIpnn1/P8QsbuOajF9Pd08f25nYGkylKiyMUFxcya0YN559z2sTunIgckJrqCq68bBmx2
OgxgJddeh5Pr1lHKp1h0+YdpNJDXcHhghCLTz6WbTt3UVQUpi8WJzGYxHEcggUhEokUljXUTZxMpQ97JQDdJeWQSiQz7OjoI+Ab31Mr6PcymHaU3SvjZqxSEAALFzZQXBTGtmz6BxJ4vV4Sg0n8fi+pVIb0mzeD4UHfDz/yNJlslqqK0nErACsih8bww+BwFvDcOTOoqa6gvKyYxGCS9o5uslmXWTOmUjO1ks7uXsLhEJUVJex88wHQY9uEgn7SmSyO4+Dx2NTVVNHe2XNYKwEoAJRDajj7tzDkw2PBGHVwD4uqkjAzp5You1cm3NyG6RwzZwZbmlpwXJfMYHJovk/bZkpVGUWRcO7J/tKLz2bxqQv3aEkUkaPH3uoCuq5LNBpnTv00SooLcRyXxqZmlpy+iHXrN5PNOqTSGSKFIVzX0NzSiW1bNMyuYyAxeNgrASgAlEMq4PXi89gMJLMEAz4GxmmqtfboAMdMq1DwJxOuprqC66+5lDvvfYytTS10dfWSTGeIRMLMmzuDabVTRtX421tLoogc3ebOmUFtTWUumBtOGDv/nNM4/5zTiEZjbGrcwdNr1tHRFaUoEsbr9eD1ecdlSjgFgHJIFQR9TK8qZuuuKMYYgn7PmwPhXRwDtgWueTNxw2Pj8Vgk0weWLGIxNI2WZZGrLWhZYBlIZbL0DSQVAMoRYeQYwda2Lu5c+Rh+n5dptVPGdbJ3EZk4+0oYa23rAmDxqQtH9QIA49YjoABQDrnRU8ElyDoutm0R9HoYSGXo7U9iWRbhgJeBVBa/1yabddlXvWgL8Hk92DZ4bIuBZBabof/2eT3/t5LIEWK4ZW/B/NkE/L69Zg2LyOQ1VsLY7jMJXbJi6ahsX9UBlKNaQdDHnNoKasszo2beSCQztHTF2NHRS3wwTTrjEvR7KC0M0jeYIpt1KY+ESGUcBlOZofIuriHg8xApCFBbESHg9/LKlnaS6Sy2bWEB4aCf4oLgRO+2yJga6qdx8bvfOWqQuIjkh5HDPMaaSWii5v1WACiH1e4zbxQEfZRFQrR0xYgE/STtLAZIZhwKA36SVhbbtimN+Jk3vYLiwiCuY7A91ugEDwNbd0VJZR0CXg+zp5aq+1eOSHs87du2AkCRPLW3mYQmYt5vBYAyrnZ1x9nc0kPvQIqA1yYU9JFKZ0mmswTCAebUlVNVGt5vNq/m9ZWjwZH0tC8iE29fMwmNN3vcP1Hy1nCNQI9tEfB5cFxDKp3F47GxLQsD9PYnSaayBxTQFQR9lBaGFPzJEWv4ab+6qjz3tB/vTxCNxiZ600RkAgwnhliWRWNT87hk++6NWgBl3AzXCCwq8GNZ0DeQIpnO4nFcigoClBeHGExl2dHRR3E4qMBOjnpH0tO+iBwZ9jaT0HhTC6CMm+EagYOpLIUhP0UFAYoKAhSG/JQXh7AsKzeXbyqbnejNFXnbjqSnfRE5cgxXCJjIa4FaAGXcDNcI3NHRRyyRxuexqZ9aSnvvAIOpLKGAV3P5yqRzpDzti4iMpLusjKuxkjcCfu+ooFBz+cpko9k+RORIowBQxt3upWGU0SsiIjK+FADKEWH3oFBEREQOHyWBiIiIiOQZBYAiIiIieUYBoIiIiEieUQAoIiIikmcUAIqIiIjkGQWAIiIiInlGAaCIiIhInlEAKCIiIpJnFACKiIiI5BkFgCIiIiJ5RgGgiIiISJ5RACgiIiKSZxQAioiIiOQZBYAiIiIieUYBoIiIiEieUQAoIiIikmcUAIqIiIjkGQWAIiIiInlGAaCIiIhInlEAKCIiIpJnFACKiIiI5BkFgCIiIiJ5RgGgiIiISJ5RACgiIiKSZxQAioiIiOQZBYAiIiIieUYBoIiIiEieUQAoIiIikmcUAIqIiIjkGQWAIiIiInlGAaCIiIhInlEAKCIiIpJnFACKiIiI5BkFgCIiIiJ5RgGgiIiISJ5RACgiIiKSZxQAioiIiOQZBYAiIiIieUYBoIiIiEieUQAoIiIik
mcUAIqIiIjkGQWAIiIiInlGAaCIiIhInlEAKCIiIpJnFACKiIiI5BkFgCIiIiJ5RgGgiIiISJ5RACgiIiKSZxQAioiIiOQZBYAiIiIieUYBoIiIiEie8U70BoiIyNvT2tZFNBqjtLSImuqKid4cETkKKAAUETmKrX5mLQ+uWk28P0GksIBLVixl6ZknTvRmiRw2euA5NBQAiogcpVrbunhw1WqMMTTMqqOto5sHV62moX7amDfG1rYuNjXuAGOYO2eGbp5y1NEDz6GjAFBE5CgVjcaI9ydomFWHx2NTXVVOY1Mz0Whsj+Bu9TNrueWXD9C4tRmA+lm1fPIT79PNU44ae3vgiUTC+H1etQi+RQoARUSOUqWlRUQKC2jr6Ka6qpy2jm4ihQWUlhaNWq+1rYs7732MLVtb8Pu8uMawectOfvnrh/faWihypBnrgWfNX9fzk5/dg8frUYvgW6QsYBGRo1RNdQWXrFiKZVk0NjVjWRaXrFgKwIaNW2lt6wKGbpxbt7bQ2xenoyvKrl1ddHf3se7VzfzpyRcmchdEDtjIBx7Hcdm6rYXOrl58Pi8Ns+oYGBjk9jtWsfaVTaNe19rWNer3IEPUAigichRbeuaJNNRPyw2Kb9yyk5t/dAedXVF8Xi8XXbiEmTNq6IsPkM1myWSzGAMWFhh49vn1nHfOaWoFlCNeTXUFS05fxMOPPE1ndy8+r5eqilJmz6ylubWD1zdtpyca4zs/+DWf/MT7aKifxh+ffIE1L6zHcVy1EO5GAaCIyFGuprqCmuqK3BipXW1ddHb1EosN8EbjDj74/vMpCAUxBrJZFwDLAtu22d7cxosvbeSUk+YrCJQj0nDW76bGHTy9Zh3ZbBa/18uZpy9iw8atbN3Wwrr1m+mJxrAsi61Nrfzbd39F9ZRyNr6xDZ/XywnHz8EYMypJKt+ziRUAiohMEtFojM6uKJ1dvVgWTK2uYFd7F39+6m8kBpMEAz5SqTQGMAZ6e2P09fXzi9sf5He//wsXX7iExacuzOuboky8kYFZ45adPLhqNR1dUZq2tTK9bgonHDeXto5uNmzcyoL5s/nd7/9Ce2cPwUCAsrIislmHzVua8fm8BAN+/H4vmxp38o5TF9DW0UM0Gsu9bz5nEysAFBGZJEpLi/B5vcRiA0ytriCRGKQoEsZ1DKFggO6evqGmP2MASKYyALzRuIPwrk7++rfXqKwooaAgREV5Mdd89OK8uynKxBpZ5sXjsenr66d6SjnVlWVs2ryD1l1dlJcVU1oSYf1rW+jtjeM4DhiDawx9ff0MDiZJpTJk0hkCAT/GNaTSaXa2dFBWWkQ6k31L5ZMmKwWAIiKTRE11BRdduIQ3Gnewq72LokiYyooSiosLGUymcBwX27ZwHDPqdfF4gmQyRSbj0BON4fd78ft8tHd0591NUSbO7mVeNm7axpamFurfzPq1bZvm1g6SyRRgkclmmVJZxpSqMl7b2EQ8PoBlAViAYVPjTqqry0km09i2RTDo55IVS/H7vAdcPmkyUxawiMgkcunFZ/P5z3yIBfNnU11dztTqCpaduxifz4ttWbiuO+brMhln1N+WDY1bW1jzwvrx2GyRXJmX6qpyEoNJCkIBHMehubUDx3FJpdJ4bBuPx4PjumSzWQoKgrz+xnZs2wLebNw2Btu2SaUzbN/RRld3L16Ph7qaSlxjSGeyo7KJ91Y+abJTC6CIyCRz6cVnM3NGDTub25lWN4Wnn32Z5tYOLNsa7v3dK8sayhC2LZuMm6G/PzE+Gy15b7jMy8vrN+WSmIyBWKyfzYMpfD4vS05fRGVFCel0lmeef4XNW3bSHe0Dy86NbjCA4/zfg47juLTs6uS/f/UQ9z/8F2ZNn8qxx8yis7uXxqbm3BjAfGr9AwWAIiKTzshxVN3dfWxpaqavr590Jrvf1xoDruuSyTiUFBVy/MI547DFIv9X5uXmH/+WTCZLUdHQEAa/38dJi44hGPTj83lxHJfEYJL6W
bWk0hkymSyu4xLw+3LjWnfnugbLcnEyDus3bGHbjl0sPLaec886hfPztAySAkARkUlk5DiqKZVlrF33Bv39Cdz9Nf2N4PF6KCku5OMfXsGJx889jFsrMtrchunMmllDdWUZ4XCIzq5eXly7kWQyTV+sn/aOHizLIhQKcMX7L+DM0xfxpX/9EVu2teDzeclk3aGkkDFYWKTSGbDA6/FgWRbr1m/m/HNOy8uSMAoARUQmkZHTZfVE+4a6cjNZss7YY/8AbNvCsiwcx8VjW/h9Pqqryzl+YUNe3hhl4pSWFlFVUYrjujiOy7r1m98M+Pxs2xHHY9tMn16NMYYNr2/lfZecwzlnncK2nW2k0lm8XnuvAWDWcUgmU/j9Prw+L3U1VbR39vCnJ1/g5fWb864kjAJAEZFJZHgc1dZtLXT19NLd07fP4A+GuseGRk5BMBRgSlUp0WicH/30birfvBkP3xhHzjqigFAOteHpDR9ctZrNW3YS709QEArw+hvb6eyK4rouvbF+bNtiy9YW7nvwCbp7+jh+QT07mtvp6u7Dtq03z+nRLBgaBmENjQvc0tRMcXEhzz6/nlAokCsJc+fKx3CNYW7D9El9jisAFBGZRGqqK5g1o4bf3P0HOjp7cBwHr9eD67oY12B7bIwxBAN+XNcllcow8lZZVBimrLSYto5utm5rJRIJM3/uTNo6uvnZL+6nuLhQ02rJYTU8veGaF9bznR/8Bo/HJhQM0LKrE8dxCQUDACQGk6z6w9MURQqHip63dWNbFn6/D+O6OC5ks9lcC3ckUoDrGEqKC3Edlx3N7XzwtONY/9oWqqvK8XhsUqkML617g2hvnNqaykl9jqsMjIjIJNLa1kXT9laKi8J4PB78Ph8AheEQwVCQ4qJCKitKWXL6IsLhAgoKggSD/tzr+2ID9ET7yGSGbpx1NVV4PDbhghBbmlpIJtM0zKrLTavV2tY1Ubsqk1hNdQXz5sxganU5oVCQ/oHBN8ftgTEml6y0s6WTLVubefb59fj9XgIBP16Ph1AokCt5ZNs2kcIQ6VSWKVPKOPP0RSxdcgKzZ9Zw3MKGXEmY3r5+1q3fjM/rZW7DtEl/jisAFBGZRKLRGB1dUVKpDH6fl1AoiMfjIZXOUBDyU1tTyayZNfT29TOQGCSdGcqa9HiGbgfpdIZYPMGMadXMbZjOQGIQx3Fpbu0AYFptVa54brw/QTQam7B9lcmttLSI2TNrqZ9Vy4mLjqGsrJhQMEhhYQEGF6/HQ2VFCZUVpcT7E0OZ6yWFlJUW4fF4KSoKM7d+GtPqplBSHAELiiIFQ2MMHZfKilLmNkznkhVLsSyLzVt2kslmOeH4ORQXFU76c1xdwCIik0hpaRFO1qEv1k9ZaRHtHT1ks1k8Hg+lpUUUhkNMn1ZNYThE49ad9ETjQ8kfHg8e20NRUZhP/91lXLjsjNx8qY1NzQQDfhpm19E/MEhhuCBvi+fK+Bk5HjDen2BuwzQ6Onpo74ySTmeZUlXG8QsaKCkupLOnl6nV5cw/ZhadXVFi8QH8fh9lpUVgYEdzG6WlxdTWVO1R+6+muoKG+mls2rydu+//I36/Ly8KRCsAFBGZRBq37CSVztA/MEg8PoABSooKKSsrZmp1OVu3tXLsvFkUFxVSV1tFtDcOb3anRQoLqKut5B2nLhx1YxxO+hgZEOZr8VwZX8PjAaPRGJsad/DYn54nEPDT1t5NTXUFdTVVtHV0M3P6VPx+H9t27qIwHOJdF5xOpLCAlQ/8iS1NLQA0zK5j2bmLmdswfY8kpuHz3bbtvDnHFQCKiEwSwzUAZ0yrprgozEsvb6Iv1k+4METD7DqmVlewdVsrO1s6MAai0TihYJBg0EcymSGVStPdE+P5FzcA5G6SwzfA3QPCyXpjlCPL8Hn29Jp1hEIBzlpyEi+v38SO5nZe3biFbNYhGPDj8dj09fWTGEjy9Jp1eGybdDrDsfNmMa22io7OKL9/5Gn+4ZOX7/XcHRlwT
vZzXGMARUQmiZFzqS6YN5tj583Ati0GBpI0NjWz7tVN1NVWEQz62bxlJwaYP28mrmuwbQuv10sg4OOXv36Ym77/P9z8oztY/czaUZ9RU13BgvmzJ/WNUY48I89tj8fmhOPmMrW6gul1U0il0kQKC6goL6G1rYuW1g6mVJaRTKXZ2dLBtNoqevv62dLUwsvrN/Pjn92zx3k9Ur6c42oBFBGZJIZrALZ1dFMYDrFjZwclRYUA7NzZDpbFCcfNYdm5iyktiXD3/X8kmUzT3d2LZdtYDM2WkHlzfJXjuDy4ajUN9dMm/c1Qjmwjz+1wQYiX129iZ3M7mzZvp39gkEhhAV6vh77YAB7bZtuOXcycPpXXXm+icUsznd29DA4mKSstwu/z6rxGLYAiIpPG8KB5y7LY1DiU0VhbW0liMIXtsfF4LNKZDE+vWcfcOTO48rJlBIP+oULRBmZMq2YwmaKoKEy4IDTpsyDl6DF8bre1d/O/jz/LxtebiMUGsLDwejz0ROPEYgN43qxzubO5nc6uKPWzasG26InGCIWCzJs7g9kza3VeoxZAEZFJZXgM06bGHfzq179nw+tbMWaoeK7jOMTjCTq6okSjsdy6f3ryBZ59fj39iUF8Pi+VFSUUhIKTPgtSji4N9dMoLi5k1vSpvJHK0Bfrpy/W/2bQ5+K6LkVFYXxeL/H+BOlMlk9+4n1EImF+/LN78Pu8TKudovP6TQoARUQmmeHEjZ0723h14xYM4DgOVZVlJAaT+Lze3M2vprqCj175bs4757RcpuXTa9blRRakHF2i0RiO4zK9rpoNG5uwLQvXGLyWjYVFOFzAWWeeyEAiSSaT5R8+eTknHj8XgKs+vCJvsnsPlAJAEZFJ6rxzTuPZF9bTtK2Fvr4BYvEBQqEAF1+4ZI+b33DQuGD+bBafujAvsiDl6DI8DrAnGiMcDuI4Dql0BsuGgnCQmdOrib85HvCSFUtzwR/kV3bvgVIAKCIySdVUV7Bg3mzWb9jCYDKF3+/j3cvO4NKLz97v63SDlCPN8DjAO1c+hsfjoay0mLq6SnxeLyUlEf7u6kvx+7yjArzWtq5RQZ/O6/+jAFBEZJIanhd44fzZRCIFxOMJunv6aG3r0o1QjkrDLXl/fPIF1rywHsdxx2zxA1j9zNrcLCLD6yw988QJ2vIjjwJAEZFJarh2WsOsOjwem8ryUhqbmolGYwoA5ahVU13Bx658N+e/OW51rC7d4aLoxhgaZtXR1tGt0i+7URkYEZFJamTttHyY21Tyy74KNu9eOFoljfakAFBEZJIaWRewsakZy7KU/Sh5QQ8/+6cuYBGRSUzZjzLZ7J7YMZbhhx+Vftk7BYAiIpOcsh9lsngriR16+Nk3dQGLiIi8Ta1tXWzYuJXWtq59LpODt3tihzGGB1et3ufx3dc4wXynFkAREZG3YaxWKUAlSA6x3bPaq6vKldX+NigAFBEROUhjlRu5c+VjYEEoGNijBAmgLsmDNDKxo7qqXIkdb5MCQBERkYM0VqvUS+veAGDWoppRLVV/evIFXl6/mY6uKD6vl4svXLLfWVnk/yix49BSACgiIrIfe8s8HatVqrQkAhajlnk8Ns8+v56+WD+dXb1EozFefW0L0d4413z04gncs6OLEjsOHQWAIiIi+7CvzNOa6gqWnL6I3z/yNF3dvVRWlHLlB5YBjGqpOvaYmTzxl78R7Y2TSqdJZ7IM9A3ys1/cT2lJRC2Bb4Gy2g8NZQGLiIjsxf4yT1c/s5an16wjnc3i9XpZcvqiXCvVGYuP48x3LGLB/Nm89sY2tu3cRXNLO9FoHMdxCBeEcB2Xhx95WpnCMu7UAigiIrIX+8o8BXLB4XHz62nr6ObpNesAWPnAn9jS1ILjOBjXcMzcGZx0/DH85Zm1DCQGKS4qpDAcIhDwEY3G2NS4Q61aMq7UAigiIrIX+5pSbKz5Znc0t/OrXz/MpsYd+HxefF4vffEBWnd1Ma1uCqedfCx+vw+PZ
+j22xcboLWti7vvfZzVz6yd4L2VfKIAUEREZC/2NZ/yyOCwL9bPU2teZmtTM69v3kFnVy8dnT30DwxijKEv1s/AwCDVU8o54fi5TKurJhZPAIYF82YTCgX2W9RY5FBSF7CIiMg+7C3zdDg4vOWXD/CXzduJxQYIBnxD3b4GMhkH28pgXIMxhm0725heN4Vz3nkyq/7wNAOJQdIZL69v2saJi44hk82qqLGMGwWAIiIi+7G3zNOG+mkURcLMmj6VXW3dWLbFQCKF3+chncmSToPBUFAQxHUcykqLePGljUR744SCAcDQ29fP39Zu5MRFx6iosYwbBYAiIiIHKRqN4bgu84+ZRSyeINobI5vN4vF48Pt8AFiWxeBgilde3cKGjU14fR5CgQBTq8vp6u4jMZgknclyxuLjqKmu2GvNQZFDSQGgiIjIQRoeBziQGKSuppKm7a3Yto3f58WyLBKDSSrKixlMpklnMpi0wZv2MDiYoiAcorSkEL/fy4L5s1m4oIFf3/m/PPv8ehzX1RzCclgpCUREROQgDY8DHBxM0djUjOO4lBQVUllZypSqMiwsBpNpEokktm3hsW2KImFc19DbG2MwmWZuw3ROOXE+P/yvu/j+f97F2nVv4PN696g5KHIoqQVQRETkbRpMpujr68fjsfH5vPT2xuntjWOAWGwAANu2CAUDhMMhvD4PH7vy3Rx/3ByKiwr57g9/w6bGHcT7E/i8Xta+8gbnnXUq7Z09SgyRw0ItgCIiIgdpeKYQy7IoKY4QLgjS0Rkl+mbwN5LrGgaTKbq6e6koL+GyS8/j7CUn0Rfrp3FrM36fD7/Pi+s69PTEaNy6M1dzUORQUwAoIiJykIaLQU+rrcK2bdLpDLZt7XV91zX4fD7Ky4r/b6ExJJMpor1xMtksyVSaZCqNPaLmoMihpgBQRETkIA0ngfQPDFJbU8FgMo3juvt8jWVBJpPNTSdXXBzB4/HgOA5+nw+fz0c4HOTcs0+loX7aeOyG5CEFgCIiIgdp5Ewh6XQWr9dDpDCE37/3Ifax+ABbtjbzxF/+SmtbF36fl5nTpzK1uoJIYQElRYV4PV4ef+IFbv7RHZoiTg4LJYGIiIi8DcMzhbz40kYGEoP09MQYHEzvdX2vxybruPzm7kd57fVtLDtvMbNm1lBVWYrHY/O3ta8TCRQwt2Ea/QODPLhqNQ3109QVLIeUWgBFRETepprqCk45aT7FRYVksg7WXsYBhsNBystLqKosIRjwk0yleXrNOpacvohwOER3TwwDnHD8UHZwdVU58f5ErrtY5FBRC6CIiMghYlkWwYAf2/q/ANC2LcLhEMGAn9mzaunu7gNjEQj4qaupor2zh7kN01l86kI2bd7O3ff/Eb/fh+O4tHV0KxNYDgsFgCIiIgdp5LRt0WiMcDhE/exa2jt7CPh9OI6D7fFgXMO7l51BtC9OS2snjuOy6Lg5DCQGcwHe8HzDtm3z4KrVNDY152YDUfevHGoKAEVERA7C6mfW8uCq1cT7E0QKC1hy+iIGBgbZsHEryWQar8cmFAoytbqcaG+cd5x2HKecNJ8/PfkCzz6/nkw2SzDo3yPAGx5TqPmA5XBSACgiIvIWDReANsbQMKuOto5uHnvi+aHAz+vBti0cd6gU9OBgisLCAqbVTQHgpBPmsXBBA36fd68B3nBroMjhogBQRETkLRouAN0wqw6Px6a6qpyX1r2Bz+flwgvO4KV1b7ClqZl0OoNrDFe8/wLi8QF+c9cjuRbDS1YsZcH82RO9K5KnFACKiIi8RcMFoNs6uqmuKqeto5vS0ggYGEgMctaZJ1JRVkxiMMnfXXMpC+bP5uYf3TGqxVDlXWQiqQyMiIjIWzSyAHRjUzOWZXHlZcu48gPLcssqKkr4zCcv54JzTsu1GFZXledaDFXeRSaSWgBFREQOwt6SNcZaNlaLocq7yERSC6CIiMhBqqmuYMH82aO6cfe2bPcWQ5V3kYmkFkAREZFxsLcWw
5G1BBUQynhRACgiIjJOdi/vsnstwUtWLGXpmSdO4BZKvlAXsIiIyATYvZagMYYHV62mta1rojdN8oACQBERkQmgzGCZSAoARUREJsDIzGDHcZUZLONKAaCIiMgEUGawTCQlgYiIiEyQvWUGixxuCgBFREQm0O6ZwSLjQV3AIiIiInlGAaCIiIhInlEAKCIiIpJnDmgMoDEGgFhMtYlEZHIYvp7puiYik8Xw9Ww4btuXAwoA4/E4ANOmTXsbmyUicuTRdU1EJpt4PE5xcfE+17HMAYSJruvS2tpKJBLBsqxDtoEiIhPFcRwaGxtpaGjA4/FM9OaIiLxtxhji8Tg1NTXY9r5H+R1QACgiIiIik4eSQERERETyjAJAERERkTyjAFBEREQkzygAFBEREckzCgBFRERE8owCQBEREZE8owBQREREJM/8/z1iSWj5iDePAAAAAElFTkSuQmCC", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAoAAAAFeCAYAAAAVEa7hAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy88F64QAAAACXBIWXMAAA9hAAAPYQGoP6dpAABj/0lEQVR4nO3deXydZZ3//9d9nz0nJ3vSNEnXpKWlhbJXoJa9FQsiigiugDA4Oo7Oz2V05uHCVx2GUdxnFEZFRpGlLAJ2EFDQshQQKaWUQps2XZI0+0nOSU7Odt/X74+QM0mbLpQ2aXPez8fDh+Tufc657/vc574/93Vdn89lGWMMIiIiIpI37IneABEREREZXwoARURERPKMAkARERGRPKMAUERERCTPKAAUERERyTMKAEVERETyjAJAERERkTyjAFBEREQkzygAFBEREckzCgBFJqmrrrqKmTNnTvRmiIi8Ldu2bcOyLH71q19N9KZMKnkbAK5fv57LLruMGTNmEAwGqa2t5YILLuDHP/7xRG+aiMhBsyzrgP735z//eaI3dZRnn32Wb3zjG/T29k70pojkBe9Eb8BEePbZZznnnHOYPn061113HdXV1ezcuZPnnnuOH/7wh3zmM5+Z6E0UETkov/71r0f9/T//8z88/vjjeyyfP3/+eG7Wfj377LPccMMNXHXVVZSUlEz05ohMenkZAH7729+muLiYv/71r3tcaDo6OsZ9ewYGBgiHw+P+uflAx1byzUc+8pFRfz/33HM8/vjjeyw/GMYYkskkoVDobb+XvDU69nKo5WUX8JYtW1iwYMGYT5lVVVWj/s5ms3zzm9+kvr6eQCDAzJkz+Zd/+RdSqdSo9SzL4hvf+MYe7zdz5kyuuuqq3N+/+tWvsCyLv/zlL3zqU5+iqqqKurq63L8/8sgjnHXWWUQiEYqKijj11FP57W9/O+o9n3/+ed71rndRXFxMQUEBZ511Fs8888x+9/vPf/4zlmVxzz33cMMNN1BbW0skEuGyyy6jr6+PVCrF5z73OaqqqigsLOTqq6/eYz8BfvOb33DyyScTCoUoKyvjiiuuYOfOnaPWeeqpp/jABz7A9OnTCQQCTJs2jX/6p39icHBw1HptbW1cffXV1NXVEQgEmDp1Kpdccgnbtm07LMf2ne98J+FwmEgkwooVK9iwYcN+j1smk+GGG25gzpw5BINBysvLWbJkCY8//nhunVdeeYWrrrqK2bNnEwwGqa6u5pprrqG7u3vUe33jG9/Asiw2bdrERz7yEYqLi6msrOSrX/0qxhh27tzJJZdcQlFREdXV1dx8882jXj/8Hd599938y7/8C9XV1YTDYd7znvfs8R2MxXVdfvCDH7BgwQKCwSBTpkzh+uuvJxqNjlrvxRdfZPny5VRUVBAKhZg1axbXXHPNft9fjg633XYb5557LlVVVQQCAY499lh++tOf7rHezJkzueiii3j00Uc55ZRTCIVC3HLLLQBs376d97znPYTDYaqqqvinf/onHn
300TG7l/d3zfrGN77BF7/4RQBmzZqV66YeeR3Y3dlnn83ChQt55ZVXOOussygoKKChoYF7770XgL/85S8sXryYUCjEMcccwx//+Mc93qOlpYVrrrmGKVOmEAgEWLBgAb/85S9HrZNOp/na177GySefTHFxMeFwmHe+8508+eSTe7zfXXfdxcknn5y7fh933HH88Ic/HLWflmXt8brha9fI/d3Xse/t7eVzn/sc06ZNIxAI0NDQwE033YTruns9XsMO5Lf93e9+lzPOOIPy8nJCoRAnn3xy7riOZFkW//AP/8DKlSs59thjCYVCnH766axfvx6AW265hYaGBoLBIGefffYe3+fwd/i3v/2NM844I7c9P/vZz/a7HwCvv/46l112GWVlZQSDQU455RQeeuihUescyPU7X+VlC+CMGTNYs2YNr776KgsXLtznutdeey233347l112GZ///Od5/vnnufHGG9m4cSMPPPDAQW/Dpz71KSorK/na177GwMAAMHQRuOaaa1iwYAFf+cpXKCkpYe3atfzhD3/gQx/6EABPPPEEF154ISeffDJf//rXsW07dzF/6qmnOO200/b72TfeeCOhUIgvf/nLNDY28uMf/xifz4dt20SjUb7xjW/w3HPP8atf/YpZs2bxta99Lffab3/723z1q1/l8ssv59prr6Wzs5Mf//jHLF26lLVr1+aC6pUrV5JIJPj7v/97ysvLeeGFF/jxj39Mc3MzK1euzL3f+9//fjZs2MBnPvMZZs6cSUdHB48//jg7duw46ASGsY7tr3/9az7+8Y+zfPlybrrpJhKJBD/96U9ZsmQJa9eu3ednfeMb3+DGG2/k2muv5bTTTiMWi/Hiiy/y0ksvccEFFwDw+OOPs3XrVq6++mqqq6vZsGEDt956Kxs2bOC5557b46L/wQ9+kPnz5/Pv//7vrFq1im9961uUlZVxyy23cO6553LTTTdxxx138IUvfIFTTz2VpUuXjnr9t7/9bSzL4p//+Z/p6OjgBz/4Aeeffz4vv/zyPlsIrr/+en71q19x9dVX84//+I80NTXxk5/8hLVr1/LMM8/g8/no6Ohg2bJlVFZW8uUvf5mSkhK2bdvG/ffff1Dfhxx5fvrTn7JgwQLe85734PV6efjhh/nUpz6F67p8+tOfHrXuG2+8wZVXXsn111/PddddxzHHHMPAwADnnnsuu3bt4rOf/SzV1dX89re/HTMoOpBr1vve9z42bdrEnXfeyfe//30qKioAqKys3Od+RKNRLrroIq644go+8IEP8NOf/pQrrriCO+64g8997nN88pOf5EMf+hDf+c53uOyyy9i5cyeRSASA9vZ23vGOd+SCmMrKSh555BE+8YlPEIvF+NznPgdALBbj5z//OVdeeSXXXXcd8XicX/ziFyxfvpwXXniBE044ARi6Blx55ZWcd9553HTTTQBs3LiRZ555hs9+9rMH9T2NdewTiQRnnXUWLS0tXH/99UyfPp1nn32Wr3zlK+zatYsf/OAHe32/A/1t//CHP+Q973kPH/7wh0mn09x111184AMf4Pe//z0rVqwYte5TTz3FQw89lDtvbrzxRi666CK+9KUv8V//9V986lOfIhqN8h//8R9cc801PPHEE3t8h+9+97u5/PLLufLKK7nnnnv4+7//e/x+/z4fOjds2MCZZ55JbW0tX/7ylwmHw9xzzz28973v5b777uPSSy8FDuz6nbdMHnrssceMx+MxHo/HnH766eZLX/qSefTRR006nR613ssvv2wAc+21145a/oUvfMEA5oknnsgtA8zXv/71PT5rxowZ5uMf/3ju79tuu80AZsmSJSabzeaW9/b2mkgkYhYvXmwGBwdHvYfrurn/nzNnjlm+fHlumTHGJBIJM2vWLHPBBRfsc7+ffPJJA5iFCxeO2tcrr7zSWJZlLrzwwlHrn3766WbGjBm5v7dt22Y8Ho/59re/PWq99evXG6/XO2p5IpHY4/NvvP
FGY1mW2b59uzHGmGg0agDzne98Z5/b/XaPbTweNyUlJea6664b9fq2tjZTXFy8x/LdLVq0yKxYsWKf64y1v3feeacBzOrVq3PLvv71rxvA/N3f/V1uWTabNXV1dcayLPPv//7vueXRaNSEQqFR+zj8HdbW1ppYLJZbfs899xjA/PCHP8wt+/jHPz7q+3vqqacMYO64445R2/mHP/xh1PIHHnjAAOavf/3rPvdZjg6f/vSnze6X+rHO1+XLl5vZs2ePWjZjxgwDmD/84Q+jlt98880GML/73e9yywYHB828efMMYJ588kljzFu7Zn3nO98xgGlqajqg/TrrrLMMYH7729/mlr3++usGMLZtm+eeey63/NFHHzWAue2223LLPvGJT5ipU6earq6uUe97xRVXmOLi4twxymazJpVKjVonGo2aKVOmmGuuuSa37LOf/awpKioade3Z3fDvf3fD166R+763Y//Nb37ThMNhs2nTplHLv/zlLxuPx2N27Nix188/0N/27udHOp02CxcuNOeee+6o5YAJBAKjtvuWW24xgKmurh51jfrKV76yxz4Of4c333xzblkqlTInnHCCqaqqyt2nmpqa9vj+zjvvPHPccceZZDKZW+a6rjnjjDPMnDlzcssO5Pqdr/KyC/iCCy5gzZo1vOc972HdunX8x3/8B8uXL6e2tnZU8/H//u//AvD//X//36jXf/7znwdg1apVB70N1113HR6PJ/f3448/Tjwe58tf/jLBYHDUusOtRy+//DKbN2/mQx/6EN3d3XR1ddHV1cXAwADnnXceq1evPqAugI997GP4fL7c34sXL8YYs8fT1uLFi9m5cyfZbBaA+++/H9d1ufzyy3Of3dXVRXV1NXPmzBn19D+yFWpgYICuri7OOOMMjDGsXbs2t47f7+fPf/7zHl2Qb8dYx7a3t5crr7xy1HZ7PB4WL148ZqvFSCUlJWzYsIHNmzfvdZ2R+5tMJunq6uId73gHAC+99NIe61977bW5//Z4PJxyyikYY/jEJz4x6nOPOeYYtm7dusfrP/axj+VaMgAuu+wypk6dmjtnx7Jy5UqKi4u54IILRh2Hk08+mcLCwtxxGG7F/f3vf08mk9nr+8nRa+T52tfXR1dXF2eddRZbt26lr69v1LqzZs1i+fLlo5b94Q9/oLa2lve85z25ZcFgkOuuu27UeofqmrU3hYWFXHHFFbm/jznmGEpKSpg/fz6LFy/OLR/+7+HfkjGG++67j4svvhhjzKjfw/Lly+nr68v9bj0eD36/HxgaQtHT00M2m+WUU04Z9dsuKSlhYGDgkHYtjnXsV65cyTvf+U5KS0tHbff555+P4zisXr16r+93oL/tkedHNBqlr6+Pd77znWNey84777xRPSjDx/r973//qGvU7t/BMK/Xy/XXX5/72+/3c/3119PR0cHf/va3Mbevp6eHJ554gssvv5x4PJ47Bt3d3SxfvpzNmzfT0tKS2+f9Xb/zVV52AQOceuqp3H///aTTadatW8cDDzzA97//fS677DJefvlljj32WLZv345t2zQ0NIx6bXV1NSUlJWzfvv2gP3/WrFmj/t6yZQvAPrukh0/gj3/843tdp6+vj9LS0n1+9vTp00f9XVxcDMC0adP2WO66Ln19fZSXl7N582aMMcyZM2fM9x0ZVO7YsYOvfe1rPPTQQ3sEd8M3mEAgwE033cTnP/95pkyZwjve8Q4uuugiPvaxj1FdXb3PfdiX3Y/t8HE799xzx1y/qKhon+/3//7f/+OSSy5h7ty5LFy4kHe961189KMf5fjjj8+t09PTww033MBdd921RyLR7jdUGPs7CAaDua6vkct3H0cI7PEdWJZFQ0PDPsdMbd68mb6+vj3GuQ4b3u6zzjqL97///dxwww18//vf5+yzz+a9730vH/rQhwgEAnt9fzl6PPPMM3z9619nzZo1JBKJUf/W19eXuybAnr8nGBr/V19fv8fQht2vlYfqmr
U3dXV1e2xDcXHxmNcyIHct6uzspLe3l1tvvZVbb711zPce+Tu+/fbbufnmm3n99ddHBU4jj82nPvUp7rnnHi688EJqa2tZtmwZl19+Oe9617sOat92f/9hmzdv5pVXXtlr9/i+EhkP9Lf9+9//nm9961u8/PLLo8aBjzV+8a3cT4A97gc1NTV7JOrNnTsXGKr/N/wgPVJjYyPGGL761a/y1a9+dcx97ejooLa29oCu3/kqbwPAYX6/n1NPPZVTTz2VuXPncvXVV7Ny5Uq+/vWv59YZ66Q/UI7jjLn8YDK5hp+Uv/Od7+TGneyusLBwv+8zsnXsQJYbY3Kfb1kWjzzyyJjrDn+24zhccMEF9PT08M///M/MmzePcDhMS0sLV1111agn/s997nNcfPHF/O53v+PRRx/lq1/9KjfeeCNPPPEEJ5544j7340CP7fDn/frXvx4zsPR69/0zWLp0KVu2bOHBBx/kscce4+c//znf//73+dnPfpZrybv88st59tln+eIXv8gJJ5xAYWEhruvyrne9a8wWjrGO3/6O/9vlui5VVVXccccdY/778A3FsizuvfdennvuOR5++GEeffRRrrnmGm6++Waee+65AzrH5Mi1ZcsWzjvvPObNm8f3vvc9pk2bht/v53//93/5/ve/v8f5+nayTg/VNWtv3s61DIYypvcWnA4HCL/5zW+46qqreO9738sXv/hFqqqq8Hg83HjjjbkHdxhKIHz55Zd59NFHeeSRR3jkkUe47bbb+NjHPsbtt98O7P1e8lbuE67rcsEFF/ClL31pzNcMB09jOZDf9lNPPcV73vMeli5dyn/9138xdepUfD4ft9122x4JiXDw38HbMfz9feELX9ijhXTY8MPIgVy/81XeB4AjnXLKKQDs2rULGEoWcV2XzZs3j6qZ1d7eTm9vLzNmzMgtKy0t3aOAaTqdzr3X/tTX1wPw6quv7vEUvfs6RUVFnH/++Qe2U4dQfX09xhhmzZq1z4vM+vXr2bRpE7fffjsf+9jHcsv31jVSX1/P5z//eT7/+c+zefNmTjjhBG6++WZ+85vfAIfu2FZVVR30cSsrK+Pqq6/m6quvpr+/n6VLl/KNb3yDa6+9lmg0yp/+9CduuOGGUQkzh7PLYff3NsbQ2Ni4z6fa+vp6/vjHP3LmmWce0E39He94B+94xzv49re/zW9/+1s+/OEPc9ddd+X9RfNo9/DDD5NKpXjooYdGtd7sbyjESDNmzOC1117DGDMqqGlsbBy13lu5Zr2dB+23qrKykkgkguM4+92ue++9l9mzZ3P//feP2saRjQTD/H4/F198MRdffDGu6/KpT32KW265ha9+9as0NDTkWjp7e3tHVaF4K71J9fX19Pf3v617wL5+2/fddx/BYJBHH310VKvgbbfddtCfty+tra17lOvatGkTwF6T82bPng0M9TodyHHY1/U7n+XlGMAnn3xyzKeQ4fFTxxxzDADvfve7AfbIqvre974HMCobqr6+fo+xF7feeuten+x2t2zZMiKRCDfeeCPJZHLUvw1v68knn0x9fT3f/e536e/v3+M9Ojs7D+izDtb73vc+PB4PN9xwwx7HzxiT66ocfvIbuY4xZlQ5BIBEIrHHvtbX1xOJREZ1O7zdY7t8+XKKior4t3/7tzHHvezvuO3eBVtYWEhDQ0NuG8faX9jzvDmU/ud//od4PJ77+95772XXrl1ceOGFe33N5ZdfjuM4fPOb39zj37LZbC7Ijkaje+zLcOvNWGWB5Ogy1vna19f3lm7wy5cvp6WlZdSY6WQyyX//93+PWu+tXLOGA4DxmAnE4/Hw/ve/n/vuu49XX311n9s11vF6/vnnWbNmzajX7H6dsG0790A2/LsZDohHXs8GBgZyLYQH4vLLL2fNmjU8+uije/xbb29vbsz2WA7kt+3xeLAsa9T1ddu2bfzud7874G18K7LZbK68DQw93N9yyy1UVlZy8sknj/maqqoqzj77bG
655ZYxGwJGfn/7u37ns7xsAfzMZz5DIpHg0ksvZd68eaTTaZ599lnuvvtuZs6cydVXXw3AokWL+PjHP86tt95Kb28vZ511Fi+88AK33347733veznnnHNy73nttdfyyU9+kve///1ccMEFrFu3jkcffXSPMV17U1RUxPe//32uvfZaTj31VD70oQ9RWlrKunXrSCQS3H777di2zc9//nMuvPBCFixYwNVXX01tbS0tLS08+eSTFBUV8fDDDx+WYwZDF69vfetbfOUrX2Hbtm28973vJRKJ0NTUxAMPPMDf/d3f8YUvfIF58+ZRX1/PF77wBVpaWigqKuK+++7bY+zHpk2bOO+887j88ss59thj8Xq9PPDAA7S3t48a2H0oju1Pf/pTPvrRj3LSSSdxxRVXUFlZyY4dO1i1ahVnnnkmP/nJT/b6+mOPPZazzz6bk08+mbKyMl588UXuvfde/uEf/iH3/kuXLuU//uM/yGQy1NbW8thjj9HU1HQQR/nAlJWVsWTJEq6++mra29v5wQ9+QENDwx6D8Ec666yzuP7667nxxht5+eWXWbZsGT6fj82bN7Ny5Up++MMfctlll3H77bfzX//1X1x66aXU19cTj8f57//+b4qKinIPRXL0WrZsWa6l6vrrr6e/v5///u//pqqq6oBb1a+//np+8pOfcOWVV/LZz36WqVOncscdd+QS2IZbyt7KNWv4Zv+v//qvXHHFFfh8Pi6++OLDVsj93//933nyySdZvHgx1113Hcceeyw9PT289NJL/PGPf6SnpweAiy66iPvvv59LL72UFStW0NTUxM9+9jOOPfbYUUHttddeS09PD+eeey51dXVs376dH//4x5xwwgm5HqRly5Yxffp0PvGJT/DFL34Rj8fDL3/5y9z16EB88Ytf5KGHHuKiiy7iqquu4uSTT2ZgYID169dz7733sm3btr1eGw/kt71ixQq+973v8a53vYsPfehDdHR08J//+Z80NDTwyiuvvJ1DPqaamhpuuukmtm3bxty5c7n77rt5+eWXufXWW0eNK9/df/7nf7JkyRKOO+44rrvuOmbPnk17eztr1qyhubmZdevWAfu/fue1ccs3PoI88sgj5pprrjHz5s0zhYWFxu/3m4aGBvOZz3zGtLe3j1o3k8mYG264wcyaNcv4fD4zbdo085WvfGVU6rkxxjiOY/75n//ZVFRUmIKCArN8+XLT2Ni411Ile0vDf+ihh8wZZ5xhQqGQKSoqMqeddpq58847R62zdu1a8773vc+Ul5ebQCBgZsyYYS6//HLzpz/9aZ/7PVxCZOXKlaOW722bhksWdHZ2jlp+3333mSVLlphwOGzC4bCZN2+e+fSnP23eeOON3DqvvfaaOf/8801hYaGpqKgw1113nVm3bt2oVP6uri7z6U9/2sybN8+Ew2FTXFxsFi9ebO65557DcmyffPJJs3z5clNcXGyCwaCpr683V111lXnxxRf3edy+9a1vmdNOO82UlJSYUChk5s2bZ7797W+PKqXT3NxsLr30UlNSUmKKi4vNBz7wAdPa2rpHCZu9HdOPf/zjJhwO7/HZZ511llmwYMGofQDMnXfeab7yla+YqqoqEwqFzIoVK3LldUa+58gyMMNuvfVWc/LJJ5tQKGQikYg57rjjzJe+9CXT2tpqjDHmpZdeMldeeaWZPn26CQQCpqqqylx00UX7PU5yZBqrDMxDDz1kjj/+eBMMBs3MmTPNTTfdZH75y1+OWYpkbyU0tm7dalasWGFCoZCprKw0n//85819991ngFElWIw58GvWN7/5TVNbW2ts295vSZjdfxv722bAfPrTnx61rL293Xz6058206ZNMz6fz1RXV5vzzjvP3Hrrrbl1XNc1//Zv/2ZmzJhhAoGAOfHEE83vf//7PX5f9957r1m2bJmpqqoyfr/fTJ8+3Vx//fVm165doz7zb3/7m1m8eHFune9973t7LQOzt2Mfj8fNV77yFdPQ0G
D8fr+pqKgwZ5xxhvnud7+7RzmzkQ70t/2LX/zCzJkzxwQCATNv3jxz2223jVnCZqxjOlyyZffyXmPdf4a/wxdffNGcfvrpJhgMmhkzZpif/OQnY77nyDIwxhizZcsW87GPfcxUV1cbn89namtrzUUXXWTuvffe3DoHcv3OV5Yxh2iEuYgcdn/+858555xzWLlyJZdddtlEb47IKD/4wQ/4p3/6J5qbm6mtrZ3ozZEj3Nlnn01XV9eY3fBy+OXlGEAREXl7dp/WMZlMcssttzBnzhwFfyJHgbwcAygiIm/P+973PqZPn84JJ5xAX18fv/nNb3j99df3WmZIRI4sCgBFROQtW758OT//+c+54447cByHY489lrvuuosPfvCDE71pInIANAZQREREJM9oDKCIiIhInlEAKCIiIpJnDmgMoOu6tLa2EolExnW6HhGRw8VxHBobG2loaNjrvKUiIkcTYwzxeJyamhpse99tfAcUALa2tjJt2rRDsnEiIiIicvjs3LmTurq6fa5zQAFgJBLJvWFRUdHb3zIRkQnW3NzMggULdF0TkUkjFosxbdq0XNy2LwcUAA53+xYVFelCKSKTwvC1TNc1EZlsDmS4npJARERERPKMAkARERGRPKMAUERERCTPKAAUERERyTMKAEVERETyjAJAERERkTyjAFBEREQkzygAFBEREckzB1QIWkRERN6e1rYuotEYpaVF1FRXTPTmSJ5TACgiInKQDjSoW/3MWh5ctZqOriiO43D6qcfx/veeq0BQJowCQBERkYOw+pm13LnyMaK9cUpLIlz5gWUsPfPEPdZrbeviwVWr2dXWxbbtu+js6uWvf9vIn596ic9+6oNjvkbkcFMAKCIi8ha1tnXxs1/cz46d7Xg8Ns0tHXR03o1rDHMbpgPkWgaj0RgdXVF2NLfRE+3DH/DiZB12NLdx58rHaKifppZAGXcKAEVERN6iTZu3s6WphaLCMIWFBexq7+KVDY388D/vIhQKYFkW4XCISGEBS05fRGdnlJbWTlzHxePx4A/48Pt8RHvjRKMxBYAy7pQFLCIi8lZZ1pv/b0hnMsRi/WCgtCTCjp3tbN/RRnVVGcYY7rj7D7S0duJkXbKOS9ZxcBwXx3EpLYlQWlo0sfsieUkBoIiIyFs0t2E6DbPrGBxM093dSzqTpbS0kK6ePvr7E2QyGVLpDIXhEFu3teL1eZgxvZpAwI9xDdlslqrKEq78wDIANmzcSmtb1wTvleQTdQGLiIi8RTXVFVx/zaXcee9j7NjRRl9sgK7uXna19eC6LrZtsfGNJsrLSrBti8JwmHQ6QyDgw3EcwqEg7734bABu/tEdxPsTRAoLuGTFUiWFyLhQC6CIiMhb1NrWRXlZMaeedCyuMSQGk6RSWVzXxbLAGMOG15oYHEwxY/pUbI9FW0c3/f2DAHi8HlY9+gx3rnwMYwwNs+owxvDgqtVqCZRxoRZAERGRt2D1M2uHWv52ttHS2kkmk8Xjsclmnf8bGmhZOK5LKBigtz3Otu27SKUy2LZFRUUJZcXFNG1rxXVcTj/tODwem+qqchqbmpUUIuNCAaCIiMgBWvvKJm7+0R1s39HGQGKQwWQaCwgE/AAYM7SeZVnYlsVfX9qIz2cTDoVIpzMYA4mBFMFAEtu2CIUCtHV0U11VTltHN5HCAiWFyLhQF7CIiMgBWP3MWr7zg1+z/rUtRPvi2B4br9fGNQbHdfB6Pbl1XXco2zfaG6Ozs5feWBxjhrqGY/EB2jt6qKwo5QOXnodlWTQ2NWNZFpesWAooKUQOP7UAioiI7MfwbB7GGDweD9msSzbrEAz4cd0UxhgKw0Hi/YO4rsEYg+O4ABjAuCb3XpZlURAKUloSYfGpC1l86sJc0ejGLTuVFCLjQi2AIiIi+xGNxoj3J2iYPY3S4gi2ZZHJZHFcQ0FBkPrZdVz87qUUFhZg21ZuLOBYbMsiEPCRTKZz4/0WzJ8NkAsylRQih5sCQBERkT
G0tnXlumJLS4uIFBYwkBjk1JOPpaS4ENu2yaQzpFIZOjqiPPfCqzhZB6/Xi8dj47HHvsUaDL29/WzbsWvUZwwHmdVV5bmkkHh/gmg0Ns57LvlAXcAiIiK7Wf3MWh5ctXpUV+wlK5by4KrVZLJZTlg0lx3N7fT2xvB4PAwMDLJ95y7S6ez+39yA6zrE4wP8+Gf3UFJcSGVFKUtOX0SksEBJITIu1AIoIiIywsjxfiO7Yhvqp/H5f/wwn/37D3LVh1dQVVFKKBhkcDCFa8Bx3FGJIHtj2RbpdJbEYIpNjTvY1dbNrrYunl6zjgXzZ9PX18+rG7fkkkJUEkYOB7UAioiIjJAb7zerbo/6fAvmz6amumKoW7gkwoaNW4j3J4ChzF+f14tt22AMrjFjvr95898sC7weD9lsls6uXvpiA0R742SyWXxeL0tOX6QEEDls1AIoIiIywvB4v7aObhzHHbMrtqa6gmXnLcbj8QwFdK6L9WZiiGWx1+APyI0RDBeEyDoOPp+XaDRGZ1cUv8/LcfPrKSku5Ok165QAIoeNAkAREZERaqoruGTF0j3q8+3eFTu3YTrHL2zgmIbphEIBbNvGALY9dgqwx7YpKS6kOBKmrLSIYNCPbdv09MSwPTYVFSXMnlmrBBAZF+oCFhER2c3SM0+koX5arj7fWOPwSkuLCIdDxAcGKS8tHir8HB1KCnGyyVwroGWBbdtMr61i0XFz+du6N+jvT5DJZKmsLGX6tCksmD+b5uYOJYDIuFEAKCIiMoaa6op9JmDUVFdQWVZCd08fFuDzeamtqaKltQOvz0s6ncmtGy4IkXUNA4NJTjhuDpFIAR2dUXp74/i9vqGp5QYG6Yv10z8wmMs8VgKIHC4KAEVERA5Ca1sXnd29lJcW4/N68HhtOrv6cFyXcDgExrxZFNqioryYgoIgyWSaY4+ZhcdjEwoGeH3TdsrKimmYVUdbRzeDyRQffN/5zJ0zQ8GfHFYaAygiInIQotEYjuty8onzCBUEicUHiMX7Ma4hmxlK7rBtD5WVpcyeVcvsWbWUlkRyySU7WzoAqKupyo37cxyXyopSBX9y2CkAFBEROQjD2cKBgI+F82fjuga/30dd3RQ8HhvHdTHGpSgSpqK8hCsvW8aVH1iWSy4JBv3Uz6plIDG412xjkcNFXcAiIiIHYThb+MFVq2nZ1YnX66GqopSCUICiwqns6uhi9owaPvP3H2Ruw/Rcq97I5JLGLTt5cNVqGpuaNe5PxpUCQBERkYM0nC28afN27r7/j/T19dPZ1Ut3Tx/hghAfueJCzl5y0qjXjEwuqamu2G+2scjhoABQRGSSaG3rUiAxAYYDOtu2eXDVarxeD9PrpnDRhUu49OKz91h/9+9pf9nGIoeDAkARkUlg9TNreXDVauL9iVxXoqYRG18HUjtQ35McKZQEIiJylGtt6+LBVasxxtAwqw5jDA+uWq1pxCZATXVFbr7g3el7kiOJAkARkaNcNBoj3p+guqpc04gdwfQ9yZFEAaCIyFFuuBzJcH05lRM5Mul7Ojxa27rYsHGrWlLfIgWAIiJHueFyJMP15SzLUjmRI5C+p0Nv9TNruflHd/DDn97NzT+6g9XPrJ3oTTpqKAlERGQSOJAEBJl4+p4Ond3HVLZ1dPPgqtVEImH8Pq+O734oABQRmSRUTuTooO/p0BgeU9kwqy43pnLNX9fzk5/dg8frUZb1fqgLWERERI46u4+p3Lqthc6uXnw+r7KsD4ACQBERETnq7D6mMp3JUlVRyuyZtcqyPgDqAhYREZGj0sgxlelMlt/c9QhtHd1UV5Ury3o/FACKiOSpA5k6TtPLyZFu5JjKeHyAB1etprGpOTcGUOf22BQAiojkof1NSdba1sUfn3yBNS+sx3FcDaiXI9bIQO5As6w1JZ8CQBGRvLO38hkN9dOoqa5g9TNruXPlY7y07g18Xi8nHD8nN6B+eB2Ridba1sWfnnyBZ59fj+OOfkjZ1zm6v/
M/XygJRETkKPdWZkJobevixZc20tEVHXNKsuGbYzKVJhjwEwr52dS4k8JwSAPq5Yix+pm1fPOmX/CTW+9l/YYtZDMOPdEYd658bL+/A03JN0QtgCIiR7G30pU1vG5nV5TNjTvp642z6Li5DCQGc4Plh2+OZSURthhDJuNgTJadLR2UlRZpQL1MuNa2Lu5c+RidXVG8Hhsn6/D8i69SUhwh6zj88ckX+NiV797r60eWj8nnZBG1AIqIHKV278oa7qZd+8qmPVoER64bKQyTSWfYuGk7jzz+LG3t3bnB8qWlRQwMDPL8i6+RGEyyq62L3r5+gkH/qAH1mn9VJsofn3yBl9a9wa62bnqiMbqjfTiOg2Vb+Lxe1rywfp/npabkG6IWQBGRo9RYMyH85Zm1/Nt3biMUClBZUZprERxed0plGa++tpWy0mKwoLCwANcYGuqn5d7XGANAuCCE1+OlqrKEv7v6Uk48fi6gAfQycVrbuljzwnp8Xi+hkJ9kKkhHZy9+vxcLmDNnGtFonE2NO/YZ0GlKPgWAIiJHrd27sp5/8VUat+yksLCAstIislknN7i9tLQIj8dm4xtNDAwMMpAYJNobp7snxo6dbfy/G3/OZz55OZ1dUQoLCzhn6clkMhl8Ph/tnT34fUO3Cw2gl4kUjcZwHJcTjp/DpsaduK6LhcHn9ZBKpXn5lc34fF7uvvdxotEYcxum7zXAy/cp+RQAiogcpYa7sh5ctZpXN25hZ0sHgaCfKVVlxPsTtO7qwuv1EI3G2NS4g6Ztrexs6SCRGCSTyebexxh4+JGn2fD6Vupn1ZFOZygMhygMh9jZ0kEw6M+Njxqr1bGxqZloNJbXN1MZH8MPPcYYjjt2Nk8+1UukKEwoGKC7J4Zt27zzjEX0xfq5+ce/ZfbMmlEt4SOpDqCIiBy1hruyXnxpI7++6xGaWzrYvmMXAKl0huKiMM+/uIH/vGUlsXgCy7YwZijosywACxjq8m1p7SSTdigtjZBKZ2hu6QCgflYtjVt2UlNdQTqTJZt12Lqthdkza/N2AL1MjFEPPa9tId6fIOj3EYsN4Loutm3T1tHD4GCSwcEUjuPSuquTO1c+NqqVevdhDEtOX7TP1sLJSAGgiMhRrqa6glNOms8jjz3Llq3NGAOu6+KxbRxjWPXoM/TFBygIBcGycB2XTCb7ZhBoRr2Xz+ehsyvKjGlTOXbeLOpqqhhIDPLgqtV09/Tx9Jp19PbG6eiK0t7Zw+yZtXk5gF4mztIzTyQSCfPdH/ya7p4+EokUtscim3RwXJdNm7fjOC5YFi++tBHHNXg9FnV1VfzrF67eYxjDy+s37be1cDJSFrCIyCRQU13BGYuPw+fzUlpSSFVlKUtOX0RhQYj+/gS2ZeE4Dh7bwn7zfzDUEjjMcVzSmSyZTJZ4f4K6mipKigupriqnoyvK7x95GmMM7zh1IQvnz6aspIiPXHFhXtws5cji93kJFxYwb+5MXONivRnOWBZkHZdMdug8zmazJJNJ4v2D3PY/v+ffvvurUXUAE4NJOrt6yaSzTKkqy2XS50N2uwJAEZFJ4rxzTuOkE45h7pwZnP3Ok/D5vG8GehaO6xLvT9DdE8NxXWbUTSESKSAQ8OVen806dHT2kEymaO/o5vEnnmfrtlbaOrrxeb1kstlc8dzZM2vxeD255BCR8TQ8FrAwHKK6qpxsNovrGpwRrdvGGJKpDJZl4fXYuMbwu9//hda2rqGEqE3b6OyKEosPUFQUJlwQyqui0AoARUQmiZrqCq68bBllpUWsf20Lr27cSl9sgM6uKKFggFAwgM/rIRQMcOHyJbzjlIXMnFFDIODDsiwsCxzHkHWGuo+jvXGeXvMyg4MpLr5wCZUVpbR1dOM4rsb+yYQaHgsYDofw+byk0kOBHli82bidY1sWtsemIBQgnc7w0suv09fXz2uvN/HcixvIZh0qK0ooCAXz6rzWo5uIyCSy9MwTSaUz3PrLB6ifVUtVZSldXb
0URcLUz66jry/Oth27WL+hkWw2S2dXFMuyKCkuBAtiff14PD4qK8soSqWIxROcf+5pXHrx2ZSXFfPgqtU0NjXn6v9p7J8cLrtn6e7+93AC1O8e/gs//fl9BPw+unp6sSyLTCaLz+sdSlpyXGxjcByHwsICNr6xjeop5dTPqqO5tYNYbIDi4sK8O68VAIqITCKrn1nLnSsfY+u2VspKi/D5vBQVhYnFBwj6fbzR3kMwEGBuwzR2tnTg9XioqiwlGPAD0NfXj4WF1+PB9frw+byUv9kaouK5Ml52z9KdNaOGDa9vJRqNU1oa4crLluXGnk6ZUkYg4Md1HUKhIJlMBtv2jxrf6rqGRCLFiYuOwe/35YYyRAoLaGxq5oPvO5/KitK8Oq8VAIqITBLD2Y1+n5ey0iIGB5Ns39FGIODDjRne2LKDxGCSE46fQ3FRIf0Dg1iWRVGkENd1iUZj+P0+gsEA/QMJHMelflYtc+fMyH1GvhfPlcNv9yzdrdtauO3XD+Pz+/DYFluammlu6eCV9ZvZ8HoTO5rb6R9IkEymsWBozJ/Xg+O6FEXCZLMOxhgCQR+pVBpgjzJGc+fMyLvzWgGgiMgkMbJIs8/n5fVN22lt6yJcECQU8BPtiZFOZ9i0eSfx+CC72rpwHIe+WP9QK8vMGpbVTyPaGyfaG6e0JMKVH1iWdzdGmVi7Fxu3bZveWD8lxYUMZl36+xO0tffQuKUZv99HYjCJcV18Xg+VlaWUlRZjjEtPT4zO7l6Ma8hks7jGZVPjTmqmVrCjuX1UGSOADRu3qgVQRESOPiOnhptWO4VYbIC+WD+zZtbQ0xPDdV38AT/JVIq1696guCjMgvmz6IkO1fUrKRkqAL3svMV5VxRXjhy7T3EY7Y1hAQOJ5FA5I3eoRS+VTpN1HJKDqTcLnBu6u/soKAgyo64ay7LY1daFawyWBa5jSCbTLJg/m0QiSSaT5SNXXEg8PsDNP7oj7+a2VhawiMgkMZwZaVkWjU3NeLwe6mqqmF43hUwmS0V5KQG/j6qKUrKOw8Bgir+tfYPGLc3E4wkCPi/GGJ5es07Bn0yY3c/jkuIIdbVVpFIZBhJJMhkHy3qzzEsyhWuGyr+4rmFwMEVfbIAzT1/ElMoyQqEgPp8H27ZxjYsxLp1dvbkyRn2x/lHdzflUB1AtgCIik0hD/TQufvc7wRiKiyP85q5H6OiM4jgObe1dhEIBdrR04DouicQgMDRAvrAwRMuuLmbNrKW9s0dz+8qE2j3h6A+Pr+HG795Oxhi8Hhvb4wEMrjt6JhvXuIRDQRafsoATFx3DYDLF5sadeLweentjGAPNLe2EC4JECgvAmLyd21oBoIjIJLF75uQlK5Yya0YNz7+4gd7eOJlMFtcYUqk0RZECBhJJXOMChoDfh+u6NLd2UFZalBd10OTINjLhaPEpC5haXU57Rw/pdAbjung8HjweG2MMtm1j2xbGNVRWlORasC+84HS2NLUQ8PuGxgZiiMUTpDNZPrRiKQ3100Z1N6sOoIiIHFV2z5xs6+jmzpWPgQUL588mUlhAR2eUaF+cdCZDSSSCZfeRTmfIZLJ4PB6SqTTBgD9v6qDJ0SP95sNLaUmEoqIwPdEY2ayDZVm4riGTyZBKZ/D6PFy47AxgKKlj4YIGTjrhGJLJNNNqq+jojJLJZPmHT17OicfPBeCSFUvzsr6lAkARkUlg98zJ6qpyXlr3BgAnLToGj8emsqKUVzduATOFnmicQMBHOp0hEing+IUNnHvWKZx/zml5cfOTo4vf56WyooT+/kHS6SylJUUUFoY4bn49Tzz1N/r7ExQWFvDei86iYXbdqKSOBfNm07S9lbaOnlyANxz8Qf7Wt1QAKCIyCeyeOdnW0U1pSQQsRi2rrCjl0ovP5rE/PU+0N04w4Oe8s0/hPAV+cgQrLS1i9sxaBgYGiRQWEO9PEA6H+OR17+fSS85hZ3M70+qmMKWqjJt/dMeolvCm7a185IoL8fu8ew
3w8rG+pQJAEZFJYDhzcmRX1pUfWAawR/fW0jNPZPGpC/OuxUOOXiPP7774wKiu2prqilyL3oaNW8dM6vD7vCyYP3uC9+LIogBQRGSS2FtXViQSzrWQDN8o87HFQ45Ow3MAN9RP4/P/+OF9PriM1RKeL0kdb5UCQBGRSWT3wG73zOB4fOCgi9wO34jVaijjZazM9n2dv2O1hOdLUsdbpQBQRGSSGisz+MFVq2mon/aWb4hv9UYs8nYd7Pmbr0kdb5VmAhERmaSGM4Orq8pz46Hi/Qmi0dhbep/db8T5NFuCTJy3c/7WVFewYP5sBX/7oABQRGSSGjkeynHcPcZDtbZ1sWHj1v0GcocqkBR5K/Z3/srbowBQRGSS2n1OVcuycuOhVj+zlpt/dAc//Ond3PyjO1j9zNq9vo9uxDIR9nX+ytunMYAiIpPYWOOh3urYKg2sl4mi8XyHjwJAEZFJbvfM4LFmDWlsaiYaje31BqsbsUwUlSw6PBQAiojkmYOtlaYbscjkoTGAIiJ5RmOrREQtgCIieUhduiL5TQGgiEieUpeuSP5SF7CIiIhInlEAKCIiIpJnFACKiIiI5BkFgCIiIiJ5RgGgiIiISJ5RACgiIiKSZxQAioiIiOQZBYAiIiIieUYBoIiIiEieUQAoIiIikmcUAIqIiIjkGQWAIiIiInlGAaCIiIhInlEAKCIiIpJnFACKiIiI5BkFgCIiIiJ5RgGgiIiISJ5RACgiIiKSZxQAioiIiOQZBYAiIiIieUYBoIiIiEieUQAoIiIikmcUAIqIiIjkGQWAIiIiInlGAaCIiIhInlEAKCIiIpJnFACKiIiI5BkFgCIiIiJ5RgGgiIiISJ5RACgiIiKSZxQAioiIiOQZBYAiIiIieUYBoIiIiEieUQAoIiIikmcUAIqIiIjkGQWAIiIiInlGAaCIiIhInlEAKCIiIpJnFACKiIiI5BkFgCIiIiJ5RgGgiIiISJ5RACgiIiKSZxQAioiIiOQZBYAiIiIieUYBoIiIiEieUQAoIiIikmcUAIqIiIjkGe9Eb4DI/iSSGVLZLAGvl4Kgb6I3R0RE5KinAFCOaLu64+zo6CPjuPg8NtOriplaHpnozZI81drWRTQao7S0iJrqioneHBGRg6YAUI5YiWSGHR19ZB0Xv9cmk3XZ0dFHcThIQdCnlkEZV6ufWcuDq1YT708QKSzgkhVLWXrmiRO9WSIiB0UBoBxRRgZ1qWyWWCJF1nFxjcG2LLwem1Q2S193Ui2DMm5a27p4cNVqjDE0zKqjraObB1etpqF+mloCReSopABQjhi7d/eWhIMMpjIYA0G/h0zWxXFcEoMZmrtie20ZFDnUotEY8f4EDbPq8HhsqqvKaWxqJhqNKQAUkaOSAkAZN8Ote65jsD3WqK7b4e5egJDfw0AyS2NrD5msi+u6ZBwHv8dDQdBH1rjEEykyWQeXoVR2n9dDKpulAAWAcuiVlhYRKSygraOb6qpy2jq6iRQWUFpaNNGbJiJyUBQAyrjY1R1n664o8USKVMYh4PMQKQgwe2opU8sj9A0kiSdSWBakMg7pjEMy4wDgscFr22CB3+ch1p+ifzCNbVuEAl5SaQcnPRRYihwONdUVXLJiKQ+uWk1jU3NuDOCBtv4peUREjjQKAOWwGNnal3Yc1je1k0hlSKaHgrrBdJZ01iGTdYgNpGhqi9I7kALAAkaGco4LxnXweGziAyl64oOksg4W4BpDwOfF67GxPda476fkj6VnnkhD/bS3HMgpeUQkv418AASOmIdBBYByyA2P5YsnUiTTWbKOm2vNGymVydITd+mJD5LJurnlY7XjuYDruBjj4vV68NgWxoBtWxQEfAT9XgJenc5yeNVUV4y6aO+vZU/JIyL5beQD4MDAIMYYPF4PPq+Xiy9cwqUXnz1h26Y7phxSw2P5Uuksg+lsbvzeWBwXHNfFZuygbyxZF7LpodY/y4JM1sWyLKZXFSsBRMbVgbTsKXlEJL
/s3to3/ABYXVXGE3/5G4ODSSKRMIPJFJsadwBMWBCoAFAOqVQ2m2v5S6azWBa4+4nu3H3/85gMYAz4LJhSEh6zBEx3X4L+VJrCgJ/y4oKD+BSRsR1oy97uySNbt7XgZB1a27py/65AUGRyGH4o7OyK4vN6OWHR3NwDYHe0D9e4DCSShAuCTJ1Swa62Ln7/yNPMnFGD3+cd9+uBAkA5pFzHkExn36zbB1nHHHDr3sEI+X30DiRJJDOjWgA3bOugaVeUrGvw2hazppayYGbVYdwSySf7a9lrbeti0+btYFksOX0RT69Zx3N/fZWOrig+n5cbbvw5VRWlzJpZozGBIpPA8EPhrrYuOrt6icUG2PB6E7U1lRSGQ1hAIpEkk3UoLAyTGBykqChMW0c3P/nZPXi8nnEfI6wAUA4p22MR8HtJpjJk3cMb/NkWFAT9ZBx3VAmY7r4EW1p7MGaopEwm69K0K0p1aaFaAuWQ2FdZmNXPrOVnv7ifLU0tADTMruOUk+azY2cbtdWV9PTGyKQzuTFBGhMocvSLRmN0dkXp7OrFsmBqdQW72rvIpDNs39lGc0sHjuMChubWDqZUlVEUKaC9M0pFeQllZUXE44lxvR4oAJRDKuD1UhDwMZBMH/jAvoPkGujs7ac4HByVANLcFSOZzmLbFm7KDBWLdgz9qTTlKACUt29vZWEA7lz5GDt2tlNUGAbLsPGNbWx8Yxuu6xIpLCCVylAztZLevjiRSAF9sQGNCRQ5ypWWFpHNOnR391JdXUEiMUhRJAyWRSaTpX5mLQ31dWzYuJWdLR0UhALYto1tWbTu6mL7jjZ8Pi+RwoJxux4oAJRDYuQUblWlBXT09mPYs6TLoZZxDPFEmo5oP1WlhfQNJOmOJbAta+izXZdEyiXg81AY8B/GLZF8M1ZZmA0btxLtjePx2BQWFpDOZOjvT+DzeSgqKiSTydKfGKStvYtIJEw8nlBBaZFJoHHLTlLpDLH4AL19/RQVhSkrLWJXezeZdIbKilIqKkpYfMpCkqm1+Pw+0ukM7R09hMMhptdV09UdZTCZIp3Jjss2KwCUt233KdymlIQpCPpJZ5LYbyaBHK4gcLjy36aWbnZ09pFMZxlMZSkIehlMOThm6JNrKtT9K4fe7mVhSkuLKC2J0NzSQX9/gp5oH4PJFI7jxWMPEgz5sd5sEbBtC8u2ci2HGzZuVVKIyFFg9/JPw+P/An4foVCQ3r54bhzgrJm12LbF4GCSV15tZGtTC21tXSw8tp6qGVNp2r6LZCpNW3vXUAtgURi/b3xCMwWA8raMnMKtqMDPYCpLU1sv5s3U38M9OYcBUlmHbL9DwufBtizSWQdjbKaUhUkkM3hsi1lTyg7vhoi86YzFx9HRGWXbzl10R/swBtKZLJlYP3a/RXVlOTNn1gytbOCVVxtVKFrkKDFW+afysmI6u6Ls2NlGOpOlsDBEOpXFGEMylaJ2aiV9ff00t3ZijAEMr73eRG8sTrggSF+sH9cYEskUBtjUuIMF82cf9n2xD/snyKSWymbJOC6hgBfLsvB6LKL9SVIZB793/E4vx0A64+D12AS8HjKOS6w/RTrj4BpobO1hV3d83LZH8s/qZ9Zy84/u4Kk164hECjj+2AYKCkL4fF4wBsdxyWQc2jq7yWQynLToGIwx3HXf4wwMDNIwqw5jDA+uWp0rEyMiR47dyz8N/17TmSx9ff3sausmOZgkmczg9XowxtC6q5PGLTvp7I6SyWTxemyMgV3tXaxdt4ld7d3E4wkGEknKSoqYXjeFp9esG5drgAJAeVsCXi8+j83gm087sUQax3XxeixCgfEtzOwaSKazeDw2Po+N12tTGgkxpTQMwI6OPrr7EkT7B0kkM+O6bTK5tbZ1cee9j9ETjZHNZmnc0sxf175GPD5AOp0ZVQvTGENHZ5TEYJJIYQGDgyk8HpvuaB+F4RDx/gTRaGzidkZExjRc/qm6qjxX/inen6Av1o/X58X22GBZuK
5LMjlU8sXJOvTG+nPDPgrDQ/cjY8BxXGzLwuOxKYoUsOi4OZxw3NxxuwaoC1jeloKgj+lVxezo6COWGMr89do2tm29mfI+vjKOi8d2CQd9Q+MpCobGXIUCXtqjA7y2oxPLsvB5bKZXFY9ZQFrkrfrTky/w0stv4PXY9Pb14/N6SSRSuGNUQc9mXXa1ddPc0kEmkwVj+NvLbxDw+3AclxnTq5UUInIE2lv5J4yhvKyYpWecwPN/20BfXz+ua7AsKCsrwTWGnp6hgC4+MJB7P9uysD02jusSiw1gwaiSUoebWgDlbZtaHuG4WVNYMLOS42dPoaQwiOMYsuYwDwAcg8e2CAe9lEaGCm8Ot0zGE2lS6Sy2ZVFUMJQNvKOjTy2B8ra1tnXx7PPr8Xm9WLZNOp2lpzdGOp3e62uSyRRrXljPG4078Pm8OI5DNuuQzWbp70/Q3tEzjnsgIgdiuPyTZVk0NjVjWUNJXHPnzCBSWIDP56WyvISS4gjBgJ9AwE9vX5xUMo0xBtc1pFKZXOOIYWhoiGVZpNNZtu1sy72nysDIUaMg6MsVYp43vYKtu6Kksg42MJAan5R2y4LicIBEKkt/Mg7GEPR7yTgBXHfov0e2CMYS6VEFpEUORjQaw3FdFh03h9deb8J9c7yfx2PhjJEFZVng9XpYMH82sfgAW5paKC2JkEqlsWyL5tZOfvyze7jqwyuUDCJyhBmr/BPAJSuW8qs7VhGLJygvKyYUCtDd04fjuKRGPAwGg36SyaG/PR4Pruvi9/uYOX0qH73iQk45af64VQJQACiH3NTyCMXhIC3dfXREPWR7+0llDn93cNDnYTA11MoXCnhIpR3SWYe68giRggCNrT0MprKEAl4GU1l8HntUAWmRt2K4FEQ6kyVSWIAxhiWnL2L1My+xpamVvTWA+3w+QqEAFeUldHRGcV2XdDrDwEASgLraSvw+r2YIETlC7V7+CYYCw0gkzE9+dg8+n5ftO3fRuqsTY8C4DgDBgI+qihJ6ojESgymKi8KUl5YQ6x9get2UcQ3+QAGgHEY9sSSO45J13MNeEBrA47FJZ1wiBV5syybgh4FkFstjUV5cQDrr5MYqDo8BHDl/sMiB2r0UxKwZNTRtb6W9s4dZM2tpa+8mk3EwxuSKulpAIOjHtm2m100hHA7S2d2Lk83S3dOH6xo8tkUoFGT2zNpRcwuLyJGrta2LTY07wBguunAJj/3pedrau7HtoVF2w12+yVSGbNYBLPx+H0VFYRLJJKFQgIsuXDLuv3UFgHJYDJeHSaQzjEcuiAV4LAvbhlTaIeAf+n+vbeVmABlumRyesUTBnxyM3UtBtHV007S9lY9ccSF+n5cn//Iia9dtwjVDLXpFkTA+v5ey0mL8fh+WBZHCAl5+ZTPx2ACuMdj2m49IlkUsNsDWbS2aIUTkKLD6mbXc8ssHaNzaDED9rFpOX3wcO1s6iMUGSKZGjzNv74xSVlrE1OpyIoVhjDFcuOwMLr347HHfdgWAclgEvF6SqQx9/alx+TwDWLZN0GeRTGfJJg1e22LW1NJRM4CMHKsocjCGS0E0zKrLlYJobGrG7/NSWlrEhtebiBQWECkMkU5nyGazeL1eZs+sYfbMWto6uumJxggGA/gDfpLJZG6soG1ZxOIDpDNZPjROA8FF5OAMl3/avqONokgBGIsdO9vx+30MDCT2CP4AQsEA//DJy7Eti/997Fksy2LDxq088PCfmdswfVxnA1IAKIfNQDJ92Lt9hwW8NpXFIbKOIZnOUlMRoawwpOnf5JDbWymI0tKiXEJIdXU5G17bQjqTxXUcysu9VFWW5gLG1l1duI6D12vj8XiwbRsnm8Xj8TCnYRqf+eTlnHj83IneVRHZh2g0RjQ6NPf3UGsepDMZ+mL9ZDPOmK/x+320t/fwyOPPkslkKYqE6enp5alnX2bm9KmUlxdz+mnHcf45px32QFABoBwWOzv6SO3lB3CoWQxlVboGQgEvGcelor
iA0sLQuHy+5JfhUhAPrlpNY1Nzbjqo4Yu1x7bZ2tTy5uBvQ9YxdHVHeexPL3DKSfPw+32Ulkbw2BZ+31Br9OBgCsu2CRUEuGTFUgV/IkeB0tIiSkuH5v6O9w+AGap/WxAMUFZWRHtXD4OJFO6bGWG2bVEztZLXNm4hk84ytbqCltYOOrt7sS0Lx3Fo3NrMSy+/wZrn13PlB5Yd1koACgDlkNvVHWd7Ry9j1MA9LAzgugbbQtm9Mi72VgqiprqCBfNm8cc///XNxCdDMODHGJfBZIpnn1/P8QsbuOajF9Pd08f25nYGkylKiyMUFxcya0YN559z2sTunIgckJrqCq68bBmx2OgxgJddeh5Pr1lHKp1h0+YdpNJDXcHhghCLTz6WbTt3UVQUpi8WJzGYxHEcggUhEokUljXUTZxMpQ97JQDdJeWQSiQz7OjoI+Ab31Mr6PcymHaU3SvjZqxSEAALFzZQXBTGtmz6BxJ4vV4Sg0n8fi+pVIb0mzeD4UHfDz/yNJlslqqK0nErACsih8bww+BwFvDcOTOoqa6gvKyYxGCS9o5uslmXWTOmUjO1ks7uXsLhEJUVJex88wHQY9uEgn7SmSyO4+Dx2NTVVNHe2XNYKwEoAJRDajj7tzDkw2PBGHVwD4uqkjAzp5You1cm3NyG6RwzZwZbmlpwXJfMYHJovk/bZkpVGUWRcO7J/tKLz2bxqQv3aEkUkaPH3uoCuq5LNBpnTv00SooLcRyXxqZmlpy+iHXrN5PNOqTSGSKFIVzX0NzSiW1bNMyuYyAxeNgrASgAlEMq4PXi89gMJLMEAz4GxmmqtfboAMdMq1DwJxOuprqC66+5lDvvfYytTS10dfWSTGeIRMLMmzuDabVTRtX421tLoogc3ebOmUFtTWUumBtOGDv/nNM4/5zTiEZjbGrcwdNr1tHRFaUoEsbr9eD1ecdlSjgFgHJIFQR9TK8qZuuuKMYYgn7PmwPhXRwDtgWueTNxw2Pj8Vgk0weWLGIxNI2WZZGrLWhZYBlIZbL0DSQVAMoRYeQYwda2Lu5c+Rh+n5dptVPGdbJ3EZk4+0oYa23rAmDxqQtH9QIA49YjoABQDrnRU8ElyDoutm0R9HoYSGXo7U9iWRbhgJeBVBa/1yabddlXvWgL8Hk92DZ4bIuBZBabof/2eT3/t5LIEWK4ZW/B/NkE/L69Zg2LyOQ1VsLY7jMJXbJi6ahsX9UBlKNaQdDHnNoKasszo2beSCQztHTF2NHRS3wwTTrjEvR7KC0M0jeYIpt1KY+ESGUcBlOZofIuriHg8xApCFBbESHg9/LKlnaS6Sy2bWEB4aCf4oLgRO+2yJga6qdx8bvfOWqQuIjkh5HDPMaaSWii5v1WACiH1e4zbxQEfZRFQrR0xYgE/STtLAZIZhwKA36SVhbbtimN+Jk3vYLiwiCuY7A91ugEDwNbd0VJZR0CXg+zp5aq+1eOSHs87du2AkCRPLW3mYQmYt5vBYAyrnZ1x9nc0kPvQIqA1yYU9JFKZ0mmswTCAebUlVNVGt5vNq/m9ZWjwZH0tC8iE29fMwmNN3vcP1Hy1nCNQI9tEfB5cFxDKp3F47GxLQsD9PYnSaayBxTQFQR9lBaGFPzJEWv4ab+6qjz3tB/vTxCNxiZ600RkAgwnhliWRWNT87hk++6NWgBl3AzXCCwq8GNZ0DeQIpnO4nFcigoClBeHGExl2dHRR3E4qMBOjnpH0tO+iBwZ9jaT0HhTC6CMm+EagYOpLIUhP0UFAYoKAhSG/JQXh7AsKzeXbyqbnejNFXnbjqSnfRE5cgxXCJjIa4FaAGXcDNcI3NHRRyyRxuexqZ9aSnvvAIOpLKGAV3P5yqRzpDzti4iMpLusjKuxkjcCfu+ooFBz+cpko9k+RORIowBQxt3upWGU0SsiIjK+FADKEWH3oFBEREQOHyWBiIiIiOQZBYAiIiIieUYBoIiIiEieUQAoIiIikmcUAIqIiIjkGQWAIiIiInlGAa
CIiIhInlEAKCIiIpJnFACKiIiI5BkFgCIiIiJ5RgGgiIiISJ5RACgiIiKSZxQAioiIiOQZBYAiIiIieUYBoIiIiEieUQAoIiIikmcUAIqIiIjkGQWAIiIiInlGAaCIiIhInlEAKCIiIpJnFACKiIiI5BkFgCIiIiJ5RgGgiIiISJ5RACgiIiKSZxQAioiIiOQZBYAiIiIieUYBoIiIiEieUQAoIiIikmcUAIqIiIjkGQWAIiIiInlGAaCIiIhInlEAKCIiIpJnFACKiIiI5BkFgCIiIiJ5RgGgiIiISJ5RACgiIiKSZxQAioiIiOQZBYAiIiIieUYBoIiIiEieUQAoIiIikmcUAIqIiIjkGQWAIiIiInlGAaCIiIhInlEAKCIiIpJnFACKiIiI5BkFgCIiIiJ5RgGgiIiISJ5RACgiIiKSZxQAioiIiOQZBYAiIiIieUYBoIiIiEie8U70BoiIyNvT2tZFNBqjtLSImuqKid4cETkKKAAUETmKrX5mLQ+uWk28P0GksIBLVixl6ZknTvRmiRw2euA5NBQAiogcpVrbunhw1WqMMTTMqqOto5sHV62moX7amDfG1rYuNjXuAGOYO2eGbp5y1NEDz6GjAFBE5CgVjcaI9ydomFWHx2NTXVVOY1Mz0Whsj+Bu9TNrueWXD9C4tRmA+lm1fPIT79PNU44ae3vgiUTC+H1etQi+RQoARUSOUqWlRUQKC2jr6Ka6qpy2jm4ihQWUlhaNWq+1rYs7732MLVtb8Pu8uMawectOfvnrh/faWihypBnrgWfNX9fzk5/dg8frUYvgW6QsYBGRo1RNdQWXrFiKZVk0NjVjWRaXrFgKwIaNW2lt6wKGbpxbt7bQ2xenoyvKrl1ddHf3se7VzfzpyRcmchdEDtjIBx7Hcdm6rYXOrl58Pi8Ns+oYGBjk9jtWsfaVTaNe19rWNer3IEPUAigichRbeuaJNNRPyw2Kb9yyk5t/dAedXVF8Xi8XXbiEmTNq6IsPkM1myWSzGAMWFhh49vn1nHfOaWoFlCNeTXUFS05fxMOPPE1ndy8+r5eqilJmz6ylubWD1zdtpyca4zs/+DWf/MT7aKifxh+ffIE1L6zHcVy1EO5GAaCIyFGuprqCmuqK3BipXW1ddHb1EosN8EbjDj74/vMpCAUxBrJZFwDLAtu22d7cxosvbeSUk+YrCJQj0nDW76bGHTy9Zh3ZbBa/18uZpy9iw8atbN3Wwrr1m+mJxrAsi61Nrfzbd39F9ZRyNr6xDZ/XywnHz8EYMypJKt+ziRUAiohMEtFojM6uKJ1dvVgWTK2uYFd7F39+6m8kBpMEAz5SqTQGMAZ6e2P09fXzi9sf5He//wsXX7iExacuzOuboky8kYFZ45adPLhqNR1dUZq2tTK9bgonHDeXto5uNmzcyoL5s/nd7/9Ce2cPwUCAsrIislmHzVua8fm8BAN+/H4vmxp38o5TF9DW0UM0Gsu9bz5nEysAFBGZJEpLi/B5vcRiA0ytriCRGKQoEsZ1DKFggO6evqGmP2MASKYyALzRuIPwrk7++rfXqKwooaAgREV5Mdd89OK8uynKxBpZ5sXjsenr66d6SjnVlWVs2ryD1l1dlJcVU1oSYf1rW+jtjeM4DhiDawx9ff0MDiZJpTJk0hkCAT/GNaTSaXa2dFBWWkQ6k31L5ZMmKwWAIiKTRE11BRdduIQ3Gnewq72LokiYyooSiosLGUymcBwX27ZwHDPqdfF4gmQyRSbj0BON4fd78ft8tHd0591NUSbO7mVeNm7axpamFurfzPq1bZvm1g6SyRRgkclmmVJZxpSqMl7b2EQ8PoBlAViAYVPjTqqry0km09i2RTDo55IVS/H7vAdcPmkyUxawiMgkcunFZ/P5z3yIBfNnU11dztTqCpaduxifz4ttWbiuO+brMhln1N+WDY1bW1jzwvrx2GyRXJmX6qpyEoNJCkIBHMehubUDx3FJpdJ4bBuPx4PjumSzWQoKgr
z+xnZs2wLebNw2Btu2SaUzbN/RRld3L16Ph7qaSlxjSGeyo7KJ91Y+abJTC6CIyCRz6cVnM3NGDTub25lWN4Wnn32Z5tYOLNsa7v3dK8sayhC2LZuMm6G/PzE+Gy15b7jMy8vrN+WSmIyBWKyfzYMpfD4vS05fRGVFCel0lmeef4XNW3bSHe0Dy86NbjCA4/zfg47juLTs6uS/f/UQ9z/8F2ZNn8qxx8yis7uXxqbm3BjAfGr9AwWAIiKTzshxVN3dfWxpaqavr590Jrvf1xoDruuSyTiUFBVy/MI547DFIv9X5uXmH/+WTCZLUdHQEAa/38dJi44hGPTj83lxHJfEYJL6WbWk0hkymSyu4xLw+3LjWnfnugbLcnEyDus3bGHbjl0sPLaec886hfPztAySAkARkUlk5DiqKZVlrF33Bv39Cdz9Nf2N4PF6KCku5OMfXsGJx889jFsrMtrchunMmllDdWUZ4XCIzq5eXly7kWQyTV+sn/aOHizLIhQKcMX7L+DM0xfxpX/9EVu2teDzeclk3aGkkDFYWKTSGbDA6/FgWRbr1m/m/HNOy8uSMAoARUQmkZHTZfVE+4a6cjNZss7YY/8AbNvCsiwcx8VjW/h9Pqqryzl+YUNe3hhl4pSWFlFVUYrjujiOy7r1m98M+Pxs2xHHY9tMn16NMYYNr2/lfZecwzlnncK2nW2k0lm8XnuvAWDWcUgmU/j9Prw+L3U1VbR39vCnJ1/g5fWb864kjAJAEZFJZHgc1dZtLXT19NLd07fP4A+GuseGRk5BMBRgSlUp0WicH/30birfvBkP3xhHzjqigFAOteHpDR9ctZrNW3YS709QEArw+hvb6eyK4rouvbF+bNtiy9YW7nvwCbp7+jh+QT07mtvp6u7Dtq03z+nRLBgaBmENjQvc0tRMcXEhzz6/nlAokCsJc+fKx3CNYW7D9El9jisAFBGZRGqqK5g1o4bf3P0HOjp7cBwHr9eD67oY12B7bIwxBAN+XNcllcow8lZZVBimrLSYto5utm5rJRIJM3/uTNo6uvnZL+6nuLhQ02rJYTU8veGaF9bznR/8Bo/HJhQM0LKrE8dxCQUDACQGk6z6w9MURQqHip63dWNbFn6/D+O6OC5ks9lcC3ckUoDrGEqKC3Edlx3N7XzwtONY/9oWqqvK8XhsUqkML617g2hvnNqaykl9jqsMjIjIJNLa1kXT9laKi8J4PB78Ph8AheEQwVCQ4qJCKitKWXL6IsLhAgoKggSD/tzr+2ID9ET7yGSGbpx1NVV4PDbhghBbmlpIJtM0zKrLTavV2tY1Ubsqk1hNdQXz5sxganU5oVCQ/oHBN8ftgTEml6y0s6WTLVubefb59fj9XgIBP16Ph1AokCt5ZNs2kcIQ6VSWKVPKOPP0RSxdcgKzZ9Zw3MKGXEmY3r5+1q3fjM/rZW7DtEl/jisAFBGZRKLRGB1dUVKpDH6fl1AoiMfjIZXOUBDyU1tTyayZNfT29TOQGCSdGcqa9HiGbgfpdIZYPMGMadXMbZjOQGIQx3Fpbu0AYFptVa54brw/QTQam7B9lcmttLSI2TNrqZ9Vy4mLjqGsrJhQMEhhYQEGF6/HQ2VFCZUVpcT7E0OZ6yWFlJUW4fF4KSoKM7d+GtPqplBSHAELiiIFQ2MMHZfKilLmNkznkhVLsSyLzVt2kslmOeH4ORQXFU76c1xdwCIik0hpaRFO1qEv1k9ZaRHtHT1ks1k8Hg+lpUUUhkNMn1ZNYThE49ad9ETjQ8kfHg8e20NRUZhP/91lXLjsjNx8qY1NzQQDfhpm19E/MEhhuCBvi+fK+Bk5HjDen2BuwzQ6Onpo74ySTmeZUlXG8QsaKCkupLOnl6nV5cw/ZhadXVFi8QH8fh9lpUVgYEdzG6WlxdTWVO1R+6+muoKG+mls2rydu+//I36/Ly8KRCsAFBGZRBq37CSVztA/MEg8PoABSooKKSsrZmp1OVu3tXLsvFkUFxVSV1tFtD
cOb3anRQoLqKut5B2nLhx1YxxO+hgZEOZr8VwZX8PjAaPRGJsad/DYn54nEPDT1t5NTXUFdTVVtHV0M3P6VPx+H9t27qIwHOJdF5xOpLCAlQ/8iS1NLQA0zK5j2bmLmdswfY8kpuHz3bbtvDnHFQCKiEwSwzUAZ0yrprgozEsvb6Iv1k+4METD7DqmVlewdVsrO1s6MAai0TihYJBg0EcymSGVStPdE+P5FzcA5G6SwzfA3QPCyXpjlCPL8Hn29Jp1hEIBzlpyEi+v38SO5nZe3biFbNYhGPDj8dj09fWTGEjy9Jp1eGybdDrDsfNmMa22io7OKL9/5Gn+4ZOX7/XcHRlwTvZzXGMARUQmiZFzqS6YN5tj583Ati0GBpI0NjWz7tVN1NVWEQz62bxlJwaYP28mrmuwbQuv10sg4OOXv36Ym77/P9z8oztY/czaUZ9RU13BgvmzJ/WNUY48I89tj8fmhOPmMrW6gul1U0il0kQKC6goL6G1rYuW1g6mVJaRTKXZ2dLBtNoqevv62dLUwsvrN/Pjn92zx3k9Ur6c42oBFBGZJIZrALZ1dFMYDrFjZwclRYUA7NzZDpbFCcfNYdm5iyktiXD3/X8kmUzT3d2LZdtYDM2WkHlzfJXjuDy4ajUN9dMm/c1Qjmwjz+1wQYiX129iZ3M7mzZvp39gkEhhAV6vh77YAB7bZtuOXcycPpXXXm+icUsznd29DA4mKSstwu/z6rxGLYAiIpPG8KB5y7LY1DiU0VhbW0liMIXtsfF4LNKZDE+vWcfcOTO48rJlBIP+oULRBmZMq2YwmaKoKEy4IDTpsyDl6DF8bre1d/O/jz/LxtebiMUGsLDwejz0ROPEYgN43qxzubO5nc6uKPWzasG26InGCIWCzJs7g9kza3VeoxZAEZFJZXgM06bGHfzq179nw+tbMWaoeK7jOMTjCTq6okSjsdy6f3ryBZ59fj39iUF8Pi+VFSUUhIKTPgtSji4N9dMoLi5k1vSpvJHK0Bfrpy/W/2bQ5+K6LkVFYXxeL/H+BOlMlk9+4n1EImF+/LN78Pu8TKudovP6TQoARUQmmeHEjZ0723h14xYM4DgOVZVlJAaT+Lze3M2vprqCj175bs4757RcpuXTa9blRRakHF2i0RiO4zK9rpoNG5uwLQvXGLyWjYVFOFzAWWeeyEAiSSaT5R8+eTknHj8XgKs+vCJvsnsPlAJAEZFJ6rxzTuPZF9bTtK2Fvr4BYvEBQqEAF1+4ZI+b33DQuGD+bBafujAvsiDl6DI8DrAnGiMcDuI4Dql0BsuGgnCQmdOrib85HvCSFUtzwR/kV3bvgVIAKCIySdVUV7Bg3mzWb9jCYDKF3+/j3cvO4NKLz97v63SDlCPN8DjAO1c+hsfjoay0mLq6SnxeLyUlEf7u6kvx+7yjArzWtq5RQZ/O6/+jAFBEZJIanhd44fzZRCIFxOMJunv6aG3r0o1QjkrDLXl/fPIF1rywHsdxx2zxA1j9zNrcLCLD6yw988QJ2vIjjwJAEZFJarh2WsOsOjwem8ryUhqbmolGYwoA5ahVU13Bx658N+e/OW51rC7d4aLoxhgaZtXR1tGt0i+7URkYEZFJamTttHyY21Tyy74KNu9eOFoljfakAFBEZJIaWRewsakZy7KU/Sh5QQ8/+6cuYBGRSUzZjzLZ7J7YMZbhhx+Vftk7BYAiIpOcsh9lsngriR16+Nk3dQGLiIi8Ta1tXWzYuJXWtq59LpODt3tihzGGB1et3ufx3dc4wXynFkAREZG3YaxWKUAlSA6x3bPaq6vKldX+NigAFBEROUhjlRu5c+VjYEEoGNijBAmgLsmDNDKxo7qqXIkdb5MCQBERkYM0VqvUS+veAGDWoppRLVV/evIFXl6/mY6uKD6vl4svXLLfWVnk/yix49BSACgiIrIfe8s8HatVqrQkAhajlnk8Ns8+v56+WD+dXb1EozFefW0L0d4413z04gncs6OLEjsOHQWAIi
Ii+7CvzNOa6gqWnL6I3z/yNF3dvVRWlHLlB5YBjGqpOvaYmTzxl78R7Y2TSqdJZ7IM9A3ys1/cT2lJRC2Bb4Gy2g8NZQGLiIjsxf4yT1c/s5an16wjnc3i9XpZcvqiXCvVGYuP48x3LGLB/Nm89sY2tu3cRXNLO9FoHMdxCBeEcB2Xhx95WpnCMu7UAigiIrIX+8o8BXLB4XHz62nr6ObpNesAWPnAn9jS1ILjOBjXcMzcGZx0/DH85Zm1DCQGKS4qpDAcIhDwEY3G2NS4Q61aMq7UAigiIrIX+5pSbKz5Znc0t/OrXz/MpsYd+HxefF4vffEBWnd1Ma1uCqedfCx+vw+PZ+j22xcboLWti7vvfZzVz6yd4L2VfKIAUEREZC/2NZ/yyOCwL9bPU2teZmtTM69v3kFnVy8dnT30DwxijKEv1s/AwCDVU8o54fi5TKurJhZPAIYF82YTCgX2W9RY5FBSF7CIiMg+7C3zdDg4vOWXD/CXzduJxQYIBnxD3b4GMhkH28pgXIMxhm0725heN4Vz3nkyq/7wNAOJQdIZL69v2saJi44hk82qqLGMGwWAIiIi+7G3zNOG+mkURcLMmj6VXW3dWLbFQCKF3+chncmSToPBUFAQxHUcykqLePGljUR744SCAcDQ29fP39Zu5MRFx6iosYwbBYAiIiIHKRqN4bgu84+ZRSyeINobI5vN4vF48Pt8AFiWxeBgilde3cKGjU14fR5CgQBTq8vp6u4jMZgknclyxuLjqKmu2GvNQZFDSQGgiIjIQRoeBziQGKSuppKm7a3Yto3f58WyLBKDSSrKixlMpklnMpi0wZv2MDiYoiAcorSkEL/fy4L5s1m4oIFf3/m/PPv8ehzX1RzCclgpCUREROQgDY8DHBxM0djUjOO4lBQVUllZypSqMiwsBpNpEokktm3hsW2KImFc19DbG2MwmWZuw3ROOXE+P/yvu/j+f97F2nVv4PN696g5KHIoqQVQRETkbRpMpujr68fjsfH5vPT2xuntjWOAWGwAANu2CAUDhMMhvD4PH7vy3Rx/3ByKiwr57g9/w6bGHcT7E/i8Xta+8gbnnXUq7Z09SgyRw0ItgCIiIgdpeKYQy7IoKY4QLgjS0Rkl+mbwN5LrGgaTKbq6e6koL+GyS8/j7CUn0Rfrp3FrM36fD7/Pi+s69PTEaNy6M1dzUORQUwAoIiJykIaLQU+rrcK2bdLpDLZt7XV91zX4fD7Ky4r/b6ExJJMpor1xMtksyVSaZCqNPaLmoMihpgBQRETkIA0ngfQPDFJbU8FgMo3juvt8jWVBJpPNTSdXXBzB4/HgOA5+nw+fz0c4HOTcs0+loX7aeOyG5CEFgCIiIgdp5Ewh6XQWr9dDpDCE37/3Ifax+ABbtjbzxF/+SmtbF36fl5nTpzK1uoJIYQElRYV4PV4ef+IFbv7RHZoiTg4LJYGIiIi8DcMzhbz40kYGEoP09MQYHEzvdX2vxybruPzm7kd57fVtLDtvMbNm1lBVWYrHY/O3ta8TCRQwt2Ea/QODPLhqNQ3109QVLIeUWgBFRETepprqCk45aT7FRYVksg7WXsYBhsNBystLqKosIRjwk0yleXrNOpacvohwOER3TwwDnHD8UHZwdVU58f5ErrtY5FBRC6CIiMghYlkWwYAf2/q/ANC2LcLhEMGAn9mzaunu7gNjEQj4qaupor2zh7kN01l86kI2bd7O3ff/Eb/fh+O4tHV0KxNYDgsFgCIiIgdp5LRt0WiMcDhE/exa2jt7CPh9OI6D7fFgXMO7l51BtC9OS2snjuOy6Lg5DCQGcwHe8HzDtm3z4KrVNDY152YDUfevHGoKAEVERA7C6mfW8uCq1cT7E0QKC1hy+iIGBgbZsHEryWQar8cmFAoytbqcaG+cd5x2HKecNJ8/PfkCzz6/nkw2SzDo3yPAGx5TqPmA5XBSACgiIvIWDReANsbQMKuOto5uHnvi+aHAz+vBti0cd6gU9OBgis
LCAqbVTQHgpBPmsXBBA36fd68B3nBroMjhogBQRETkLRouAN0wqw6Px6a6qpyX1r2Bz+flwgvO4KV1b7ClqZl0OoNrDFe8/wLi8QF+c9cjuRbDS1YsZcH82RO9K5KnFACKiIi8RcMFoNs6uqmuKqeto5vS0ggYGEgMctaZJ1JRVkxiMMnfXXMpC+bP5uYf3TGqxVDlXWQiqQyMiIjIWzSyAHRjUzOWZXHlZcu48gPLcssqKkr4zCcv54JzTsu1GFZXledaDFXeRSaSWgBFREQOwt6SNcZaNlaLocq7yERSC6CIiMhBqqmuYMH82aO6cfe2bPcWQ5V3kYmkFkAREZFxsLcWw5G1BBUQynhRACgiIjJOdi/vsnstwUtWLGXpmSdO4BZKvlAXsIiIyATYvZagMYYHV62mta1rojdN8oACQBERkQmgzGCZSAoARUREJsDIzGDHcZUZLONKAaCIiMgEUGawTCQlgYiIiEyQvWUGixxuCgBFREQm0O6ZwSLjQV3AIiIiInlGAaCIiIhInlEAKCIiIpJnDmgMoDEGgFhMtYlEZHIYvp7puiYik8Xw9Ww4btuXAwoA4/E4ANOmTXsbmyUicuTRdU1EJpt4PE5xcfE+17HMAYSJruvS2tpKJBLBsqxDtoEiIhPFcRwaGxtpaGjA4/FM9OaIiLxtxhji8Tg1NTXY9r5H+R1QACgiIiIik4eSQERERETyjAJAERERkTyjAFBEREQkzygAFBEREckzCgBFRERE8owCQBEREZE8owBQREREJM/8/z1iSWj5iDePAAAAAElFTkSuQmCC", "text/plain": [ "
" ] @@ -149,16 +138,16 @@ "eval_data_source = next(valid_dataloaders.source_iter)\n", "eval_data_target = next(valid_dataloaders.target_iter)\n", "\n", - "plot_samples(eval_data_source, eval_data_target)" + "_ = plot_samples(eval_data_source, eval_data_target)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Next, we define the architectures parameterizing the dual potentials $f$ and $g$. We first parameterize $f$ with an {class}`~ott.neural.models.ICNN` and $\\nabla g$ as a non-convex {class}`~ott.neural.models.MLP`. You can adapt the size of the ICNNs by passing a sequence containing hidden layer sizes. While ICNNs are by default containing partially positive weights, we can run the {class}`~ott.neural.solvers.neuraldual.W2NeuralDual` using approximations to this positivity constraint (via weight clipping and a weight penalization).\n", - "For this, set `pos_weights` to `True` in {class}`~ott.neural.models.ICNN` and {class}`~ott.neural.solvers.neuraldual.W2NeuralDual`.\n", - "For more details on how to customize {class}`~ott.neural.models.ICNN`,\n", + "Next, we define the architectures parameterizing the dual potentials $f$ and $g$. We first parameterize $f$ with an {class}`~ott.neural.networks.icnn.ICNN` and $\\nabla g$ as a non-convex {class}`~ott.neural.networks.potentials.PotentialMLP`. You can adapt the size of the ICNNs by passing a sequence containing hidden layer sizes. While ICNNs are by default containing partially positive weights, we can run the {class}`~ott.neural.methods.neuraldual.W2NeuralDual` using approximations to this positivity constraint (via weight clipping and a weight penalization).\n", + "For this, set `pos_weights` to `True` in {class}`~ott.neural.networks.icnn.ICNN` and {class}`~ott.neural.methods.neuraldual.W2NeuralDual`.\n", + "For more details on how to customize {class}`~ott.neural.networks.icnn.ICNN`,\n", "we refer you to the documentation." 
] }, @@ -171,7 +160,7 @@ "# initialize models and optimizers\n", "num_train_iters = 5001\n", "\n", - "neural_f = models.ICNN(\n", + "neural_f = icnn.ICNN(\n", " dim_data=2,\n", " dim_hidden=[64, 64, 64, 64],\n", " pos_weights=True,\n", @@ -181,7 +170,7 @@ " ), # initialize the ICNN with source and target samples\n", ")\n", "\n", - "neural_g = models.MLP(\n", + "neural_g = potentials.PotentialMLP(\n", " dim_hidden=[64, 64, 64, 64],\n", " is_potential=False, # returns the gradient of the potential.\n", ")\n", @@ -198,7 +187,7 @@ "source": [ "## Train Neural Dual\n", "\n", - "We then initialize the {class}`~ott.neural.solvers.neuraldual.W2NeuralDual` by passing two {class}`~ott.neural.models.ICNN` models parameterizing $f$ and $g$, as well as by specifying the input dimensions of the data and the number of training iterations to execute. Once the {class}`~ott.neural.solvers.neuraldual.W2NeuralDual` is initialized, we can obtain the neural {class}`~ott.problems.linear.potentials.DualPotentials` by passing the corresponding dataloaders to it.\n", + "We then initialize the {class}`~ott.neural.methods.neuraldual.W2NeuralDual` by passing two {class}`~ott.neural.networks.icnn.ICNN` models parameterizing $f$ and $g$, as well as by specifying the input dimensions of the data and the number of training iterations to execute. Once the {class}`~ott.neural.methods.neuraldual.W2NeuralDual` is initialized, we can obtain the neural {class}`~ott.problems.linear.potentials.DualPotentials` by passing the corresponding dataloaders to it.\n", "\n", "Execution of the following cell will probably take a few minutes, depending on your system and the number of training iterations." ] @@ -259,7 +248,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "The output of the solver, `learned_potentials`, is an instance of {class}`~ott.problems.linear.potentials.DualPotentials`. 
This gives us access to the learned potentials and provides functions to compute and plot the forward and inverse OT maps between the measures." + "The output of the solver, `learned_potentials`, is an instance of {class}`~ott.problems.linear.potentials.DualPotentials`. This gives us access to the learned potentials and provides functions to compute and plot the forward and inverse OT maps between the measures." ] }, { @@ -520,7 +509,7 @@ "source": [ "## Solving a harder problem\n", "\n", - "We next set up a harder OT problem to transport from a mixture of five Gaussians to a mixture of four Gaussians and solve it by using the non-convex {class}`~ott.neural.models.MLP` potentials to model $f$ and $g$." + "We next set up a harder OT problem to transport from a mixture of five Gaussians to a mixture of four Gaussians and solve it by using the non-convex {class}`~ott.neural.networks.potentials.PotentialMLP` potentials to model $f$ and $g$." ] }, { @@ -578,8 +567,8 @@ "source": [ "num_train_iters = 20001\n", "\n", - "neural_f = models.MLP(dim_hidden=[64, 64, 64, 64])\n", - "neural_g = models.MLP(dim_hidden=[64, 64, 64, 64])\n", + "neural_f = potentials.PotentialMLP(dim_hidden=[64, 64, 64, 64])\n", + "neural_g = potentials.PotentialMLP(dim_hidden=[64, 64, 64, 64])\n", "\n", "lr_schedule = optax.cosine_decay_schedule(\n", " init_value=5e-4, decay_steps=num_train_iters, alpha=1e-2\n", @@ -721,8 +710,8 @@ "\n", " input_dim = 2\n", "\n", - " neural_f = models.MLP(dim_hidden=[64, 64, 64, 64])\n", - " neural_g = models.MLP(dim_hidden=[64, 64, 64, 64])\n", + " neural_f = potentials.PotentialMLP(dim_hidden=[64, 64, 64, 64])\n", + " neural_g = potentials.PotentialMLP(dim_hidden=[64, 64, 64, 64])\n", "\n", " lr_schedule = optax.cosine_decay_schedule(\n", " init_value=5e-4, decay_steps=num_train_iters, alpha=1e-2\n", @@ -804,7 +793,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.10" + "version": "3.10.6" }, "vscode": { 
"interpreter": { From 67202c2ee259aa8a9cf9be4873cd65c97340992e Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Tue, 2 Apr 2024 11:31:58 +0200 Subject: [PATCH 177/186] Update ICNN inits --- docs/tutorials/icnn_inits.ipynb | 44 ++++++++++++++++----------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/docs/tutorials/icnn_inits.ipynb b/docs/tutorials/icnn_inits.ipynb index 5741e44e5..1f9d01b3c 100644 --- a/docs/tutorials/icnn_inits.ipynb +++ b/docs/tutorials/icnn_inits.ipynb @@ -8,7 +8,7 @@ "\n", "As input convex neural networks (ICNN) are notoriously difficult to train {cite}`richter-powell:21`, {cite}`bunne:22` propose to use closed-form solutions between Gaussian approximations to derive relevant parameter initializations for ICNNs: given two measures $\\mu$ and $\\nu$, one can initialize ICNN parameters so that its gradient can map approximately $\\mu$ into $\\nu$. These initializations rely on closed-form solutions available for Gaussian measures {cite}`gelbrich:90`.\n", "\n", - "In this notebook, we introduce the *identity* and *Gaussian approximation*-based initialization schemes, and illustrate how they can be used within the `OTT` library when using {class}`~ott.neural.models.ICNN`-based potentials with the {class}`~ott.neural.solvers.neuraldual.W2NeuralDual` solver." + "In this notebook, we introduce the *identity* and *Gaussian approximation*-based initialization schemes, and illustrate how they can be used within the `OTT` library when using {class}`~ott.neural.networks.icnn.ICNN`-based potentials with the {class}`~ott.neural.methods.neuraldual.W2NeuralDual` solver." 
] }, { @@ -20,7 +20,7 @@ "import sys\n", "\n", "if \"google.colab\" in sys.modules:\n", - " !pip install -q git+https://github.com/ott-jax/ott@main" + " %pip install -q git+https://github.com/ott-jax/ott@main" ] }, { @@ -39,8 +39,8 @@ "\n", "from ott import datasets\n", "from ott.geometry import pointcloud\n", - "from ott.neural import models\n", - "from ott.neural.solvers import neuraldual\n", + "from ott.neural.methods import neuraldual\n", + "from ott.neural.networks import icnn\n", "from ott.tools import plot" ] }, @@ -50,9 +50,9 @@ "source": [ "## Setup training and validation datasets\n", "\n", - "To test the ICNN initialization methods, we choose the {class}`~ott.neural.solvers.neuraldual.W2NeuralDual` of the `OTT` library as an example. Here, we aim at computing the map between two toy datasets representing both, source and target distribution using the\n", + "To test the ICNN initialization methods, we choose the {class}`~ott.neural.methods.neuraldual.W2NeuralDual` of the `OTT` library as an example. Here, we aim at computing the map between two toy datasets representing both, source and target distribution using the\n", "datasets `simple` (data clustered in one center) and `circle` (two-dimensional Gaussians arranged on a circle) from {class}`~ott.datasets.create_gaussian_mixture_samplers`.\n", - "For more details on the execution of the {class}`~ott.neural.solvers.neuraldual.W2NeuralDual`, we refer the reader to {doc}`neural_dual` notebook.\n", + "For more details on the execution of the {class}`~ott.neural.methods.neuraldual.W2NeuralDual`, we refer the reader to {doc}`neural_dual` notebook.\n", "\n", "## Experimental setup \n", "\n", @@ -114,8 +114,8 @@ "### Identity initialization method\n", "\n", "Next, we define the architectures parameterizing the dual potentials $f$ and $g$. These need to be parameterized by ICNNs. You can adapt the size of the ICNNs by passing a sequence containing hidden layer sizes. 
While ICNNs are by default containing partially positive weights, we can solve the problem using approximations to this positivity constraint (via weight clipping and a weight penalization).\n", - "For this, set `pos_weights` to `True` in {class}`~ott.neural.models.ICNN` and {class}`~ott.neural.solvers.neuraldual.W2NeuralDual`.\n", - "For more details on how to customize {class}`~ott.neural.models.ICNN`,\n", + "For this, set `pos_weights` to `True` in {class}`~ott.neural.networks.icnn.ICNN` and {class}`~ott.neural.methods.neuraldual.W2NeuralDual`.\n", + "For more details on how to customize {class}`~ott.neural.networks.icnn.ICNN`,\n", "we refer you to the documentation.\n", "\n", "We first explore the `identity` initialization method. This initialization method is the default choice of the current ICNN and data independent, thus no further arguments need to be passed to the ICNN architecture." @@ -128,8 +128,8 @@ "outputs": [], "source": [ "# initialize models using identity initialization (default)\n", - "neural_f = models.ICNN(dim_hidden=[64, 64, 64, 64], dim_data=2)\n", - "neural_g = models.ICNN(dim_hidden=[64, 64, 64, 64], dim_data=2)" + "neural_f = icnn.ICNN(dim_hidden=[64, 64, 64, 64], dim_data=2)\n", + "neural_g = icnn.ICNN(dim_hidden=[64, 64, 64, 64], dim_data=2)" ] }, { @@ -141,14 +141,14 @@ "name": "stderr", "output_type": "stream", "text": [ - "/home/michal/projects/nott/src/ott/neural/solvers/neuraldual.py:276: UserWarning: Setting of ICNN and the positive weights setting of the `W2NeuralDual` are not consistent. Proceeding with the `W2NeuralDual` setting, with positive weights being True.\n", + "/Users/michal/Projects/dott/src/ott/neural/methods/neuraldual.py:154: UserWarning: Setting of ICNN and the positive weights setting of the `W2NeuralDual` are not consistent. 
Proceeding with the `W2NeuralDual` setting, with positive weights being True.\n", " self.setup(\n" ] }, { "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "243d6aa24b1d45cba5ba10522373dc3a", + "model_id": "62abc21c2f8b47c09c328cb9ef44efd1", "version_major": 2, "version_minor": 0 }, @@ -191,7 +191,7 @@ }, { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAiIAAAG9CAYAAAAobB0hAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAACP6UlEQVR4nOz9e5Dk2XXYd37vvb9XZlZmVlV3dff0Y2YwAwxBEARIASQk0lKAkkIUpbVEhZYhr2mvwFUgJJu0KUMbNhB2mOJaIVhBhkO2Q2E9IkRpI0yL61CQ9JILBblikJZFUgJBiRKWBAbAvPox3V3dVZXv3+M+9o+bmV1VXf2crq6q7vOJqJmuqqzMrMfvl+d37rnnqBBCQAghhBDiCOijfgJCCCGEeH5JICKEEEKIIyOBiBBCCCGOjAQiQgghhDgyEogIIYQQ4shIICKEEEKIIyOBiBBCCCGOjAQiQgghhDgyEogIIYQQ4shIICKEEEKII3PogcjVq1f5D/6D/4BTp07RarX41m/9Vn7rt37rsB9WCCGEECdAcph3vr29zXd/93fzPd/zPXzhC19gY2ODr33ta6ytrT3U13vvuXbtGt1uF6XUYT5VIYQQQjwhIQRGoxHnz59H6/vnPNRhDr377Gc/yz//5/+cf/bP/tljff2VK1e4dOnSE35WQgghhHgaLl++zMWLF+97m0MNRD70oQ/xvd/7vVy5coVf+7Vf48KFC/zH//F/zKc//ekDb19VFVVVLd8fDAa8+OKLXL58mV6vd1hPUwghhBBP0HA45NKlS+zs7NDv9+9720MNRIqiAOAzn/kMP/ADP8AXv/hFfvRHf5S//bf/Nn/+z//5u27/V//qX+XHf/zH7/r4YDCQQEQIIYQ4IYbDIf1+/6Fevw81EMmyjI9//OP8+q//+vJj/+l/+p/yxS9+kd/4jd+46/b7MyKLiEoCESGEEOLkeJRA5FB3zbzwwgt86EMf2vOxb/7mb+add9458PZ5ntPr9fa8CSGEEOLZdaiByHd/93fz1a9+dc/HXn/9dV566aXDfFghhBBCnBCHGoj8Z//Zf8Zv/uZv8tf/+l/n61//Oj/90z/N3/27f5cf/uEfPsyHFUIIIcQJcaiByHd8x3fwsz/7s/wv/8v/woc//GH+m//mv+Fv/s2/yQ/+4A8e5sMKIYQQ4oQ41GLV9+pRil2EEEIIcTwcm2JVIYQQQoj7kUBECCGEEEdGAhEhhBBCHJlDHXonhHg+hBCYlA3WeRKj6RTpnkGVD/q8EOL5JYGIEOI9GUxKrm6OGExLvA9orei3Cy5sdOl3igd+XgjxfJNARAjx2AaTktev3KaqHe0iwRiNc56t0YxJVXP+VJdrt0f3/PxrF0/Ra+eSLRHiOSaBiBDisYQQuLoZg4xeJ1sGDzox9Dqawbji9cu3SRNNr5Pf9fnhpOYb17bIk4ThrHrobIks8wjxbJFARAjxWCZlw2Ba0i6SuwIBpRRZqtkel5xZ6xz4eWMU126PWClyep3swGzJ/mBElnmEePbIrhkhxGOxzuN9wJi9p5EQAnXjqK3HeX/wSSbAtGywLtAuEtLEoJUiTQy9TkZVO65ujtjdb3GxDLQ1mpGnhpV2Rp4atkYzXr9ym8GkPNxvWAhxKCQQE
UI8lsRotFY455cfq2rLrcGUmzsTtoczrPNsjUuq2u752to6ytqSGo3Re09DSinaRcJgWjIpG+DuZaCHCVyEECeDBCJCiMfSKVL67YJpaQkhUNWW28MZs9qitUIpyFJDVTfcGkz3BCPOe6xzpKnGeU/duD1BhDEa7wN2HuQ8aBlof+AihDg5pEZECPFYlFJc2OgyqWoG44qyWRSQKqz1ZGlCt5UxmtWUtY31IqsdnPcMJxXOB8rKsWmnaKXIU0OvnZNnCc55tFYk82Wfey0DLRij8ZVdBi5CiJNDAhEhxGPrdwpeu3iKN6/tsDWaoYCAIk8SVldyijwlSw3b4xmzyrIzLlEKfAikiQEgSzQhwKy2NNaz3iuoas96t0WnSIG9y0B6/nW77Q9chBAnhxy1Qoj3pN8pONVvYbRGKXAuUDUx6CirhjxL2Fjt0O/kvO/cKr1WQStLObfWIUsMdROzGJnRVI3l+u0JWsGF093lMsz+ZaDdQghMS0u/XSwDFyHEySEZESHEezKYlFzeHNI4B8TAwDooG8ukajjT76B1DChmlWUwLcmzhCJLOdVTDKc106qmahzeeVCKsrFcvTUiEEiMwTrPWq9gUtYMJ/We5mjT0pJnhgsbXeknIsQJJIGIEOKx3dnN0qCUorGONNFxiSYoGuu5dntEjA8Um4MJIcTlmG47jzUhqWEw8XH3TQAIzCrLlVsDLt8a0MoSjI5LM3mS0C5UDFqqWBS73m1JHxEhTjAJRIQQD23R1bSxjsZ6Gue4NZxgvUcp0Aqs9WijIIDzu5dR7vx7VjusK5f35ZxHKUUgEAJMyhqIRahaKc6sFjjvmZYNWap5+dwqrSyVzqpCPAMkEBFCPJRFV9NbwwnjWdwhoxTUjcX7gNJ3gg/rD+7nEYtZo8Z5rPeEQFy6CWH+73gjF0D5QFk3zKoGYzRFbpiVlu1hybmXViQAEeIZIIGIEOKBFl1Nx7OasrJ478mSWFzauHlo4cKyf8i9+ort//DidiEEmAckiVZYH9AEnI/LNLcGU5RWaKVIjOLWcMKk7LPSymT2jBAnnAQiQoj7ulMHEjMfPgSKPMG52Mp9N+8DirhEc4+kyJ6syJ3HiFt0k3lRawh+GaT4EHuW5InBh0DdeMq6Ymdc4ryX2TNCnHASiAgh7mvR1TRJDKNZTZponItZiIOCjcC9MyKLz+8PRhRgdOwvYn24+37VfFCeUgQTKBvPlVsDtFLUjd+zi+Z+Q/OEEMeP9BERQtzXoqtpLOMIaKWY1Q3WubsyGw/roK+rGk9t42PtpogzbKzzBEIclJcn7IxKpqWV2TNCnHASiAgh7mvR1XSxRFI1bj4b5sk9xv3uSmtFCIGytlS1m9eBZFTWkWdaZs8IccJJICKEuK9FV1NrHVmiKWuLX+xueQISHWtK7kUrBSga58lSw6leC6UUCkgOaPcOdw/NE0IcXxKICCHuazHcLs8SQoh1HAD+PbzGLwIPraA7b80ew434Bou6EUUrT+i0UorUsNop4kTfxsXlmHvcv8yeEeLkkKNUCPFAi+F2a91WXKpR6pHqQ3YHGEar5bJOYvS8JfydqGbRGE3NC1SdD8teI3Fyb02nSDm7usKscjJ7RogTTnbNCCEeyiIYmVYNznvG05pZ3eD8/Ipm15Zdo2JDsoXdoYJWoI2icTEAmZQ1SqllLciiD4kPcdkmSzVl5cgSQ4BlS3eA16/cltkzQpxwEogIIR7aSivjVLfN1mjG2fUOmztTRtMaD6hd0cYiINm9TVcRMxyLLMfidt4FMgOp0dTWsbuso2k8JZb+SsH7L6yzutLa07DstYun7vQRkdkzQpxIEogIIR7aol5kUtVUtWO9W2Cdp7EWu6u32UG9QiAumzQ2kCYaoxW1jVFH4wMJ/q6dOAGonWdcNkxLy+oKd3VS/eCLp5hWVjqrCnFCSSAihHgkiyWaq5sjdiYztFL4EFuvp4mmahzBB/bXsi5iDKOhyOKpp7H1YuAu1oU9g
UsyX+/RGqZlw7998wbv3NzBaI0xajmRd9FJdXVFMiBCnEQSiAghHlm/U9Br54xnNV9+6yY3dyYYpTBGxWm683TI7gLVxXKMUjGIUAqMUcu28G7Xck5q7mzZBUWiwYXAcFqhlSJPE073WxijpZOqECec7JoRQjyWRb0HwNnVDu0iw3uWu1wSo8gSvew3slgtCWGxMyYGJInRtPIErWK2JE01PsQlmUDcLuxCnGPjvKfIE3wIjGYNqZFOqkKcdJIREUI8tkX795V2RqfImJQ1ZW0JBNJ5Dw8d4v+9X+yICfN27ZCnsSFZY30MakLAubBs866JQYsPYbmEo1CkiaJqLLV1ZKnZ00l1pZU9/R+EEOKxSSAihHhsi/bvznosMRuSJHqevQjzXiDEybnzyb3MA4tWltDOU5wPbI/KGIQEUPMtvIrYO2T3FL3Ff7XWMQiaf9wYjZ8XrAohThYJRIQQj61TpOSp4frWBIjBR2M9RscAxPmA0QqlFf2VjLL2OO9p5yk+BLZGJbWNTcmSROOamCnRgCdmT0JQy94iQAxkfIi9R+brPdJJVYiTSwIRIcRjG04rZpXFeY9SkCWxqVhjHVorijRhrduiyBISo9galljnmVYW5zxKAwSM1qDA6LgssyhcXcy0UXF9BqVicav3gVaexiZn806q692WdFIV4gSSQEQI8VhCCPMCUTi33mE0rakaFzuc+kAIAWM0nVaK94HRtKHTSjFaMZo1tLOEwaSiVo48M8sAJktjVqOxd/qKpMagU0XdOMraUaQJ3VZK45x0UhXihJNARAjxWCZlw2Ba0i4S0sSQpwmNjXUb1jpGs5qydgzGFVlqWO+2WOsVvHV9h147IwSw3pMmGoXCmLiDxjrPSiullalYjJokOO+wLlBkCa0sIUk0tfVoH6STqhAnnAQiQojHstgxY+Z1GUopsvkuGLKEdpGxMy555YU11ubLJoNJtfyauom1IXq+v1cRl3ImrqG2njw1GK3od3Iq60iN5pXza5xd7UgnVSGeIRKICCEey3LHjPPoxNz1eec9WWpY67aWW2r3fM28D4mfF7RC3CWTZ4Y8MdTWY33Aes+ZfmdP1kO26Arx7JBARAjxWDpFSr9dsDWa0evoPVmJexWQ7vmadkaeJsyqBp3FbbqNiztq1rsFO+Oabjvjmy6eotPKJOshxDNKAhEhxGPZPQBvOKlpF0ksVHX+ngWke75mWtPKE+rGMistSkNiDK08ZTyzdFop77+wzko7P8LvUjzL9g9QlGW+oyGBiBDise0egDeYlvjKorW6bwHp/q+JA/AsASjmNSZSgCoO22BS3vm79WHPAEX5u3u6JBARQrwniwF4j3JledfXaEVg3plVrkzFIRtMSl6/cpuqdnsyeTJA8WhIICKEeM+UUo9cQPo4XyPEe7Xof1PVjl7nTu2RTgy9jmY4qbm6OaLXziUYfkokEBFLsl4qhDiuntT5aXf/m/1fr5SSAYpHQAIRAch6qRDi+HqS56f9/W/2kwGKT58EIkLWS4UQx9aTPj89sP+NDFB86uQn/Zzbv16aJgatFGli6HUyqtrN54mEB9+ZEEI8QYdxflr0spmW9q6vW/S/6bcLGaD4FD21QOS//W//W5RS/OW//Jef1kOKh/Ao66VCCPE0Hcb5adHLJs8Mw0lNYx0+BBrrGE5qGaB4BJ7K0swXv/hF/s7f+Tt85CMfeRoPJx7Bw6yXusoynFZSxCqEeKoOq57jcfrfiMNz6IHIeDzmB3/wB/l7f+/v8df+2l877IcTj+hB66XTsmE8rfjGtS20UlLEKoR4ag6znuNx+t+Iw3HoSzM//MM/zJ/8k3+SP/pH/+hhP5R4DPdbLy2rhls7U6wLGA1ZZsgSzdZoxutXbjOYlA/1GCEExrOanXHJeFZLvYkQ4i77zxPeewiBLIlLKN77u27/Xus5Fr1sVlcKVmSe0ZE51IzIP/pH/4jf/u3f5otf/OJD3b6qKqqqWr4/HA4P66mJu
XvNC7HW8e7WmLrxZKlme1ShdU2eGrrtO0ViD2r6I9uChRAPsv884bzHuYAxCucDk7JmWjWsdQvaRXrfeUbi5Dm0jMjly5f50R/9Uf7n//l/pige7gXn85//PP1+f/l26dKlw3p6YpfFeul6t0XVOLaHM67dGjOrLD54rItvBJjVlq1hiTHqgUVii213W6MZeWpYaWfkqXnkjIoQ4tm1/zyRJZrJrGE4rZiUDZ1WyuleG4DbwxnbwxlV41jvtqS1wDNChUPKk//cz/0cf+bP/BmMubOu55xDKYXWmqqq9nwODs6IXLp0icFgQK/XO4ynKXYJIXB9e8w3rm5xezijaixZokGpmM1QiiJLcCFQpAl5avjw+86yunL3iSCEwO+9fWs+Ij67a0T8cFKz3m3xzS+dlqsZIZ5Td50nUGwOpsyqhizV1NbTyhJO99sQYGtc0m1lfODCOloprMwmOraGwyH9fv+hXr8PbWnmj/yRP8K//bf/ds/HfuiHfogPfvCD/Bf/xX9xVxACkOc5eS4jvx/WYbRk3x6WsSbEKBIfgxCtFMrEgrHaeorMUNaWLDX3LBKTNspCiAfZf56oG0fVWNJEo5UmNVA1jsZ60kTTygw74xlfefsWpbXLIYnrKy0unulJduSEOrRApNvt8uEPf3jPxzqdDqdOnbrr4+LRHUbtxeKkkGeaSakwOq7PKq0IBJRSWOfwXtM4TztPaecJ41l9VzC0f9tdCIHGenyImRVjYpZF2igL8fzaf57wIRBCQOv4vlYKGzxlbRlMLNOyYVo1bI1KjFYkxmC0YjSt2ZmUfPh9Z5bnvxACk1nNcFYD0G1lUpB6TEmL9xPosFqyL04KWWbQWpGphFndUDWWABAgAN7XZKlhdaXgK+/cPjAY2r3trvGB4bSiatwyEEmMIk8TaaMsxHNs//ZcrRRqvhRstMKHgA+BwaQkBKitwwdIFIQA3ntSY/AhsDWa8Y2r23zb+89yfXvC167cYmdc4bxHKUWeGs6tdXn1wppkTo6ZpxqI/Oqv/urTfLhn0mGOsF6cFDSQp4bRNGY6/L4qolhVpHjnxgClFb1WhjaKqrbc2B6zM5nxoRc36LcLbmyPqa3Dek9qNKnSuPm2uxDiCUgI8XxatA+INSKaLDHkacKsalApNNYTPHgdSHTMxGoFaRIvYJyH2nryVGNt4MqtAaNZyebOFLvrxKUVOO+5vDlgVjd7Mifi6ElG5IQ5zNqL3SeFPDHcnl99LB4lEP8dCJR1Q20trSyhbhwQcD7g59mPf1Vf5/0X1rh8y1HWlmKeZfE+YK0nTw1porl6a0Svky+/N2ksJMTJ8KRq1NZ6BdvjGVujkl4ro9tKqWrLtLQxY6og0ZqqcRAgSfSdx1Gxbs35mM21PjCt7F2P4QMEFyA4tkYzrtwc0ns5XqwdRq2deDQSiJwwD2x5rDV1U7M9mgE80kG16CkynlVc357g5lcUuxMiiyUapVikRhhN406ndp6Sp3HNdjirePP6gERr2kWKdR7XxF1TrTyl187QOm4Bvr49ZntYSq8RIU6IJ1Gjtvs+GuuY1ZayaiiyhE4rpXBxh95kVuN1IEsNtXXo+fnM+4BzcenGEJdx7icA1gdUY7k9mjIpG5z30ufoGJBA5IS5X8vjqrZsj0tmleWNd7fJbg0PPKgedAUQAlTN3VcVy8/Pb0OIFe0QC8wa5+NOmkTjfCwws85xut9eFqUuUq+oWJhWTSreuLZNCDzRehchxOF4EjVq+++jlScUVcN42oCC919Y59zaCrcGM37vnc1lN+Zp2WC9J1F6vmwc7tSuPWQjisYFplXDzrjk3a3RE6+1E49OApETZv+a6iKAqGrLrcGUqnG0i5R+J6ds3Lxmo+RbXtqgv1Lc90oG4PUrt5mUNQ+TQ/HENVylYjalcQ7nY3CktSZNNKNpxY2tCdrEbcB5aui1Ic8S7PwqCKVY7xZPtN5FCPHkPYkatf33UTeOnXEZi9l9YFI1fPWd2
3TyjNpahpMqnidCYH79g3NuT6a2sY9Wa1ZVjss3d6isZ32lQGk59xwlCUROmANbsmvN9vxALrKETp6yNSqpGov3nuGs4re/ZvnAxXXe3RrvuQKw1nFzZ8LWaEZqdMyEKPYUet1PAFSI2Q3vA7W1KGIQUlYNYV71XiSGMO/M2ljPeq9gMrMooHfAljrpNSLE8fMkatR230fdOG4PZ3eK2Y1eLu1+8fVrTGY1tY3Z2UUQAnuXix/HrLFc356QJQbvA712Rp4lj/R9iCdHApETaP8I67qpmVWWdpHSyVNG874eaaJJkwRtPYNJyb998yZFlnCq10KpuMtlOK0oa0vVuOVSTSw+fXi7r0WmpZ0PoFJYFyjmB3dZufh8jKa2js2d2fLgN8mTHfEthDgcD6xRe4hj1jqPcx5rFDujkto68uUumIDSoAjsjGc4F9BqbxDypPgQSIxiVjU01nGq11oGI3LuebokEDmhdo+w3h7NeOPdbfqdnK1RiXWePDOo+QJLajRWa8o6VqErYhCy+0rEeU/VBLx37+mADwGsC/hgCQQIClSgcYHKOsy8mVmSwIWNHte3xocy4lsI8eTdr0YNHu6YndVNvFgae6omZkXrJp6rFkkW58OyWN4d0rDuxnrGZcPKvJh+OK3ZmNevybnn6ZKf8gm2GGG91m2RpYZyV3tktavKw4d4lWG0orHxNsNphfWePInbanfvkHkvK6KBuPwyKRvqJgY1eZrQa2d0ipTEaIzWFFnCaqeg3y7mPUX2nm2exIhvIcSTtahRe9xjdjApuXJzOG9GFiDEgKOZD9b0Pm7H9Q9befoeNdYzmdVoragaS22dnHuOgAQiz4DFyWFWNngfI/mFxW6WPIlzYWIdh6NqHKnRyy6Gcahd/JonVZrlfCBPY+ChlCJLDK0iwftAWVtSo7mw0SXPDMNJTWNj59XGOoaTWkZ8C3HMLGrUHueYXRapNo6NfovEKKy/s9slbq99Ct8DYFT8vyLWw1WNxXlPLeeeIyGByDPgzskhwfo40yWEEJdbrCMxmtWVgjSJXU3DvLh0uR9/flWSGI1We2s+3osATCu7fC6LtWF2rfku6l3Wuy2qxjGe1jLiW4hj7HGP2d1FqkWeHkkRqIJ4oaYUSjGfV6OxNp6frPNy7jkCUiPyjOh3Cr7lpQ1++2tx2WWxNbeVJfTaeezvMTO0srg8QwAXAmo+jE6ruIQClvo9XpbE7qvM134dw1ACsYNhABJjaGV6uRy0u95FuhsKcfw9zjG7u9B1sfyxOFfsd2gLM/OnF0K8gNNa0coTytpxZrXDR953lo4MxnvqJBB5hvRXCr79A+f43bc3l1t08ywuhQwnNSvtjBfWV9jcnjKeNUzLhizR5InBe89svt32cS1OKot7UCqmXa2LA6yUUiQ6XomUjWNWN6xSzG+rZJucECfIox6zuwtdfQjM6oZAnAPzlEpCltlgpeJkX6PVfIeh4X0vrLHSzp/OExF7yNLMM2Z1pcW3vHyGs2sr+ACTWbNMm54/1WU4qSmtpcgTzLxItZ63XldP8ISg1bIDPK08YWU+grtdpGgVm5ttDWfvKfARQpwcuwtdq9rh5nVp+885h52LWDRgRCm01hhtOL/e5dzayiE/srgXyYg8gw5Kmzrnef3qnZbK7SJlOku5vj2mco4iM6RJgg+WOMDu0R93944bM5+UqYhXQsbEoti6iVcfvU7OcFpJwyAhnhNKKc6fXmFzEBsountc9Rz2pYlSsNLK6BQJ3sfmZa9eWJPlmCMkgcgzanfaNITA77196662zFlqyJLYyTBPEzpFyvaoxGjNpKxxPjzySWERjCxSn1mqCYFl1mUx8C5NDeNpLQ2DhHhODCYlb7y7zWBS0jxi08T3Kk3ihZACtNEEH0iMYbXXkgF3x4AEIs+Be7VljmulijxbDJBiHkWE+da6x782SYwmMbDR75ClZrlLJ0sMKGisk4ZBQjwnBpOS1y/fZnMwRSnF6krOqGwee
UbM41CARhEU9NsF3XZGbR3vP3+KjdW2ZEKOAQlEngP3asusVSwgdS729Yg9RmJTIWMe/uBU8yrVReFZYjTdVk6vnTOr5w3W9kz3jRXz692WNAwS4hm36B8yqRogkKcGozUrhWJc1ncFI2rXVpoAGGIWo3nM7GmiY0+jJDGsdQvS1OCmgSw1EoQcE3I5+hzYXa2+W5bEE8K0bOaBiqI9L2K19t7ZEKXu1IIogABGQ5Zozq6tcOF0bHh0/p6NjyqMVvQ6sY5FClaFeHYtMrJ5upjMHc8eidG0s7uvhROtYnNFFW+rE02eGfQBMcODwog4p0aRJJozq23yLJH27ceQZESeA4tq9a3RjF5nf3bCx6uO+U4WYzQFKeNZtadqTM3/Exb7c1WMYhMTa0ASo+m2M9a7LTyB8bSmlaV7hvP5KnYvdC7gTeDtGztc3lT024Ws0wrxjFpkZPN5BsL7uJ0fQGtNouenFBU7HSZGUwePCoHUKEKA1BiyxFPuqy3JUkMIIdaa7aqWDwG0hiJNKLKU1W5BkSWSjT2mJBB5Diw6r06qmuGkpl0kGKOZlQ1l42jnKWliYpAwLyot0tjkJxBIkxhsLNohOx/XYZRaNAaCLNW08pSysXuuOFZa2XIHz854xjs3BygCnVaKme/m2RrNmFS1dDMU4hm0yMhqFYviZ1WDzhSK2DJA63h+SRMdGzB2crbHM0bTetl4TKnYBqC2brndV8Eym5omGq00RWbI04QiS2KnVB/otTJMommsY1paad9+DEkg8pxYtGXen51IjWa1U8wDkbDsLbI1mtJpaWaVXR6wIQTMvAX8snGZglaWYrRiZ1zi/eLgz7EuXr0opegUKe/cGOA99Ffy5X3qxNDraIaTmqubI3rtXE4QQjxDdmdku600Dt6s3Xw45/xcEgJZYljrtsizBK0VZe2pG0eWxiDGE7s/1zb2IFGK5WgKo1XMmqQJnVbGaxdPAdw539UWrRXrXdklcxxJIPIc2d9fZHs84ytvb7I9LpdNfvI0Wa7lGqPIM0OeGCrr5nUeijSJ6dIsNaTmzvbcWAyryJK49vq1q1vLLMe9du5AfNx2kTCYltJXRIhnzO6MbGwhkDMtG8raxq6mRpNmmjyNAYgPAaM1nTyNS8cBynkg0S5S2iHO1NJK0djYpTU1mk4r5XSvsyfQkNERJ4MEIs+ZRX+RwaRkc2dCQBHwZIkhBJhV8QThvMcHRTtPOdVrYV0cXOd9WA7Je+WFNV6/fJvhtFoWfrXyO7Ntdmc57rVzZ8EYja+s9BUR4hm0PyObZwlZamjnKedPd2llCddujZfZWq0VF0736HUybu3MGExLIC7BrHZanD+9QmIMjXU01pMmmjQxdwUaMjriZJBA5Dm02E5XN56N1RZbw5LaxiuTLNWUlcP6QKIV3XaG1ppMA8TCsOGk5lS3Tb+dk2eGc0UHrTVaqT1bdXdnOXbv3NGJues5SSW7EM+2Bw3KW2RO93/uwumeZDWecRKIPId2L5OkieFUTzGcVlSNI4Q4tTfThpVWSlV7jHbLwtLdxV7WB0KAIo/zY/bbneXod/L77NyRSnYhngf3y1Dc63OS1Xj2SSDyHNq/TJJnCadTs1xvRUFVWV4+t8pwUu9Jl+4u9hrP6ofOctxr587+4EaudIQQ4vkigchz6KBlEqUU2bxItbExA7K60rpvWvT+/UnuznIctHNHKtmFEOL5JoHIc+hRAogHpVIfNcvxoHViIYQQzxcJRJ5DT3KZ5HGyHLLmK4QQYkECkefUk1wmkSyHEEKIxyWByHPsSQYQkuUQQgjxOCQQec5JACGEEOIoSfcoIYQQQhwZCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHRgIRIYQQQhwZCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHRgIRIYQQQhwZCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHRgIRIYQQQhwZCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHR
gIRIYQQQhwZCUSEEEIIcWQONRD5/Oc/z3d8x3fQ7XY5c+YM3//9389Xv/rVw3xIIYQQQpwghxqI/Nqv/Ro//MM/zG/+5m/yy7/8yzRNwx/7Y3+MyWRymA8rhBBCiBNChRDC03qwzc1Nzpw5w6/92q/xh/7QH3rg7YfDIf1+n8FgQK/XewrPUAghhBDv1aO8fidP6TkBMBgMAFhfXz/w81VVUVXV8v3hcPhUnpcQQgghjsZTK1b13vOX//Jf5ru/+7v58Ic/fOBtPv/5z9Pv95dvly5delpPTwghhBBH4KktzfxH/9F/xBe+8AX+j//j/+DixYsH3uagjMilS5dkaUYIIYQ4QY7d0syP/MiP8Au/8Av87//7/37PIAQgz3PyPH8aT0kIIYQQx8ChBiIhBP6T/+Q/4Wd/9mf51V/9Vd73vvcd5sMJIYQQ4oQ51EDkh3/4h/npn/5pfv7nf55ut8v169cB6Pf7tFqtw3xoIYQQQpwAh1ojopQ68OM/9VM/xac+9akHfr1s3xVCCCFOnmNTI/IUW5QIIYQQ4gSSWTNCCCGEODISiAghhBDiyEggIoQQQogjI4GIEEIIIY6MBCJCCCGEODISiAghhBDiyEggIoQQQogjI4GIEEIIIY6MBCJCCCGEODISiAghhBDiyEggIoQQQogjI4GIEEIIIY6MBCJCCCGEODISiAghhBDiyEggIoQQQogjI4GIEEIIIY6MBCJCCCGEODISiAghhBDiyEggIoQQQogjI4GIEEIIIY6MBCJCCCGEODISiAghhBDiyEggIoQQQogjI4GIEEIIIY6MBCJCCCGEODISiAghhBDiyEggIoQQQogjI4GIEEIIIY5MctRPQAjx/HHWcv13v8Z0e0B7rc+5D30Ak8jpSIjnkRz5T0gIgUnZYJ0nMZpOkaKUOuqnJcSR2x90lIMR7/zCL6HfvU5aVwSleH39FC/+e3+GV//gdx710xVCPGUSiDwBg0nJ1c0Rg2mJ9wGtFf12wYWNLv1OcdRPT4gj88avf4m3f/4L6Bs3MNaivKdVl6wohQJy70hCIHl3yuC//1t88Wtv8h3/tz931E9bCPEUSSDyHg0mJa9fvs2kashTQ54atFJsjWZMqprXLp6i3ykkYyKeK85afuf/9f9m8oVfJm8aqrU1fJaxcu0Kp12DBqqgmWiY6liqtuocg//PF/jGB17m1T/4ieX9yBKOEM82OaLfgxAC37i2xeZgCgSmJSilyNOEbiulqh1XN0eEELh2aywZE/FMWwQNl3/jt5j9iy+xsXOLtQAjDea2xxrDaWtpaUUKtAl0Q6DxgVlQTBR0neXtn/k5Xv4DH+Ptf/k7e7IpLkl4/exZXvx3v5fsxYvMrl2noxVnX3wBc2YDpaX2XoiTSIUQwlE/iXsZDof0+30GgwG9Xu+on85d3t0a8aXXrxECMROiFd4HGhuzHr1OTmMdidF4D+0iwRiNc55packzs8yYCHGSfeOf/Qve+Zmfo3/9Gme9JQW0CpReUaJwOqXA08ZiFCjAqZTbyRlWm02SUGMDjIPiRtGm+FN/guGv/DOS2ZSm14Msg7qGrW1qa6mKNl0VyDW0ul3OfvRDnP2+P4K5eOGofxRCCB7t9VsyIo8pZjlGWBfoFAlaxasxoxU6U1S1Y1rWVI2jlaWc6reWSzE6MfQ6muGk5urmiF47l2UacWJ98e//DOYXv8BL3tLVAaOgCTHYSFSgAJwyKJVgvF32DEhDQ8ePyEMDCoyCNASGdcnOr/5z0tmUZuNOpmOKYtxYPuJqXOXYOneecVDcHo6pf+O30Ds7bPxf/qwEI+JEkWV7CUQe26RsmFYNqdGEQDzrzikUaaKZlhYfPL12TtU4tIofV0qhlKJdJAymJZOyYaWVHdn3IsTjcNbyr//Rz5P/wi/QCwFPQBGDEKNAKQ0oVPCshBnOg9LQ6BwCJKEhCxVexYZGAdAKznmLu32TSW8NgKleJ/FTtrZu8JK3qETjvMdaR
9Zuk55e4/LtAe03L7P6L34Lff4FWaYRJ8JBGx167Zz1XotWlj43gYkEIo/JOo8CiiyhbCx6HlyEEPAh4LynshZC/GNjqtBKkaeGXjsnz+Iyja8s1vmj/naEeKDdhaOja9fZ+dLvsPHG1zhLoFLQVorGFCS+QWEBRZWt06lvo1VYLskk3tLoHBVqdrJznKreRYcaT7w6zJXiom/Y3rnFZLTD+PQKLrHoqmItUUyBhJRRfZbe5E1oFXRXWlyZlZz/+htkt26jzmwc7Q9LiAcYTEpev3KbqnbLZftZ2fD2jQFvXt9hpUjJs+SB9YTPQkZFApHHlBiNMZq20VjnqaxDA43z2PmbD7C4MMsTgw+BWW1prOdUr4XWCq0ViZGrN3G87d6Gm81mrDYlRYAsQDVfVnGmRW06ZG6T0nSwKmW1uQWKGGQQMx8Gh/FTAE5XV7E6p1IFHTtEzUOYTCn6IdBzNWvX/w1vFm0mwZN5T8d7OtqSTN+lV+1Qj2DLGN5NW9iyIszKI/xJCfFgIQSubo6oakevk6GUoqotg0mF9x4UOB/IEn3XDszdDsyotHLW+ycroyKByCPYE3nOf+Hb45L1XsH2qGQ8q/EhLFdpknmgUdYOYzSpiVt7K+sYTirSxHCq16ZTpEf6fQlxP2/8+pd456d+OtZsdLvoyRiIQUVXexKlSBQ0eNrNJijo+Alh13qlCnDQuTAJlsRZPDreWsWTkg/x/mdBsaI8r5RjrNN0tKelwaHoVO/SBMgInHWe3DrqyRTVkuJvcXzsf90IwGhWc3s0pZPPg4QAw2mNdZ4iT/A+UFsHKHqd7MB6wkVGpawsaaIIBGal5dbOlDevb9NpZRQPkVE5DiQQeUgHRZ55aoDAeNrgvMcYTa4V1gXA0S4yFPN6krKhU4AxGq0Uo1nNxmqbCxvdYx+tiueXs5a3f/4Ly8JRypKWd8xMQu4siTboEJcWMx8zEVOzgiZQuAkBcMsgRBFPw1DrghAgCRUAhjvLk4pYK9JSUAA2KPoKPpp4CgV6fryszA8bG+KS6Gnl2bqxyde//Dqd07ek54g4crtfN8raUtWWQMyoT6uGunH0OzG4qBobawhRKBWwzjOrLZDQys2eesJFRmU8ramtYzpsaKxb1isapUgTQ7+d3zejclzIUfoQDlrLc84zGFeUdazxmDUNCvDGUKQJWsfCVAJkiaGydp4Z8WilyBLDi2f6x/YPQzw/dtd+FL0VCIFyNKG91sdbi75xg6bXQ2lNsA4VAso7TqvAJF2n2+ygcMv7y/1sGZxAXLYZmx4GT9vFbIoLhiudV1mtN5mZLmert8l9DErGSY/t7AW6zW1Wm1skCgjQNjGM8QFmaY8rrW/iwuyrtJshaIUK0BmNuPp3/wHvFgWvb2zQ/30foXv+nDRDE0/d7teNxMSll8Z5lGKeOVdMq5gpWQQXWut5ANLgXGB7NGNkNFliMFot6wknZcOt4YRJVVPWDuc8yz4cAWwIDKcVvU5+z4zKcfJcHpWPUtxz0FoeQDNPndXWkaWalkoxWuF8IBBQinkE7LDzP5LgLSmxWLWVp6yutJ7idy3E3fbXfrRtDcA0yahbLXxeUMxmlKurANi0Q5Wsct7eptCKVnM7fpwErzRWJWigCLEGZHFUddxwz6jvVphwfvJ1htlpzpZvkocGq+ISZceOYrAzv63efUfzf6fNmCQvqclYiZltAEwIVJ0O2jlOf+NrJN94nVtpQd1q8frZs7z0p7+PV77rY0/2hyjEPnteN9oZt4YznA+08gQClI1dZv4a65iWDUopGuspqwbrA4lRJDpG4dOqRinNrG5YpaCxjvG0pqodzu8KQoiHSiDWmOyMZqwU2bHfofncBSKDScmVzSFbo9kyEFnvtri40TswOzEpGwbTknaRLIOQEAKDSUVtHWmisDYsi06zNPYQaayjbmL0qua7BYyJf2hbo5ILeSq1IeJQPGxb9N21Hy5JaNmabF4o521NTYt8sMNqU3Jze
xvfbpPUNaebLdpG7YkNpmaFKumwWt/ckw1ZWGzPBUWlCxqds2J38HZAFhoArEpoVMZ2cYFLk99DERil68xMhzPlZQA8hsudD3J28jq9+jpj06Xf3EIrsEBQhnQ2o1OVy8BHe4dvtUivXuWdn/ppAAlGxKHa/brROL9n2QUVs+S1dWilcdz5/HjW4H2YZ01gVtu43BICaQpbgxnn1lZorKd2Huc9BDAalpsvVazJCsC0stTWkSTHe4fmcxWIDCYlX37zJoNJRWwoGxfURtOanXHJh9935q5gxDqP9wGza2fLZFYznMb7aADnPel8V0wrS0iNZlJ6drcXWWzv1Spu7x3PakIIxzJNJk6u/UPmFm3R92cC9tR+nD5N+9o10hAYJSkKaDtLMZ0yeeEFwjtv8dJwm+lohy6BrmH5Im9VilUJPbdDFWYEpTGh2fO3v+CVYZSs029ukfoaqxJafoKa388oWadWCbezFzDecrp8m8SVGHXnCs7g6NktTPD0m220iVeWhHiBcL14ER+mZGHKxCQkeFreMQWajQ3SzU3e+d++wEvf+VFZphGHZvfrRt245bLLglaxnUOvk1PWlklZAwrv57st58GE8yHWWAGhcbx1Y5v1fosiTZbBit5/oIU7GULnY5CjVHKsd2gez2d1CBZzYbZGM7z3pIkmzxLSROO9Z2s04xvXttjf8T4xGq0Vbh5JVrVle1TGPiJqUVinCAFq65jNI1A/j1K1UqDUslgvTQztPGVWWW4NZk/95yCeXcsMx9Wr+Hab5vRpfLu9zAS88etfWt72+u9+bVn7QVWRe88w7QIap3JGaZuWazA7OwC0VGANf9eViw6WRmVxa24AFQKNyhkka5SmAxAzICrHqYTUz4BAo3Ompk9p2mynpxknfQJgvCP1JSZUXO18E8PsDNOkzyTpca31KgDr1bukypFjWXfbGEARqIMiqzfp2gmlMSgUk2Qdq1soa1Fa03S7qOs3uP67Xzvk34Z4nu1+3VhchHp/57XFzy9CiyxhdSVnrdvi3HonvpYQsxuNi0GIVpDMm/BMSstX3rnFrG5oZfFoDAHmicz4/q7n4QPsjEsGk4p+uzi2WfjnJhCZzGpubk8AKPIEo2OazGhNMV+3u7k9YTLPVIxnMUtCCPRaeeyS6j3DaYULATPfhtXM+4V4H3DOUzaWsrZADELyzNAtUlZaOZ0ipV2kZKkmhEDV2CP8iYhnySLDkSzaordaKGNQrRbNxgbJbMo7/9sXcDb+zU23Bxhr4wwX6zDe4lRcQNGhwaocHTwrk9F8WUVhgEztPWl4DNv5OaxK0VgUgVK3GKansfOwxauEiVlhptuAYmq6oBRpKMlcSRIaUl9SJx0GxTm+ZfDPOVe+zenqKtpVbGfnyN2Mwk2Xj6vmb7vrRxSg/JTUTWicA9fgVQtncoIx8UZ5jnaO6fbgEH8b4nnXKVL67YJp2RDmrxfVPDMSQqBxnjw1JEYxqxydPGMwjq8tqVZ7trr7EIMS62I2fXs046vv3GalSNHz+qjF234KqBrHrGrorxzPQlV4jpZmhrOayjqKNF4p7aZQGKMoa8vbNwdxT/esWlYiJ0bhvGdrWDKr4lpe4xxN45czMtw8pcbujEoIZIkhTcyex4uV03FKrxBPwu4Mh9KagJ43EPPLTICeZwIufOSbaa/1cUkSB8klhqAULTfGz8OOthuQBlj3lkJDopj3+gjL5ZjCz/DacLp8mzQ0OAyzpENAs17fJPcljUppuTE5E0qzQsuNqVQOwDRZRemAR9P125yZvckwO7M8OnvNbUrdYuBGJKFhvX73wO99caGZqoB2HkPgNIFMK5LybWbBYEYt6ixl1li8MbTX+of+OxHPL6UU/ZWcK7eG3B7FzHesG4z1Gnma0MpTRtOGLNWgwvwCVtH4g0KKO4zW7ExKzCIKuY/EaPrznTKDccX5U8ezHOC5eiWMqyN7fwnWearG0ViL84HXr94mTxI6RULVuLg918f21
GliqBqLcYrg4x+bIuB84KC/ncbDpKxZaeXLtTkfPHXj6LVzTvUKxrP6RLfmFcfDIsPhs1hPUakOaaiAuAuGPEePx8tMwLkPfYDXz54lvXqV5vRpZiZlxdZM5kWobedYUZ62Vssq/Eq3sDpjxe5Q61hLZVVGadrkTRO3DaqYaezZAdvJaUChaUiCp+OGOGWYJV1K3aLwUywGgyNg0FhSX7GTbrDabHK1/RpFM6Bbb95VczJOVjHBUrgxSsVjzQM9PG2j5iluhQdKbykmYwrvMEozefFFzn3oA0/j1yKeU4NJybXbI1KjUXlCY+NIkNp6nPMkefxrXu+2WOsVfO3K7eXW3gcFF1Vj8fdoELibVnHi+6l+C+eD7Jo5DrqtjDQxNNZh0rhmZ51nVjXL7U9KxV4EjXXc2K5RalFkukiN+WXA4X3skmc9e4IQBaRGz+fNxHTatKrpFCnWBirrSI3mwukuX728tadB2knogCeOpz0ZjlaLIoz23qCq9mQCTJLw0p/+vlhTcusWZatNPm7o2jgJNw2QqxiELNapCz8FP8XOa0IALAYdHE6lTJM2jSpo2xGNyum6AcN0HR9yCrvJOOmRBEfuJ+S+jLtkTE633uR66xVSP6NjY7ACQHC823o/07THmSrumrnSfo2VZgerUxJfE9DcKi5wfvy7aBRFEp+znz/vWYjHo/IeVc5QnRVO/bt/XApVxaHZvXX3VD+2aGisX3bdHs1qeu2c1y6eYqWVsTOOXbkb59CKXR15DuYXk621onEHRy2xrkTjXGBWxVlo07Lh2u0hK62cXiuj08qOzYXvU6kR+Vt/62/x8ssvUxQFn/jEJ/iX//JfPo2H3WOllXF2dYUQ4pqZ856ytnH7k4orzlorskTPi03jqpvRi5NxmHdMDfNeIVC7cFcQYkxc5tFa0cpiZXNjPYNJzaSK2xTTxPDWjQE3dsbkqWGlnZGnhq3RjNev3I5D8oR4BOc+9AH82bOkwyHB792iF7wnHY0I587uyQS88l0f48Uf+vdpLlwABbM0o9YahyJVcQthHaBRCcPkFMNkHYiFoTp4GpWThAYdLIRA4aasNZukwTJM+ngUK80Oa80NDJ6WnVC4CSYEEl8SUMx0l0G6wZX2a7zT/ibebr9G6iochsqs4FRCCIHb+Xk8mkkSC1y79U3adkDuJvSa25h5DYsnbuMdq4x32h/Ez78+V4oqQHZ6nZc/8NLT+8WI587+lg9KKbLUUGRJHGLXyamtW37u3a0R41lsTHavwGI/pbhrY8VuPoBzgWllub495u0bO2wOpvzO16/zm797mX/2b9/mX3393WPzWnPogcjP/MzP8JnPfIYf+7Ef47d/+7f56Ec/yvd+7/dy8+bNw37oPZRSvHphjfVuaz5gKPb6gJjxyFNDajS1i5GrXgy+IF5ZLS4B9bz6eRGV7reoelbzQtUiNSRa0ykyzq2v8PLZPlrDrGqoahuzIfN2vL1ORlU7rm6O7vtHJsR+iwyHbbVJNzcJ0ynBOcJ0Srq5iW23efFPfd9dmYBXvutjfPeP/99Z+9N/AvXJPwT//r/H5P/0J9lG0wQYepiYLrmbkIa4zDMzK+Q+rnsrPEElVKZFrVo4lZKFkiw03M5fYJyuLjMcQWk2swuMdI9xsoYjZSc/S9du8b7R7wCKa+3X+J3172GcrtGxA16efJkz5WVutt7H1fY38YHhb3G6vMyVzod5Z+XDGByr9c0YNJk2XscCvkYXbBcv0ATNdlBsKs0UhVFKhuKJQ3VQy4fdjNF4HzPsV28N+frVrQOX9u8nBLAPaAni5pPgy9qxiG9ciBfGk7Lh8s3hvJ3F0R8Ph56f/O/+u/+OT3/60/zQD/0QAH/7b/9tfvEXf5G///f/Pp/97GcP++H36HcKPvy+M1y5OeTGzhjr4jbeVp7QzjO2RjOaOq51Kx3nV/h5lbPWcYuUmzebUeytSwWWExNDYFmgaucRS7+TUWTpMrNS5AnWeobTmo00i
U1olDr2HfDE8bXoE7LoI6LHY7wx2IsXePFPHdxRdH/fkZ0kwa+soEwCrqatApndJlUsc8Zduw1ApRJK06Lb7ICCcbJG3myjgMJNUaFhYrpYUlBwI7sIviEzhq1kHadyWm4SW8Lj8WheGv8uWkHuJqw022gctWlxevgOo/wMlzsfZGa6hKA5XV/e872M0zV0sPTrG/TdiA8Ov0RL1+QBZvOTdpZnMhRPHKo9W3f3bVQA4se1wmh4/fJtnPfLOqzDtOjk6kN87WqsY2s048rm8Mhbvx9qIFLXNV/60pf43Oc+t/yY1po/+kf/KL/xG79xmA99T/1OQe/lnI2dDr/3ziZFZmjlKYo4iG46Xz6BO9XFi+xETIfF5ZciTZhWzf5NMrgQ0CYu8ZSNo5lPXNwelxhdL+cFtEyCSjRVY+dt4uMfrDHHuwOeON5e+a6P8dJ3fvSRO6s2vV4sdK1r0p0dCmdJCCTq4KK4SudoLDp4NA4dAv1mc5klNKGhUV12shdo+5KJ6lG4EV5ntOyQ3E7Zzs8wNavUKmOUrDEozvHB0W9RmxYBxZfXP8m3bP0qZdKl19zm/Owb3MovkPqKrfwFUl/veU4tO0L5BhXAEOj44XI2TRtPqjWh30WdPvXkf/BCzC227m6NZvQ6es8LfAiBaWlZ77aYlpbRrCbRmoqncL6f73mPfa/CvKW84/ZweuQXvocaiNy6dQvnHGfPnt3z8bNnz/KVr3zlrttXVUVVVcv3h8PhoTwvpRQbq21uDTpsjWa0ckBBt5UymlZ4At7DovVAWHS6U2ADQKBs3N0ZERa3j1MTrYuRbitPyeadV6tmvoVLa9JUzwtg79zRIlo+rh3wxPFnkoQLH/nm+95m/1TduOWX2HckyyjeHqH0YmfYHbXKGKanKdyEthuS+QqvDDvJGsY31LqgcGNS3zBK1zg//Tp5mKFVRlAJ3js28xdpuwkdO6FRBZc7H2KcrPLB7d+k32zyxd6f4FX3JbJ535CzszdI5wPx2nZIS2nW6pt7riCtSriVX2S9ehf87E7wpO4U9nmteOfmDme8x2g5vsThUEpxYaPLpKoZTuo4KFVrZlXDrLIUWcKF091ld+5H/VNcXBA/qhBiDZWeHzmLMoO6cUd+4XusjsbPf/7z9Pv95dulS5cO7bEWfyx5ZhhOahrraM0bjul5s7IQFhmQ+KtbrMlZx3KQHcQgZXebXevj57MkZlsSE6Nio3XsY6Lj0CM37yeid2VepqU91h3wxLNhf98Rj6FSPQIKypJcBVxQNB7GpkujMkoddwCkvqTjhtS6oFEZw2SdhgyPYqy67KSnGWSn6DVboKHWLa613o9Tho4dMMxO827rVd5a+RCbrUtUOmOcrnOqvhYLvoPldhGDigRH5qtlMNTogtvZOYbpaYbZBjvpBgC1bmNNRsuPl8fiMN3AqpxcK1yScr23RrW9I11VxaHrdwpeu3iK9W6L8azm2q0RN3emlLWlcY6rt0bz7twK96BtMnNm3mHVKPXYL9xh3v5dzfcJBzgWF76H+uinT5/GGMONGzf2fPzGjRucO3furtt/7nOfYzAYLN8uX758122epN1/LFXjmMyamFbr5KyuFKy04m4Wo9UyAjV3NfaPv1w1/yNJjSI18XZnVlt0iozG+vmvPC5NtdIEH2JFc2I0idE01jGc1OSZ4cJG99hsqxLPpj2dVQGNIwvTuCNmOiNnPqxRgVMmBubKkIaavt2i0i1mpksaalabTTpuwLbZQKlAa749VwdHy81ouxHr9bt07RYGz1p9k5bdQXnoVbcwznF29iYtF7ccm2BJfUnia0rdxmHYzs4yTlYp3BircyrTolffwswH5rXciHOzN0nm7ytgmvSoVc6W1lw+8wKDbl+6qoqnpt8pOH96hcRoiizh7FqbF06vsNKK9YibgymtLMH5h4tE0tTEzRZaHbxT4iHdaUERl2fWVlpHfuF7qIFIlmV87GMf45/+03+6/Jj3nn/6T/8pf
+AP/IG7bp/nOb1eb8/bYet3Cr75pdN86/vO8i0vn+H3feA8n/jgRV7c6C+DkZjZSFhbyQ8ORIizAZyLhapxToBnOG0oMkNi9K5xzbFnyKKzap4axrOaqnGsd1u8dvGU9BERh25P35E5TWz/njQ16byVuweMtyShoeXGy6K6zJd0bSxMHSereGXohDFoTWU6jEwPCLxbvIRVGaeam4T59ttz5Vv07S3e6X4LtS7I/YSt/Bzb+QsA9JpbbMwu45ThGyvfxtCscaN4mW90v52d9DQrzTaNysndhO38PJVuoQh07CB2dzUrBODM7E2CHbPlYODVXb1UhDhMIQSu3RrjPZzqtUiMiRelAXrtjLpxrLSzB2YjFkWm7SyJ74W40zJL9LJ84FHFJpyKTpHyvhdWj/zC99B3zXzmM5/hz//5P8/HP/5xvvM7v5O/+Tf/JpPJZLmL5jhQSt1VqNPr5EzKhuG04hvXtujkKc7HbU/3EgvjwnIOxmLmTLedUdaOqol1IyHEP6qPvHqWbitnNIsvBosmM0Ictj2dVfMcNV+oDiGQ2xKIA7csirYfL69Yap1T6xaFG1PrHOMsKniSYNlo3sU3cCs9j9KKQXqaioJhuk5jCqamR8tNOFVdYaO6yqXxl+nbLd7ufgTtSird4c3OR6h0m2ASStNmmJ3Ba83E9PnA6F/RsdvMTIdZ0kUTWKuuk/iKQbpBt7mF1Rm3iktcnPweGs9IG7o+UNYl6WyGvXhBuqqKp2LRTyQxilvDGVVjl0WiWWLIUkPdeF69sM7rV7aomrszI1rFnlSdIiXPkrhzcz6O1z+g58hih8xBFJAmmg+/vEFiDDvj8ki7ex96IPLn/tyfY3Nzk//6v/6vuX79Ot/2bd/GP/kn/+SuAtbjZhGc2Pn0RGM0twbTWNfBvbdaLVrvJloRfJwfoIC1XkFhDSEEaus5s9qhk2dcvjmU7qriqdvTWXVzk6bbhTxHDYf0rWccFLkOJPOJNYu/+MxX6HngYUOsk8r9lCTEoLtWLVCad9uvkbspg2wDW7U5O3uL0qyggmOaxIzEt2//CqDQvmGSruKVwipDpQtKXZD6mnOzr9GyQzaU4VR9jUF6mlnS55sG/xJFoGu3cCgG2WkKNybzM9arq8vv03lPBnRHI2a93oG9VIQ4DNbFpplVHceHpIlGa03TOAaTMm6CmA+4O7va4d2tcRyKx3yDi9rV4yrRjGYNrczE/TVhcRvmpQN7m2suPrYcireYx5RoUmPIEk2aGG4NZ1y5NTry15+nckT+yI/8CD/yIz/yNB7qsYUQmJTNXXNfFnvCB9OK8ayOe7AfcF9xexTU3hFszIyMyjqm4EKc/pslhtev3qaqXayqNhrnPFujGZOqliUacegO6jtirGWmFJtZwcvNlCZdAxoKFydXBxQqeJwyFH7KxMTMROrGAHhtGGbrrFY3Wa+vE9DkfsKKG+BrwzTpUdQ32CxewviGzJdoBedmb7HW3ETjudJ6jdKscHH6NbayMyTB0m9uA9C2I3Tw8zk60Y3Wq7zbepXzk6+ggI4dLk/m6wQmSmPPn+fF//OfOrCXihCHIdGKqrY0ztPKExSxdUM13/UQJ/FCliaUtSVN4jTeEOJMMq00wXumZcNoFjPx49md+9d63rNEKZyPF7gw7/CtobHzd+a7PrPEcGatQ54mWO+4uT1Fj+JwvqN+/ZFLA+KAoqubowMzE712TmYM17fGe3bK3M+ijGTR9CwQt+WGEFgpclKjeeP6NlliONVrLVNhOjH0OprhpObq5ujIm8yIZ9/+viOzrW22f/YX6E+GgCJrdsgVhHl93ChZIwk1VmV07fa8KNUu76/txmShYbW+yU62QepLRslpLF+nZUcUfkrLT2l0xpvdj3KqvMIoPRXvp/GUuk1tWrFbKoFT9Y3lMeeJGZOu3drzPfSqTXTrVd5tfxP9ZpPuPGhpAtigaF77AJ/4f3yWJJNlT/H0LBpyLwbZBWL7hkU37TDfH
p8aTXulYFo2hBA4s9YGFMNxyeawvvf9h0VzsniEmPlSTJ4l84nw9s7FtFKstHK6rZxAYGt7ig+BbjtbNt88ytef5z4QGUxKXr9yj8xEWbPWLbg5mNBY91AtZxbLNonWeOdBBRKlaBcpzgeyVNNr51y9NYoD9faVP0t3VfG07e474qzl1/6/v8bGaBuvFNOgaUxOi4YkWLp2a/kX61HMzAodu7Pn/s7O3iLxNVv5Obayc4yTPkn321mtb/BC+RYeRbfZYpis0bYDNmZvM0tjYfrN/CLb2Rn6zeby/pr5pN9St/lq/xP8vtv/BDMPTwKK262LfPPgN0FpcjfdtXSqcImh+MTHJQgRT53zgVaWUDaOysaBdtY51Dxg0PMgwYeA0oq1bhGbi80sK0XC9nhv6/X9NR8+xBfwVh6LWJvGUVtHO0/otjKG04qqcXdGiLQzUFA3jmlpaRcpebo3BDiq15/nOhDZPSWx18n2ZCby1HN9e8w7NwfLtu4P24NXz7dXLVJvSWrI0wTnfWxoZh1Gx652u7uqLkh3VXFUTJKQXDiPfvcd3Hzw44yMlIDBoZYb0WNjpEW79xvFS6S+wqqUoAyFG3Nu9iYT06NvN8l8gwKutl/Do7lZvMRq9S5WZwRtGGUb9JvbpMHSabb3dGwqzQqlabNeXuX85Os4P28CFUCpQMfu0HJjzLwH/bznIJUx3MzbXHz1fU/xJyhElBhNPh90N6saplWzzLgnRt/JRMxfd9pFSlVndNsZ26NZHA8yt9itqcIiAI+s8zgXMEbRKlI6OuPUSptZ05CnCY2L2ZdeJydN4/T50aRGa8VqJz9wG/BRvP4814HI/imJC1VtuT2cUc/TaOohI5BFSZ8KzGfUxN9znsQ9B3re3n3x7zg87+77lu6q4ih1Lp1n9kWNBxIVSO0Qm3XJbBkb++mcMunSb24tazGcMgSdx3kzfkbHblP4Ke8f/SuutT9A5kt6zS0mySppqOmMt8n9jGFymqnpM05WaUg5W75JbQpqXfBO50NcmHyVlWaLzM9QwXFu9jWWC0HzQrx+vYkiZiwVwHy3z02dMLsgu2TE0djd6v1Ur0W7Srk1nJLo2Duqatyy6zbE836eJXzTxVO88e42t4azuHVX7xqzsK+rqjGa1W6LIjWUtWW92+aDL55iOg8kZnXD1nDGcFoxnsYAZG2lNZ8Sf/Dry1G8/jzXgciBUxIDDKc1jfXzX36IJ7jAcvDdfooYzVoXMx7LwXhqXqU8z3gsGshkSayCnlb2roB09yyCo24yI55PZ77lm9j6OU3mPTtK0fKw1gypicuMaahIm+rOGjix2LTROSt2QMsO0YuujcFxufNNfHj71zB4rM55N3+FFydfYcXGxmKNyhiZVW61LnB+9ia5m7HS3GYWesssR+EmLFbLKx8w8xSlRrHZukTH7tBrbuMDeAI1ilFWyC4ZcWR2t3ofTRtauaGVpczqZj5w9c5yyZ7zfiuj3ynijpd916laqXiRO38/0bGJZlk78izhwkYXrfVySWWVgnNrK3s2YrTzhK+8c/uBs3Ce5uvPc32EHjQlsbax34cxitqG5XZcWM4MWp58A7E6OTUmzrMwevnHo7XChBBnWgQIKtBYv4yAE21oZYqytvG289qUaWmlu6o4Uuc/8s38//qneOX2DTaIk6gNIRbDEUcYbHtFT3k682VIPa+g2snO0OgMp1LadsR2fpZXxv+Grt0hCQ1r1bs0OqXb3Jo/muJm62XW6nfJ57NlNsp30DhWXJw1FeYHXDI/ttJdIxgqpWjK29RuwmS+XdEqzThA8Qe/W3bJiCO16N692AwRl1hib5DdyyX7z/sXN7r8zjcSprWdByNhuZ1396yZxBicD6x3W/fcdntQn6y7ZuEc8evPcx2IHDQlMS6pxDXoZU8Qo3F+vtSiA8HvLRfpdXLK2uFDoFNklI1FK81KK2VaWcrGEnzMjrTyhOG0ZqWdcf5Ul8G4irt1qhiQ3O8PSoin4
Y233uUrSZuXlCZVHgfUKIoQ0CrQoLmGInhFe7keGVuz95otMhdbxZtgWa1v0plnPqxKafkJL01+D4BheppBcprV+gYb5ZXlEugiCwJ3jrMw33WgUCQKqgBbQTNDk7khpYLGJNg0Z5xm1Ghe/cS3P70fmhD30O8U9Nr5Mitx0HLJ/vO+MYZvfmmD33njOtbNm2TOl/sXO3FWVwq+5aUzrK4Uj9yIbH+AdNSvP891IHLQlESIJ73GehKjsG7e3c4orA/4eQGRUvHjZr6W9tLZFdZ7rWXqbWswYzirKHxgFgJKQ5HF+9/9yz5/6uD+JUIcBe89X/ilX+dcM6Na7XN5MqPtPBmWSYhN+to68JqOwckkxKZnhVKY0NBbZjqiZB6E1LrgRut9XJx8ZRlwNDpDE3uE6H170gKKWmd4DARP5mdYNDtaMUUxaa1AVTNYWyf1nkyByTNqbUhv3cJffEFqQ8SxsTsrcdByyUHn/Q9cPAXA7761SdnY5RFitOLs2gofffXsewoY9gdIz3Rn1eNuf2TofMBohQpx/e72cP4xo0h07EtgtKKVJ6y0UkJQfPDSBhur7eUvcP8fmtFxk6714a5f9kFpMyGOytVrm9x6/Rt8ZwqT1hrl+hmuunOsTr+GSjSj2nJ28yYXcrhZdNGVJfOBNTejrQLp/O/acefkohQYH5dlFkGIJ078fWH2jeVY8t0qXXC9eIms2cEqTbd6FxtggKbvHM1sSsdZujffZau7iltbxVUV6Wgb225LbYg41h72vP+Bi6d45YXVeQfuitQYLm6s0G0XTyRgOC6vP3KkcnDq7PLNAVXtaOUJs8riXNzhopWi285Y6xZUtWe919oThCwcl1+wEI9iPJmhqopcwdAYtIJOOkb3VpjOSvTODhvak3jN+nTMOFvHJy2a2RUmATIgUyx3g5nFlngcHbtzZ6lFKRqd312sTfzabVsxLW9h/QhvMhoUGzj6FioFo7XTTOqK9eEOG6MdBtWMumhhL17gxT/1fVIbIp4ZxhhefmHtqJ/GoZJAZG5/6qydp1zdHBGIRaYuBNppSr+dzfeFOykqFc+clU6LkOdU0xGJczSJIg8jprOSyY1NvsnVtDRYBWOlqesBbV2RJJraeVQIJEA5b7ZUKGh8rOnYNOucZ0IRKjQOE2omSY+OHdwJSAJc9xqvwIQhTimCr+jPd6JZYJLksNIhqBVura6SX79OubrGB/7i/5Xz3/pByYQIccLIEXsPd7IkfXbGJZuDCWVt8fOe/lJUKp5FF85vcPq1V3nri7f4oJ6xY1YIwNbWgJd8Q6YBpakCOBV3nY3dhMxo/EqXN8uGM60M+5Fvxf6L3+KVee8RFLh0hdpZclfhAqS+Infx8/sHiQag7SyV0RA8KyrQBEWpNVunXkXrnDyMUMZQra1hZjO0MRKECHECyVF7H4ssyUor48Lp7rEo6hHiMGmt+b4/9l383NvvsPbuO5yyAwYmRU+nrCUepRXOGDazl6hn18hVg9aGkXV0ypKNdpv2qy/z7le+RqY12yiSAJkKXKiuUiiHne9GW6niHJnxPAgJKm55zwkMlabSho5rWAkxG7JpDKP1M/iVLmrX0DvyHD0eM90eHMnPTAjx3kjrzoe0CEpWVwpWWpkEIeKZ9YH3X+L7P/3vcf0j38ZbKsUNh5zD0TGaut3l5voGOlOc0h7jPd7Hret9AxdevsjW1pBkNqM6d46dJKdSilvacFMFJj7QhLDMgDjgZutlBkmHaVC8a1JuJwmt4HEmYae1wlurp3krLRhunIPuCh1/mzyM7zzhqsIbQ3utfyQ/LyHEeyMZESHEXT7w/ku8+pkf4uqVG7zzb36PX/+5X6IfZpSdFZo0pUhrtOqwVpYE5wje0z6zQfnRj1L//BfwvR7KGG5sfCvh1pv0mi1KrRmowCk8hsDMK3aUYorDOEetNMO1U4RWi+bGDfQn/yAvfdd3sPHa+/jnP/aTpFev0hQFSutlTUnwnnQ0wl6UV
u5CnFSSERFCHEhrzaUXX+AP/IlPkv++b+dr3tCpSwiBMs240Vvj+vppvp60aFbXaP3B72K8toaxFubTbvNWYNrPGCcZaQhooA6KaVBM59One+U1SgM7p06jul1UXdMUBZe+6zu48JFvJisKXvrT34dttUk3NwnTaQx+plPSzU3ZrivECSdHrhDivg6qGylbbaY+MB5N2Eg16x94H+nv/w7aW0NckkBdQ6tFEQbQ7TDttJiWJXo6Zds2VN0+rZ1tqpUVSFOYZzruleFYbMd9++e/gL5xAz0e442R7bpCPAMkEBFCPNCibuTXf+6XGH31K5waDFhRilZ/hTMf/RY2/vgfxly8wLlzZ3n97Nm4jJLnKB2TrkprQlFgRiPspUu8/Cf/GJf/nz9DOpvSpCmEEDMco9E9MxyvfNfHeOk7P8r13/0a0+0B7bU+5z70AcmECHHCqRAOmEN/TAyHQ/r9PoPBgF6vd9RPR4jnnveeq1duMLt2nY5WnH3xBcyZjWXAAfDGr3+Jd37qp0lmU5puF/IcqupOkPGpf59XvutjvPHrX7qT4XAObwzh3FnJcAjxDHiU128JRIQQT9zDBhnOWslwCPEMkkBECHHkJMgQ4vn1KK/fclYQQhwKkyRc+Mg3H/XTEEIcc7J9VwghhBBHRgIRIYQQQhwZCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHRgIRIYQQQhwZCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHRgIRIYQQQhwZCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHRgIRIYQQQhwZCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHRgIRIYQQQhwZCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHRgIRIYQQQhwZCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHRgIRIYQQQhwZCUSEEEIIcWQkEBFCCCHEkTm0QOStt97iL/yFv8D73vc+Wq0Wr776Kj/2Yz9GXdeH9ZBCCCGEOGGSw7rjr3zlK3jv+Tt/5+/w/ve/ny9/+ct8+tOfZjKZ8JM/+ZOH9bBCCCGEOEFUCCE8rQf7iZ/4Cf6n/+l/4o033nio2w+HQ/r9PoPBgF6vd8jPTgghhBBPwqO8fh9aRuQgg8GA9fX1e36+qiqqqlq+PxwOn8bTEkIIIcQReWrFql//+tf5H//H/5G/+Bf/4j1v8/nPf55+v798u3Tp0tN6ekIIIYQ4Ao8ciHz2s59FKXXft6985St7vubq1av88T/+x/mBH/gBPv3pT9/zvj/3uc8xGAyWb5cvX37070gIIYQQJ8Yj14hsbm5y+/bt+97mlVdeIcsyAK5du8YnP/lJfv/v//38g3/wD9D64WMfqRERQgghTp5DrRHZ2NhgY2PjoW579epVvud7voePfexj/NRP/dQjBSFCCCGEePYdWrHq1atX+eQnP8lLL73ET/7kT7K5ubn83Llz5w7rYYUQQghxghxaIPLLv/zLfP3rX+frX/86Fy9e3PO5p7hjWAghhBDH2KGtlXzqU58ihHDgmxBCCCEEyKwZIYQQQhwhCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHRgIRIYQQQhwZCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHRgIRIYQQQhwZCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHRgIRIYQQQhwZCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHRgIRIYQQQhyZ5KifgDh8IQQmZYN1nsRoOkWKUuqon5YQQgghgcizbjApubo5YjAt8T6gtaLfLriw0aXfKY766QkhhHjOSSDyDBtMSl6/cpuqdrSLBGM0znm2RjMmVc1rF09JMCKEEOJISY3IMyqEwNXNEVXt6HUy0sSglSJNDL1ORlU7rm6OCCEc9VMVQojHFkJgPKvZGZeMZ7Wc004gyYg8oyZlw2Ba0i6Su+pBlFK0i4TBtGRSNqy0siN6lkKI593D1rAddLvhtJKl5
2eABCLPKOs83geMOTjpZYzGVxbr/FN+ZkIIET1sDdtBt8tTw6yyhIAsPZ9wEog8oxKj0VrhnEcn5q7PO+fRWpHcI1DZbfeViNEKBVgfZAeOEOKx7alhyxN8CDTOc3MwYVLWvHYpBhIH1rpZz/WtCc57zq13SOfnOJ0Yeh3NcFJzdXNEr53L+ekEkEDkGdUpUvrtgq3RjF5H7zkYQwhMS8t6t0WnSO97P7uvRKraMqstCsizhCJLJA0qhHhkixq2srIYDbeGMxrrgIBSimnZYIzi2149t6fWbXEes/FeUApG05o8vbMELUvPJ48EIs8opRQXNrpMqprhpN6TupyWljwzXNjo3vdqYfeViDGKsnE47wkBQt2QGM2N7TE7k5KXz63SzlPJkgghHmhSNtwaTpiWDZOywYeAmWdoU6OwznPt9oi1buvAWjc/L0hNE03VOBrrydI7mV9Zej5ZJBB5hvU7Ba9dPHVnbbWyaK1Y77YemMXYveum2065PZzhvKdIE5z3TEpLWU8xWrE1mnJje8xqJ6fIU8mSCCHua2dcMphUNNYDgTSJS8TWBXzw5Kmmajzv3h7FWrd9mVutVAxMQqyHm9UxR5IlBtSjLT2LoyeByDOu3ynotfNH7qy6e9eNdYGqcaRG43xgOr+CCT7gvcL7ePKY1pYiT6VYTAhxTyEENgeTeXbVY4xGL85HKuCcY+Y8Wikms5oiS++qdcsSg9GK0bQmENgZzRhqRWoMrTzB+sBGv/3ApWdxPEgg8hxQSj3yOunuXTd14/AhkMzXbpt5utMH8MQKdq3Aec+sajjVazGaNlIsJoS4y6RsKGtLkSaMbc3i7OB9LFZ1PgBx6aUaOTqFp2osZ1Y7KB1vXTWWxrrlko5SUDeWyaxmZxJ31PRaOcNpJRdDJ4DkrcSB9uy6UQqtFHXjqPetuSoVr3BciOnSqnFYF/YUiwkhxMLiIqfbzkApGudpnKe2bh6E3BGAcVmzPS55Z3PAeFbjfWB7XGJdoFOkFFncxltbj9IKDaSJYVrVvH7lNoNJeSTfp3h4EoiIAy123UxLS2IUWaIpa0fwgd35DaUUIYAKcV3Wex+vUuaFscNpJR0PhRBLidE475lWTcykukBjPf4+pwfnA2Vt2dyZsjmYMKss7SKl38mxLhBgnplVmCTef5El0kH6hJClmWfUe524u3/XjdYaHzwB5tkPUMTqdaUURsdCs8TEk8G0bBjNar5xbStmVKTjoRDPvRACw2nJZNZQNpYiS4CYzTiIVsRdesTzi1aOxmramaHbydgalNTWLc87gbjEU3lH1TjZxntCSCDyDHpSE3f7nYLzp7q8fvn2/L7uvk0IoFSgcYCK+/qdd9waTDFa08nTeIUiHQ+FeK4NJiVXbg55Z3NA2TQ4D7PKYsy9L5D2Z0kSY6gaS42i8QHrPQrQSqPmF0dBBbwPTKuGTiuNtSfWMZ7Vj31hJg6XBCLPmIeduPswGZPBuOTtGwMCsNopGKmKoKCqHd7HoIOw62QRoKwb3r0dI5ZTvTx2PFTS8VCI59nivDSZNfMtu/PaMj+/iHloAaP0vIVAQydPsFoTCChiRiSEGLBY66lqi/Oet27sUNZW5tEcUxKIPEP2T9xdvNDvDwJCCFy7Nb5vxmRnPONffe06g2m5XFqJJw5QxDVZwqK2/Y7aBsCSpZrBpKKsHb12Rp4l0vFQiOfQ7vNSCJ6ybvDz5d1HrdyorUOhyBJDbWNQE3fsBQJhuVSczutQBuMKH5gX0KestFKcl+zscSOByDPkYSbu3hpO2JnM8P7eg6IAfucbN7g9mqEIeKVwjWd/k8LFSUTtex/iVQkhMKsaGus41WuRZ4l0PBTiObM4LxkNNyflMgjRWoEP9y1S3U3N34yJazBKKYxRBBSNtbgQC+mNhnKeCamsRStNYy3TMraCX+0W9DqZZGePEQlEniEPnLirNZNZQ5ElnOq3DsyYXLk5pGosO+OSEALGKPw8E3I/i4wJML8Ci
VvyWnlC3XiG05qNNNnT8fC9FtQKIY4/63wcLVE1OBsHZxJipuRRUiKBeG5pzZuZVU284FlbKdgKsWiVEJZLPsx35CjjaVzc3VfWlklVc2a1I9nZY0QCkWfIgybuzqrYjGz9PhmT26Mpo1kNxCuP4AONu//ZIhBTo2bebCieDKCxniJbzIOwVI2lrB3r3RbOeX7v7VvvuaBWCHG8JUYTiEGA1hqtY4Dgg8I/QiSyOGNVjZtfuGRY69geVyigSA1V7XDzC6JFnNPYQJYqzLyWxFrPzZ0pF06t4H2Q7OwxIH1EniG7e3/s3zcfQmBWWVKjybNkz8frxsVUZghUtaVpHHka2y7XDwhCdnM+Xo0YHU8aLnis83EnjfNxSmZm6K/kvH71NlujGXlqWGln5KlhazSTBkRCPGM6RUo7T3HzAnelYkb0XslPfa+k6Lw43s4bn53utWjlKT7EzqujWUPtYj+SPcvELHb3xaaLidFY57g9msk8mmNCMiLPiMUyR6+TszMpGU4q2kW6Z+JukSVxqcXHYKGqLcNpRTVv4U6IwYQnHrUHbdd98POA1Bicih1ZnQs0zkKA1ZWC972wyrVb4wcW1Mq6rRDPBqUU5093uXZ7RGMdzsXsqVZw4IaZe1z7hACOeO5aFKguOjv7RaPFebBydxG9Xxbdq/kDl5Xl7Foi82iOAQlEngH7+4Y473EuMJ7VGK3vTNw93eXqrVHMRGSerWGJ9Z7UaBKlKGsXr0ZCXMZ53JSl8x41P+idd7gARZqQGs2stg8sqJV1WyGeLefWVrhwqsfV20Nq63b1/4hZCrurYvWgOESxGCfBstZjaxwLYH24U/CqgHudtarGkSV6WfWqtWKj35ELnmNAApET7l59QyazBmMUL57ps7rSWkb907phazTlxtYUCGSpiZ0IG4fRirVuwdaopKyah65m321RUEaIu2wWAYnWis3hlJ1pRWMd7XtchciuGiGePUopXr2whvWO4aSmcQ7nfVwSnndkzrShmmdP968IL5ZX2PX/8aTCGAC9DF7u18k9AI2LF0lZalhdKVhdkXq040ACkRPsfn1D+itxmWM4qblwusdwWi2zJmUdC0chUDaxyc9iDbW2jjxJ5unLR49E1Px5QSxSS4wi0TpOyvSBLInru856dHp3Qe3uXTVCiGdHv1PwTZdOc3VzxM5kxrSK556GWEemjSJVBudiRvdBPICDcM8cyF5GA/PsbKeVcrrXkWWZY0ICkRPsYfqGDKYl17fHXNkcLrMmWudMyoaqscx3uS011tPY+rGejwLaeULj4sApRbwCqZ1fFsU2TpMYzXBWs54Ue553CIFpaVnvtuQEIcQzqN8p6LVzrm+PeePaNonRFKlhUjaUdSyyf9AuPYA0iTv67jGi5kAKhTKKPE/otnMubHRlWeaYkEDkBLtX35AQ4jRL6z1V47g6D0IWWZO4phrupDoBoxVKxYFRj7MkA3G9N88SVOPQSlE2br41N64DK8V8ySUu9g4n9Z7lpGlpyTMjJwghnnHbw5IQYL0bL0Y6RUZtHeNZxbtbkwd+vbrT3/mhJEaTJhqF4kyvw/vOr0qbgGNEApET7KC+Ibt3wjjncT4wLWvWuncamC2aCe0/jEO4e8jUo8hSsww44pJPTIcul4yUwrnYdKjIE7pFznBW4St7p6BW+ogI8UzbnckFqOe79rRSe2o8zH1Wh2MTRIPy7oHhSCtL2FhtU9WO1W7BR145g9ay9HucSCBygi36hmyNZvQ6mrpx3B7OsD52KnUO8jROqxxMqmUPkQB3ZRxCePxMCMR28e08IzUK6xWz2pNovXfpZb4OVOQJIcBLZ/uglHRWFeI5ssjkWu/ZGZfL9gFaxdYCEJd5tVZ4d3DeY5HFfZhF5FZuqK1npZ3xygtrEoQcQxKInGBKKS5sdJlUNYNxRdks2qUrrPWkiaHXydkezbDOMZxWnJ5nLRZdUOe7dR95+NRuiVasFBlGK1pZStXEwVTOe1B6m
Ua1zpMkhlPdFrX1WB9YXcmfxI9CCHFM7R/lkOh4bhgOKnwIpEaTKo0Pd7qcBogdUhcnqH2MVrhwpwuJmn/MmPn92Ds9WyezBueh15JzzXElgcgJ1+8UvHbxFG9e22F7XBLnSClaeRqn3qYJs8oymdWUtaWxnjTRFFnCrLaEEFOg2misvXMSeFhGK9JEM5xW5En8c1o0Glpc4YT5/Ick0ZxZbWOMRvsgO2OEeMbt73GktaJbZFSNpW7mxfMqngeMUhQpzCob69h8rDtTau+Scew/opbnq8UHF03SYso3/t9oOLe+QpYaplXD61duy8TdY+ipvBJUVcW3fdu3oZTiX//rf/00HvK50u8UvHSuT6+Tc2atw5nVDhv9dmzlrqDXzkgTTW09tbUEoNvKMVrPGwXFnAXsDUIWDYfuJTYjiieJPE04tdqi287JExOH4BHHceepod8puHC6x0orY1pa+u1CdsYI8Qxb9DjaP8rh1nDGrLJxaaXxOO/nGVNHWTvy1LDoO+bnDcwWk3e1jrVx8WJHxf4jqSHRsZeIdYFmnlVJjaLIUpLEkKUJvU5GVTuubo7uGoEhjtZTyYj85//5f8758+f5nd/5nafxcM+lNDHxADaadN/AuzxL6K8UMC6xLjCe1mituHC6y+3BlLKJvT1g71ZeYxRGKZr5/Ib9jFJorem3c3qdfDnD5vRqG4htlbPU0M4TWnka07GTWnbGCPGMu1+Po3aRsDOJ9WtxIKajatxyWSbRajkcT8fhNBitaOcJ7SIj0fFramtRSlGkCdOqpqzjfSymhhdZvNDR88eWzs3H16EHIl/4whf4pV/6Jf7xP/7HfOELXzjsh3tu7S9c3d+fw7nApTN9XjzTx82XRTpFymBS8ua1HTaHEyZlQ2Pd/MCdz2+Y73oxKtDKUlp5gnOedpHSOE87T2nle4tMlVL0V3IGk2rZV8TOaozsjBHiuXC/HkdGa1Kjsc5zut+mbhzb4xkQs6chhOXXuBA41Y3D7bLEzHf+x07QZ9dWIMD2uOR0v411gVlt2R7NKNJYoLr8usVjS+fmY+lQA5EbN27w6U9/mp/7uZ+j3W4/8PZVVVFV1fL94XB4mE/vmbK7cPVe/TkubvTotvcWbK2utHjlvGLWWFJjMImmaRzTsmFWNzgfaBcpa92CIk2YVY68Y3hhvcvbN3Yo8njVUe+pfPcMpzWTMta0p4mhlSWcP93l3NqKZEKEeMbdq8cRsKxRG89qrHPM6oZA3GaLgqr2tIuMbivl+vaE4bSmnad4Ykfm3ecziGMrRtOGdpGQZ2bZwyhLDL12tifNK52bj6dDC0RCCHzqU5/iL/2lv8THP/5x3nrrrQd+zec//3l+/Md//LCe0jNvUbi6LA57iP4cIQSu3hrhfeBUP/YaWTREm1UNO+MKrSD4uNSyuC+jNZc3VQxYqma5BS/MK9/jzhxNr52jtWJaWq5sDmnnqWRDhHjGHdTjaCEukaTMasto1lDVliSJu12aJu6s6bUz8izhdL/NzrhkUjXoWh14Ptt9zlsEGioo1rvFcrkYpHPzcfbIgchnP/tZ/sbf+Bv3vc3v/d7v8Uu/9EuMRiM+97nPPfR9f+5zn+Mzn/nM8v3hcMilS5ce9Sk+1xYtlHdvl7tff46DUqiLoVCxviNlUjW8cn6dXjtf3lcIgTxJuHp7iFZx50yi1Hx5Jw6W6rVT8nReMNuJs2+ubo7otXPJigjxDHuYpeLz610CgSu3hmADWus7u/3mAUS7SHHO88r5ddp5euD5bP85b1Y3XLk5jIM8jduVGW7ixVEn3lb6Fh0fjxyI/JW/8lf41Kc+dd/bvPLKK/zKr/wKv/Ebv0Ge710K+PjHP84P/uAP8g//4T+86+vyPL/r9uLRKaUeuhDrfilUAJNodK1o5+ld9xnwMQviQ+xLMm8Rv6hwh0AgxBkPUigmxHPjYZaKX72whlGKadWQGE2WmGUdyIJzHmNiZvV+54zd57xVCtp5uicz7
HwcpOdN4O0bO1zeVPTbhdSrHROPHIhsbGywsbHxwNv9D//D/8Bf+2t/bfn+tWvX+N7v/V5+5md+hk984hOP+rDikNwvhQr3XlO9vj1mc2c6nx8TqMtmvt0uxGp4Y3A+LvFk8ym7UigmxPPjYZaKQwic6rbZGs1IC70nCHkvSym7syQ74xnv3BygCHRa6TIg2hrNmFS19BU5Bg6tRuTFF1/c8/7KygoAr776KhcvXjyshxWP6EEp1INOBINJyTeubVM2jlZmaGc6TtltHI3zpMbM+5bEupEFKRQT4vnyoKXih8mcPO5W/zhML+WdGwO8h/7KnSVhnRhZLj5GpLPqc+5RTwSL/gDWerIkBi5KKbLEkBrNcFozqy1ax5oTvevrpFBMiOfPg5aKH6fI/mHdbxuxLBcfH08tEHn55Zelm90x9SgngsWB3W2n+OBj0DEPRpRStPKESVkzrSwrrQxjFI117/nqRgjx7HrUIvuH9cAaOFkuPhYkIyKAhz8RLA7sJDH02jmN9VTWkRqNVgqtY8OiIkvIU8Nk1jyxqxshxLPrUYrsH9bj1sCJp0sCEbH0MCeC3Qd2niWc6rUYTqvYojl4QoAiS/i2V8/RbedP9OpGCCEexePUwImnTwIR8Uj2H9h5lnA6NTQ2Dq+alpaNfptz69JBVQhxtA6zGFY8ORKIiEdyrwNbKagbT6eVcvFMTw5ssRRCeOJr/0I8rMMshhVPhgQi4pHJgS0e1mBS3vk78QGtpZGUePoOqxhWPBkSiIjHIge2eJDBpOT1K7eparcnJX6/RlKSPRGH5TCKYcWTIYGIeGxyYIt7WfSbqWpHr5M9VCOpe2VPzp9eITFGghMhnlESiAghHsv9sheP2kjqXtmTG9tjLt8a0MoSjNaytCPEM0gCESHEfR0UcAyn1X1rPx7YSEpr6qZmezQjhMCVzeFd2ZPGB2rrKOdN886sFjgvM0KEeNZIICKEuKeDlkvyJGFWN4TAPWs/dvebUUbT2DipWSuF956dScWssrzx7jZaKyazmtWVYs8ogeG0wnpPkRnqxjEpG/LU0GtnDKcyI0SIZ4UEIkKIPRYZkMXUUufuTC211nF9e4zznnNrHdJ5t8r9tR8ffPEU/XbB9a0R1ntq64gTHgLWxVEPK62M/krOrGooG8fOuCQxsTdNYz1V49AoqsbFAYrDED+fJrRymREixLNCAhEhxNLuDMj2qKSxjpVWRssnpIkizmmPgcRo1lBk6XJ0++7aj2llSYxiZ1xinUdphVEKFwLeB9LE0M4SnAuEAIlRNM4znNZspAk+BKzz2PkEZ4VCAd4HJmVN3ViKLJEZIUI8AyQQEUIAe7fbJokGAnlqKBuLHXpO9VoEYhiSJZqqsdTWkaV3ZngshohdvTXkK+/covExUAg+4AkEwGiFUrA1KjHTCh/CcumGsqLfyYFAYx3WxbEBgRiYoEArqFA4H0i0LMsIcdLJpB8hxF3bbc38BT5JNHlisN4znFYoQCsFShFCiMHDLs55rHN8/eoWVWNJtCJPDVlqlvcJcXhi2TRopSjShCzRBA9l5dgel2yNZtTW48Mi/xLFGhNwPlDWDcNZ9RR+OkKIwySBiBDiru22WimUUngfUEqRGk3VOFCQp4bGOmAelMyFEJjMGmZ1w7Rq8CEGDLV1sWh1flPn4/KMAnwITCtLbT0heFwIbA1njCb1Xc8xwPz5gFLxfq7dGhH2BUNCiJNFAhEhxF3bbbPEkKexaDQQd7uEEOs5VlopzgWcj8snfv7/4aTGEygrRwhhUTpCCGB9oHF3AoYAoBRlbbHOxUyLVmgVgxPPwcFFIJakGBODpdGsZlI2h/iTEUIcNqkREULs2W6rEwMKeu2Mxjqq2qHnyyplbRlN62V24+bOlNRoOq2UU702w0mFn9+fdXFp5V6cCygT60qcD6BikKFgz3rMMqDZ9f/gA1prmBe1CiFOLsmICCHoFCn9dsG0tMuljjxLONVrUWRJ3D7rAzvjE
oAzqx0unelxdq1NkSUkRrPWLbDek82DmgdZLLW4eSFICODmNSG7V1sW/1zcow8xe5IYQ5IYkns0TRNCnAySERFCoJTiwkaXSVUznNTLRmV6XmzayjsooLKO9ZUCNQ802kVGK08ZTmrevT2GEGjlKePZ3TUeBwnhzoZgdUDwcdD7SoHRMQBZX2nRKdL38J0LIY6aXEoIIYA4Ufm1i6dY77aoGsdoUjEpG1p5ygunVggEiiyhso66cXuigzRRDCYl1gfaeUKaaO5XQ7pImHigth7rPM391nFgvmPnztf2OzkXz/Sks6oQJ5xkRIQQS/1OQa+dc317zLVbI6ZVw6xqeONayXBaxVoSBaBIE0ORGhrn5t1PPYlWTGaKJFH3KDdlXpC692P3C0E0MWBZ3CY1houne7z/4rrMmhHiGSCBiBBij+G0Wg6h6xQpznl2xmVsOuY92miCj23gQ4ht14vMLDMV06rGl/e+/wckPu6+PTF4UUrR6+R84Pw6L59bjcWqQogTTwIRIcTS/sZmCsXOuIq9RBJFbT3ae9JEoVSY9wrxTKtYZar0owcaD/e84nPzPnBjZ0JZu+WkXyHEySaXFEKIpf2NzWrrYodUo5a1GC7EACSEO8sszof5TJgnL/YYmfcPUYo8MWyNZrx+5TaDyX1SL0KIE0ECESHE0v7GZj6EuJ1XxcAjndeI+Hlzs90ULCfrPmkKRZ4lWB9bnfU6GVXtuLopnVWFOOkkEBFCLO1ubAYsW73HKbmxvXpiNK0sJUk0Rus7/T24f9Hp4wrEpmd5Fofr+RD2TPqVzqpCnGwSiAghlvY3Nlu0enfzTIf3sTg1Tw2p0XcNvXsUj7KM08oSCPMGaPOuZ8ZovJfOqkKcdFKsKoRYOqixWbeVUlYNVROXaNJEz+tB4ryXe82FuR+twGi1Z/7MPZ8TMK0anA8Yrdgez5hWDa08mXdYlespIU4yOYKFEHvsb2xWW89KO6PfKcgSQ914GutpFymn+22S5NFPIw/RAX4pEJueOR+W036nZc2twZQ8NdJZVYgTTjIiQoi7LBqbTcoG6zyJ0bTzhBs7k2WjM6UURisunOpybWuMtS5us32I+w+o5XLPw9IKrHOEKpAYM+8NL11VhTjpJBARQhxIKcVKK9vzsRfWu5xbW7krQPlXX7vOO5sDrPW4XXUjavc/dgUp3geMBvsI5R0hzAtinScxhtO9NpW1TMrmrucphDg5JBARQjySgwKU919cZ1Y33BrMwDq0utNvRAFGazp5Qtk4GuswWuFDOLDd+70YHXfwaB37iiSJpm6cFKsKccJJjYgQ4j3rdwq+9ZWzXNrokSQaF2I9R5Zoeu2cC6e6bKx20Dq2aT+z1qHbzshTg3nI1RWtFcbEGTYhxJ4nUqwqxMknGREhnmMhhD3LLJ0ifexptv1Owe977QVeOL3C165sUVtHr5WR5wneBwbjCj3PpkxmDc5DK09JjGY0u7sXiCaWgCxWerRWcYXHQzCBqnGc6XekWFWIE04CESGeU4NJydXNEYNpifcBrRX9dvGeZrgopTh/qkenyJb3PZk1aK1Y7RZoDbOqwXpPnpi41JLGGTZ14wjMW7qreT3JIgiZx0Y+LFrJa/LE0OvEgtr3EkAJIY6WBCJCPIcGk5LXr9ymqh3tIsEYjXOerdGMSVXz2sVT72mg3L123fybb9zk7Zs7FHmyDByUUrTy2DTN+VjvEYi1I0pBojWJid1enQejIUs1jXO8fWOHy5vvPYASQhwdCUSEeM7cNWF3HhDoxNDraIaTmqubI3rt/D1lGQ4qaj292uLyrQFN41BpXG5ZdEttFwlGKyZVg5rvt9Hz4lSCAgN5aijyhDwxdFrpEw+ghBBPnwQiQjxn9k/Y3W3/DJeDtsW+l7qS1ZUW/U5ONd/tYp2fZ0RSeu0MpaBdWV5Y7zKaVZRVQ209SkG3leNDYFbZQw2ghBBPlwQiQjxn9k/Y3c8Yja/sgdti32tdSadIOd3rsDWaU
mQJAea7awyBwHBSc6rb5tXzawB7Ap4QAl9+6+ZjB1BCiONJAhEhnjO7J+zqxNz1eXePbbFPoq5k9yybctf9NM4xLS15Zriw0V0GGrsDip1x+dgBlBDi+JIN+EI8Z/ZP2N0thMC0tPTbxZ5tsfvrStLEoJUiTQy9TkZVO65uju66v4Psn2UzntZUjWO927pvMLM7gDrIvQIoIcTxJhkRIZ4zB03YXWQ3DspKwHuvK9nvoF01D6o1WQRQW6MZvY7ec9tFALXebUlfESFOGAlEhHgOLbISy3qPyqK1Yr3bOrDe473UldzLQbtqHnT7Rw2ghBDHnwQiQjynHiUr8bh1JYfxnB8lgBJCHH8SiAjxHHvYrMRxWhZ5nGUdIcTxJYGIEOKBjtuyyKMu6wghji8JRIQQD0WWRYQQh0ECESHEQ5NlESHEkyaBiBDikciyiBDiSZLOP0IIIYQ4MocaiPziL/4in/jEJ2i1WqytrfH93//9h/lwQgghhDhhDm1p5h//43/Mpz/9af76X//r/OE//Iex1vLlL3/5sB5OCCGEECfQoQQi1lp+9Ed/lJ/4iZ/gL/yFv7D8+Ic+9KHDeDghhBBCnFCHsjTz27/921y9ehWtNd/+7d/OCy+8wPd93/c9MCNSVRXD4XDPmxBCCCGeXYcSiLzxxhsA/NW/+lf5r/6r/4pf+IVfYG1tjU9+8pNsbW3d8+s+//nP0+/3l2+XLl06jKcnhBBCiGPikQKRz372syil7vv2la98Be/j4Kv/8r/8L/mzf/bP8rGPfYyf+qmfQinF//q//q/3vP/Pfe5zDAaD5dvly5ff23cnhBBCiGPtkWpE/spf+St86lOfuu9tXnnlFd59911gb01Inue88sorvPPOO/f82jzPyfP8UZ6SEEIIIU6wRwpENjY22NjYeODtPvaxj5HnOV/96lf5d/6dfweApml46623eOmllx768UIIAFIrIoQQQpwgi9ftxev4/RzKrpler8df+kt/iR/7sR/j0qVLvPTSS/zET/wEAD/wAz/w0PczGo0ApFZECCGEOIFGoxH9fv++tzm0PiI/8RM/QZIk/If/4X/IbDbjE5/4BL/yK7/C2traQ9/H+fPn+d3f/V0+9KEPcfn/396dRzV1pn8A/97EkARIkE0lQFkEtxElSKGCU7H6E5ejVVtb64Ki0qlg0XHDrVo7CmOVUxGtS12mjjOKy2ARWyh1HfeNoCyCbKIsLriA7CT394eHO03BCpJ4CX0+5+Qcc3Pve5+8F2+e+77vfe/du5DL5foKt90rKyuDvb091WMrUB3qBtWjblA96gbVY+s1VYcsy6K8vBwKheKV2zNsc9pNeFRWVgYzMzM8e/aM/khageqx9agOdYPqUTeoHnWD6rH1WluH9KwZQgghhPCGEhFCCCGE8KbNJyJisRgrV66k23pbieqx9agOdYPqUTeoHnWD6rH1WluHbX6MCCGEEELarzbfIkIIIYSQ9osSEUIIIYTwhhIRQgghhPCGEhFCCCGE8MbgEpFjx47B29sbUqkU5ubmGDNmDN8hGayamhq4u7uDYRioVCq+wzEo+fn5mDFjBpycnCCVStG1a1esXLkStbW1fIfW5m3evBmOjo6QSCTw9vbG5cuX+Q7JoERERODtt9+GTCZDp06dMGbMGGRmZvIdlkH7+9//DoZhMHfuXL5DMTiFhYWYPHkyLC0tIZVK4ebmhqtXr7aoDINKRA4fPowpU6YgMDAQKSkpOHfuHCZOnMh3WAZr0aJFzZp+lzR269YtaDQabNu2DWlpafjmm2+wdetWLF26lO/Q2rSYmBjMmzcPK1euxPXr19G3b1/4+/vjwYMHfIdmME6fPo2QkBBcvHgRSUlJqKurw9ChQ1FRUcF3aAbpypUr2LZtG/r06cN3KAbnyZMn8PX1hUgkwk8//YT09HRERka26FEuAADWQNTV1bG2trbsjh07+A6lXfjxxx/ZHj16sGlpaSwANjk5me+QDN7XX
3/NOjk58R1Gm+bl5cWGhIRw79VqNatQKNiIiAgeozJsDx48YAGwp0+f5jsUg1NeXs66urqySUlJ7MCBA9k5c+bwHZJBCQsLYwcMGNDqcgymReT69esoLCyEQCCAUqmEjY0Nhg8fjtTUVL5DMzj3799HUFAQ/vnPf8LY2JjvcNqNZ8+ewcLCgu8w2qza2lpcu3YNQ4YM4ZYJBAIMGTIEFy5c4DEyw/bs2TMAoL+91xASEoKRI0dq/U2S5ouLi4OnpyfGjx+PTp06QalU4rvvvmtxOQaTiOTm5gIAvvzySyxfvhzx8fEwNzeHn58fHj9+zHN0hoNlWUybNg2fffYZPD09+Q6n3cjOzkZ0dDT+8pe/8B1Km/Xo0SOo1Wp07txZa3nnzp1RUlLCU1SGTaPRYO7cufD19UXv3r35Dseg7N+/H9evX0dERATfoRis3NxcbNmyBa6urkhMTMSsWbMQGhqK77//vkXl8J6ILF68GAzD/O6roT8eAJYtW4YPPvgA/fr1w+7du8EwDA4ePMjzt+Bfc+sxOjoa5eXlWLJkCd8ht0nNrcdfKywsxLBhwzB+/HgEBQXxFDn5IwoJCUFqair279/PdygG5e7du5gzZw7+9a9/QSKR8B2OwdJoNPDw8EB4eDiUSiU+/fRTBAUFYevWrS0qp4Oe4mu2+fPnY9q0ab+7jrOzM4qLiwEAvXr14paLxWI4OzujoKBAnyEahObW44kTJ3DhwoVGzwTw9PTEpEmTWpzJtjfNrccGRUVFGDRoEHx8fLB9+3Y9R2fYrKysIBQKcf/+fa3l9+/fR5cuXXiKynDNnj0b8fHxOHPmDOzs7PgOx6Bcu3YNDx48gIeHB7dMrVbjzJkz2LRpE2pqaiAUCnmM0DDY2Nho/SYDQM+ePXH48OEWlcN7ImJtbQ1ra+tXrtevXz+IxWJkZmZiwIABAIC6ujrk5+fDwcFB32G2ec2tx40bN2L16tXc+6KiIvj7+yMmJgbe3t76DNEgNLcegRctIYMGDeJa5wQC3hsY2zQjIyP069cPx48f526712g0OH78OGbPns1vcAaEZVl8/vnniI2NxalTp+Dk5MR3SAZn8ODBuHnzptaywMBA9OjRA2FhYZSENJOvr2+jW8ezsrJa/JvMeyLSXHK5HJ999hlWrlwJe3t7ODg4YN26dQCA8ePH8xyd4Xjrrbe03puamgIAunbtSldVLVBYWAg/Pz84ODhg/fr1ePjwIfcZXd2/3Lx58zB16lR4enrCy8sLGzZsQEVFBQIDA/kOzWCEhITg3//+N3744QfIZDJufI2ZmRmkUinP0RkGmUzWaEyNiYkJLC0taaxNC/z1r3+Fj48PwsPD8dFHH+Hy5cvYvn17i1uHDSYRAYB169ahQ4cOmDJlCqqqquDt7Y0TJ060/J5lQlopKSkJ2dnZyM7ObpTAsfRA65f6+OOP8fDhQ6xYsQIlJSVwd3dHQkJCowGs5OW2bNkCAPDz89Navnv37ld2KxKiS2+//TZiY2OxZMkSfPXVV3BycsKGDRswadKkFpXDsHTWJIQQQghPqFObEEIIIbyhRIQQQgghvKFEhBBCCCG8MajBqqT5ysvLUVxczE0ERwghpOUEAgFsbGwgk8n4DqXdokSkndFoNIiIiEBsbCzfoRBCSLsxduxYLFmyhOYL0gNKRNqZiIgIHDlyBKGhoVAqlRCJRHyHRAghBquurg7JycmIjo4G8OIxI0S36PbddqSsrAzvvfceQkNDERAQwHc4hBDSbuzZswcbN27EyZMnqZtGx6iNqR1pmGFRqVTyHAkhhLQvDefVhueeEd2hRKQdaRiYSt0xhBCiWw3nVboBQPcoESGEEEIIbygRIYQQQghvKBEhhBBCCG8oESGE6ISfnx/mzp3LdxicthZPW1NdXf3K16/HQ0RHR8PBwQEdOnTAggULUFpaik6dOiE/P5+/L6EjEyZMQGRkJN9h/GHRPCKkSRqNBoVFD/G8ogqmJlLYKqzbz
UQ+arUaDMO0m+/TFJZlUVFdh3q1Bh2EAphIRGAYhu+wXqm2thZGRkZ8h8ELVqMB+6gUbFU1GKkEjJUlGD3+jXp4eCAjI+OlnzMMg9zcXDg6OiIlJQXz5s3DDz/8AKVSCTMzMyxfvhzvv/8+HB0d9Rbjm7J8+XK8++67mDlzJszMzPgO5w+n/Z6JyWu7nX0Xm7YdwrqovdiweR/WRe3Fpm2HcDv7rt72eejQIbi5uUEqlcLS0hJDhgxBRUUFNBoNvvrqK9jZ2UEsFsPd3R0JCQncdqdOnQLDMHj69Cm3TKVSgWEY7krtH//4Bzp27Ii4uDj06tULYrEYBQUFqKmpQVhYGOzt7SEWi+Hi4oKdO3dy5aSmpmL48OEwNTVF586dMWXKFDx69EhvdaArzyqqkXHnEW7m3Uda/gPczLuPjDuP8KyiWm/7nDZtGk6fPo2oqCgwDAOGYZCTk4MZM2bAyckJUqkU3bt3R1RUVKPtxowZgzVr1kChUKB79+4AgPPnz8Pd3R0SiQSenp44cuQIGIaBSqXitv2949NUPG35yl19rxC1R46iJuYQag7FoibmEGqPHIX6XqHe9jlz5kzI5XJkZWUhLy9P6zV48GAMGTKESzLi4+Ph5eWFESNGwMbGBgCwc+dOzJgxQ2/xNVd9fX2ry+jduze6du2KvXv36iAi0lKUiBAtt7PvYueeOKSm58DCXI6uTrawMJcjNT0HO/fE6SUZKS4uxieffILp06cjIyMDp06dwrhx48CyLKKiohAZGYn169fjxo0b8Pf3x+jRo3H79u0W7aOyshJr167Fjh07kJaWhk6dOiEgIAD79u3Dxo0bkZGRgW3btsHU1BQA8PTpU7z33ntQKpW4evUqEhIScP/+fXz00Uc6//669KyiGln3SvG4vApikRCmxkYQi4R4XF6FrHulektGoqKi0L9/fwQFBaG4uBjFxcWws7ODnZ0dDh48iPT0dKxYsQJLly7FgQMHtLY9fvw4MjMzkZSUhPj4eJSVlWHUqFFwc3PD9evX8be//Q1hYWFa27zq+DQVj729vV6+e2up7xWi7sdEqPPugJHJILC1ASOTQZ1358VyPSUjAQEBqK6uxs2bN+Ho6Mi9TE1NcebMGcycORMA4OLiguXLl+P8+fNgGAYBAQH48ccfIRaL8c4773DlaTQahIeHw9XVFRKJBJ07d8a0adO4z1NTUzFixAjI5XJ06dIF8+fPR21tLfd5QkICTExMtLqDUlNTwTAMl2Dm5+eDYRgcOHAAf/7znyEWixEXF4eCggJMnToVnTt3hlQqRd++fXH27FmunIKCAkycOBHm5uawsLDApEmT8OTJE636GDVqFPbv36/TOibNQ10zhKPRaPBT0gU8flKGbi72XFO+zNQY3VzskZV9Fwm/XEBXZ1uddmsUFxejvr4e48aNg4ODAwDAzc0NALB+/XqEhYVhwoQJAIC1a9fi5MmT2LBhAzZv3tzsfdTV1eHbb79F3759AQBZWVk4cOAAkpKSMGTIEACAs7Mzt/6mTZugVCoRHh7OLdu1axfs7e2RlZWFbt26te5L6wHLsih8WI6aWjXkJkbc8RN0EEJuIkBZRS0KH5ZDbizWeTeNmZkZjIyMYGxsjC5dunDLV61axf3byckJFy5cwIEDB7QSOhMTE+zYsYPrktm6dSsYhsF3330HiUSCXr16obCwEEFBQdw2zTk+TcXT1rAaDeovX4Wm/DkEdor/HRdjKQRSBTT3ilB/+RoEChudd9NYWVlhzJgx2LVrF8aNG8ct37t3L8zMzDBmzBgAL1qn+vfvj1mzZmHy5MkwNTXFsmXL0K9fP63yIiIiEBMTg+3bt8PZ2RmFhYW4desWACA5ORkDBw5EaGgoNm7ciHv37mHixIno2LEjvvjiC26d3r17a51bVCoVFAoFrKysAAApKSkAgHXr1iE8PBxOTk4QiUTw8vLCu+++i7i4OFhYWODUqVOQy+UAgOzsbC7+ixcv4vnz5
wgODsbChQuxY8cObl9eXl5Ys2YNampqIBaLdVrX5PdRIkI4hUUPkZ17Fwobq0Y/VAzDQNHFErdz7qKw6CHs7TrrbL99+/bF4MGD4ebmBn9/fwwdOhQffvghhEIhioqK4Ovrq7W+r68vd0JqLiMjI/Tp04d7r1KpIBQKMXDgwCbXT0lJwcmTJ7kWkl/Lyclpk4lIRXUdnlVWw1jSocnjZyzpgGeV1aioroOp9M2Mw9i8eTN27dqFgoICVFVVoba2Fu7u7lrruLm5aY0LyczMRJ8+fSCRSLhlXl5eWtsY4vFpCvuoFJrCIggsLZo8ZgJLc2gKC8E+KgXTyVrn+w8KCsKwYcNQVFQEhUIBANi9ezcCAgK4Y2Jqaor8/HwMGDCAS+ru3LnDrd8gMTERo0aNwqBBgwAADg4O8PHx4fYzZcoUrF69GsCLVpbAwEDEx8dziYhKpeIuFBqkpKRoLVOpVDAxMcHBgwe5bqMRI0bgnXfe0Wppc3V15f4dHByM4OBgraR40aJFWLhwoda+FAoFamtrUVJSwl0QkTeDEhHCeV5RhZqaWhhLm74akBpLUPPgMZ5XVOl0v0KhEElJSTh//jx+/vlnREdHY9myZUhKSnrltg1XT79+ZFJdXV2j9aRSqdaJXiqV/m65z58/x6hRo7B27dpGnzX0kbc19WoNNBoWQmHTV85CoQCamnrUq9/MzJD79+/HggULEBkZif79+0Mmk2HdunW4dOmS1nomJiYtLtsQj09T2KpqsLV1YCQvuQIXi8E+fgK2Sj9daoMHD4aDgwO+//57LFmyBNeuXcONGze0uihu3LgB4H+tlABQVVWllSgCwOjRoxEWFoarV69i/Pjx+OCDD2Bubo5bt27h2rVrjcZfGBkZoaamhnufnJyM0NBQrXVUKhU8PT259ykpKRg9ejSXhNy5cwc//fQTkpOTm/x+d+7cQVJSEs6ePat1V4xarW7UVddwTqisrGy6soje0BgRwjE1kUIsNkJlVU2Tn1dVVkNsJIKpye//iL8OhmHg6+uLVatWITk5GUZGRjh+/DgUCgXOnTunte65c+fQq1cvAIC19YurxF8//+HXAxpfxs3NDRqNBqdPn27ycw8PD6SlpcHR0REuLi5ar9f54XwTOggFEAgYqF+SaKjVGggEDDq8JFFpLSMjI6jVau79uXPn4OPjg+DgYCiVSri4uCAnJ+eV5XTv3h03b97U+pG6cuWK1jrNOT6/jactYqQSMEYioLrp/3OoqQEjEoGRSpr+vLX7ZxhMnz4du3fvBvCie8vHxwc9e/bk1lGpVI3+7q2srBqNsViwYAEyMjIwePBgfPPNN3BxcUFeXh7S0tIgEokatVKlp6dzyU1FRQVycnK0Wj80Gg2Sk5MbtYj4+flpvTcyMmrUytYgJSUFFhYWuHHjBlQqFfe6efMmTp48qbXu48ePAfzvnELeHEpECMdWYQ0XZ3sUFT/Cbx/KzLIsikpK4drVHrYK3f5HvXTpEsLDw3H16lUUFBTgP//5Dx4+fIiePXti4cKFWLt2LWJiYpCZmYnFixdDpVJhzpw5AF408drb2+PLL7/E7du3cezYsWbNB+Do6IipU6di+vTpOHLkCPLy8nDq1CmueTckJASPHz/GJ598gitXriAnJweJiYkIDAxssz9uJhIRzIwlqKyub/L4VVbXw8xYAhOJfp5F5OjoiEuXLiE/Px+PHj2Cq6srrl69isTERGRlZeGLL75olFA0ZeLEidBoNPj000+RkZGBxMRErF+/HgC4Vq3mHJ/fxtMWnxHCWFlCYKuApvRxk8dMU/oEAltbMFaWeoshMDAQubm5+OWXX7Bv3z6tsThA010mSqUS6enpjcrq1q0bFi1ahGvXrqG8vBzp6emQyWRQq9VaLZV5eXmIjY3FpEmTuPcajQY9evTg1klMTERpaSm377KyMuTn52s91FMkEqG+vv6lrRgikQjl5eVQKBSNElZbW1utdVNTU2FnZ8eNRyFvD
iUihCMQCDD8//rDwlyOrOy7KC+vQL1ajfLyCmRl34WFhRzDhvTX+fwbcrkcZ86cwYgRI9CtWzcsX74ckZGRGD58OEJDQzFv3jzMnz8fbm5uSEhIQFxcHNcHLBKJsG/fPty6dQt9+vTB2rVruX7oV9myZQs+/PBDBAcHo0ePHggKCkJFRQUAcC0xarUaQ4cOhZubG+bOnYuOHTu22flHGIaBrbUMYiMhyipqUVevhoZlUVevRllFLcRGQthay/Q2n8iCBQsgFArRq1cvWFtbw9/fH+PGjcPHH38Mb29vlJaWIjg4+JXlyOVyHD16FCqVCu7u7li2bBlWrFgBAFx3QHOOz2/jKSgo0Mv3bg1GIEAHL08IZKbQ3CsCW1kJVq0GW1kJzb0iCGSm6ODVT6/ziSgUCowYMQLTp0+HWq1udGdYw3H4NX9/f6SlpXGtIl9//TX27NmDjIwMZGZmYunSpbC0tISPjw+8vb3RsWNHLF68GLm5uThx4gRGjhyJCRMmYNiwYQAAS0tLMAzDJaoXL17E7NmzIZFIuJaUlJQUCIVCrS4ib29vmJmZYdasWcjIyEB6ejq2bt3K3VXn7e0NuVyOgIAApKSkIDs7GwkJCU1OdPff//4XQ4cO1UmdkhZiSbuRkZHB9uvXj83IyGhVOVm3C9iob2PYzxesZz8NjWA/X7Ce3bglhs26XaCjSIk+PX1exablPWDPpxWwZ2/eYc+nFbBpeQ/Yp8+r+A7tte3du5cViURsZWUl36HoRf3de2z14SNs5cZv2YrIKLZy47ds9eEf2Pq7997I/o8ePcoKhUJ21qxZWsvVajVrbGzMxsfHN9rGy8uL3bp1K8uyLLtq1Sq2W7durEQiYa2srNj333+fTU9P59Y9c+YM6+HhwUokEtbZ2ZmNiIhg6+vrtcpbvXo1a25uzr711lvs1KlT2bCwMNbT05P7PDo6mv3Tn/7UKI6zZ8+y/fv3Z01MTFhzc3N2+PDh7NOnT7nPL126xPr5+bFyuZyVyWSsh4cHGxUVpVVGVVUVa2Zmxl64cOGldaSr8ytpjGHZ37QHEoN169YtTJ48GXv37tVq4nwd7Xlm1T8C1kBnVm2wZ88eODs7w9bWFikpKZg9ezb8/Pza9YRTb3pm1dY6duwYFi5ciNTUVIM/N2zZsgWxsbH4+eefX7qOLs+vRBvdNUOaJBAIdHqLLnmzGIZ5Y7fo6kNJSQlWrFiBkpIS2NjYYPz48VizZg3fYekVIxDo5RZdfRk5ciRu376NwsLCNjtZXHOJRCJER0fzHcYfFiUihJA2Z9GiRVi0aBHfYZBXaC8PFWyYRZbww7Db0wghhBBi0CgRIYQQQghvKBFpRxoGjDU1syghhJDX13BeNfSBuW0R1Wg70vAciJdNd0wIIeT1NJxXDekRAoaCBqu2I3K5HGPHjuVGfyuVSohE+plFkxBC/gjq6uqQnJyM6OhojB07FjKZjO+Q2h2aR6Sd0Wg0iIiIQGxsLN+hEEJIuzF27FgsWbKEumb0gBKRdqq8vBzFxcVt8vkahBBiKAQCAWxsbKglRI8oESGEEEIIb6iNiRBCCCG8oUSEEEIIIbyhRIQQQgghvKFEhBBCCCG8oUSEEEIIIbyhRIQQQgghvKFEhBBCCCG8+X+V9mljpEqBywAAAABJRU5ErkJggg==", + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAiIAAAG9CAYAAAAobB0hAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy88F64QAAAACXBIWXMAAA9hAAAPYQGoP6dpAACP6UlEQVR4nOz9e5Dk2XXYd37vvb9XZlZmVlV3dff0Y2YwAwxBEARIASQk0lKAkkIUpbVEhZYhr2mvwFUgJJu0KUMbNhB2mOJaIVhBhkO2Q2E9IkRpI0yL61CQ9JILBblikJZFUgJBiRKWBAbAvPox3V3dVZXv3+M+9o+bmV1VXf2crq6q7vOJqJmuqqzMrMfvl+d37rnnqBBCQAghhBDiCOijfgJCCCGEeH5JICKEEEKIIyOBiBBCCCGOjAQiQgghhDgyEogIIYQQ4shIICKEEEKIIyOBiBBCCCGOjAQiQgghhDgyEogIIYQQ4shIICKEEEKII3PogcjVq1f5D/6D/4BTp07RarX41m/9Vn7rt37rsB9WCCGEECdAcph3vr29zXd/93fzPd/zPXzhC19gY2ODr33ta6ytrT3U13vvuXbtGt1uF6XUYT5VIYQQQjwhIQRGoxHnz59H6/vnPNRhDr377Gc/yz//5/+cf/bP/tljff2VK1e4dOnSE35WQgghhHgaLl++zMWLF+97m0MNRD70oQ/xvd/7vVy5coVf+7Vf48KFC/zH//F/zKc//ekDb19VFVVVLd8fDAa8+OKLXL58mV6vd1hPUwghhBBP0HA45NKlS+zs7NDv9+9720MNRIqiAOAzn/kMP/ADP8AXv/hFfvRHf5S//bf/Nn/+z//5u27/V//qX+XHf/zH7/r4YDCQQEQIIYQ4IYbDIf1+/6Fevw81EMmyjI9//OP8+q//+vJj/+l/+p/yxS9+kd/4jd+46/b7MyKLiEoCESGEEOLkeJRA5FB3zbzwwgt86EMf2vOxb/7mb+add9458PZ5ntPr9fa8CSGEEOLZdaiByHd/93fz1a9+dc/HXn/9dV566aXDfFghhBBCnBCHGoj8Z//Zf8Zv/uZv8tf/+l/n61//Oj/90z/N3/27f5cf/uEfPsyHFUIIIcQJcaiByHd8x3fwsz/7s/wv/8v/woc//GH+m//mv+Fv/s2/yQ/+4A8e5sMKIYQQ4oQ41GLV9+pRil2EEEIIcTwcm2JVIYQQQoj7kUBECCGEEEdGAhEhhBBCHJlDHXonhHg+hBCYlA3WeRKj6RTpnkGVD/q8EOL5JYGIEOI9GUxKrm6OGExLvA9orei3Cy5sdOl3igd+XgjxfJNARAjx2AaTktev3KaqHe0iwRiNc56t0YxJVXP+VJdrt0f3/PxrF0/Ra+eSLRHiOSaBiBDisYQQuLoZg4xeJ1sGDzox9Dqawbji9cu3SRNNr5Pf9fnhpOYb17bIk4ThrHrobIks8wjxbJFARAjxWCZlw2Ba0i6SuwIBpRRZqtkel5xZ6xz4eWMU126PWClyep3swGzJ/mBElnmEePbIrhkhxGOxzuN9wJi9p5EQAnXjqK3HeX/wSSbAtGywLtAuEtLEoJUiTQy9TkZVO65ujtjdb3GxDLQ1mpGnhpV2Rp4atkYzXr9ym8GkPNxvWAhxKCQQEUI8lsRotFY455cfq2rLrcGUmzsTtoczrPNsjUuq2u752to6ytqSGo3Re09DSinaRcJgWjIpG+DuZaCHCVyEECeDBCJCiMfSKVL67YJpaQkhUNWW28MZs9qitUIpyFJDVTfcGkz3BCPOe6xzpKnGeU/duD1BhDEa7wN2HuQ8aBlof+AihDg5pEZECPFYlFJc2OgyqWoG44qyWRSQKqz1ZGlCt5UxmtWUtY31IqsdnPcMJxXOB8rKsWmnaKXIU0OvnZNnCc55tFYk82Wfey0DLRij8ZVdBi5CiJNDAhEhxGPrdwpeu3iKN6/tsDWaoYCAIk8SVldyijwlSw3b4xmzyrIzLlEKfAikiQE
gSzQhwKy2NNaz3iuoas96t0WnSIG9y0B6/nW77Q9chBAnhxy1Qoj3pN8pONVvYbRGKXAuUDUx6CirhjxL2Fjt0O/kvO/cKr1WQStLObfWIUsMdROzGJnRVI3l+u0JWsGF093lMsz+ZaDdQghMS0u/XSwDFyHEySEZESHEezKYlFzeHNI4B8TAwDooG8ukajjT76B1DChmlWUwLcmzhCJLOdVTDKc106qmahzeeVCKsrFcvTUiEEiMwTrPWq9gUtYMJ/We5mjT0pJnhgsbXeknIsQJJIGIEOKx3dnN0qCUorGONNFxiSYoGuu5dntEjA8Um4MJIcTlmG47jzUhqWEw8XH3TQAIzCrLlVsDLt8a0MoSjI5LM3mS0C5UDFqqWBS73m1JHxEhTjAJRIQQD23R1bSxjsZ6Gue4NZxgvUcp0Aqs9WijIIDzu5dR7vx7VjusK5f35ZxHKUUgEAJMyhqIRahaKc6sFjjvmZYNWap5+dwqrSyVzqpCPAMkEBFCPJRFV9NbwwnjWdwhoxTUjcX7gNJ3gg/rD+7nEYtZo8Z5rPeEQFy6CWH+73gjF0D5QFk3zKoGYzRFbpiVlu1hybmXViQAEeIZIIGIEOKBFl1Nx7OasrJ478mSWFzauHlo4cKyf8i9+ort//DidiEEmAckiVZYH9AEnI/LNLcGU5RWaKVIjOLWcMKk7LPSymT2jBAnnAQiQoj7ulMHEjMfPgSKPMG52Mp9N+8DirhEc4+kyJ6syJ3HiFt0k3lRawh+GaT4EHuW5InBh0DdeMq6Ymdc4ryX2TNCnHASiAgh7mvR1TRJDKNZTZponItZiIOCjcC9MyKLz+8PRhRgdOwvYn24+37VfFCeUgQTKBvPlVsDtFLUjd+zi+Z+Q/OEEMeP9BERQtzXoqtpLOMIaKWY1Q3WubsyGw/roK+rGk9t42PtpogzbKzzBEIclJcn7IxKpqWV2TNCnHASiAgh7mvR1XSxRFI1bj4b5sk9xv3uSmtFCIGytlS1m9eBZFTWkWdaZs8IccJJICKEuK9FV1NrHVmiKWuLX+xueQISHWtK7kUrBSga58lSw6leC6UUCkgOaPcOdw/NE0IcXxKICCHuazHcLs8SQoh1HAD+PbzGLwIPraA7b80ew434Bou6EUUrT+i0UorUsNop4kTfxsXlmHvcv8yeEeLkkKNUCPFAi+F2a91WXKpR6pHqQ3YHGEar5bJOYvS8JfydqGbRGE3NC1SdD8teI3Fyb02nSDm7usKscjJ7RogTTnbNCCEeyiIYmVYNznvG05pZ3eD8/Ipm15Zdo2JDsoXdoYJWoI2icTEAmZQ1SqllLciiD4kPcdkmSzVl5cgSQ4BlS3eA16/cltkzQpxwEogIIR7aSivjVLfN1mjG2fUOmztTRtMaD6hd0cYiINm9TVcRMxyLLMfidt4FMgOp0dTWsbuso2k8JZb+SsH7L6yzutLa07DstYun7vQRkdkzQpxIEogIIR7aol5kUtVUtWO9W2Cdp7EWu6u32UG9QiAumzQ2kCYaoxW1jVFH4wMJ/q6dOAGonWdcNkxLy+oKd3VS/eCLp5hWVjqrCnFCSSAihHgkiyWaq5sjdiYztFL4EFuvp4mmahzBB/bXsi5iDKOhyOKpp7H1YuAu1oU9gUsyX+/RGqZlw7998wbv3NzBaI0xajmRd9FJdXVFMiBCnEQSiAghHlm/U9Br54xnNV9+6yY3dyYYpTBGxWm683TI7gLVxXKMUjGIUAqMUcu28G7Xck5q7mzZBUWiwYXAcFqhlSJPE073WxijpZOqECec7JoRQjyWRb0HwNnVDu0iw3uWu1wSo8gSvew3slgtCWGxMyYGJInRtPIErWK2JE01PsQlmUDcLuxCnGPjvKfIE3wIjGYNqZFOqkKcdJIREUI8tkX795V2RqfImJQ1ZW0JBNJ5Dw8d4v+9X+yICfN27ZCnsSFZY30MakLAubBs866JQYsPYbmEo1CkiaJqLLV1ZKnZ00l1pZU
9/R+EEOKxSSAihHhsi/bvznosMRuSJHqevQjzXiDEybnzyb3MA4tWltDOU5wPbI/KGIQEUPMtvIrYO2T3FL3Ff7XWMQiaf9wYjZ8XrAohThYJRIQQj61TpOSp4frWBIjBR2M9RscAxPmA0QqlFf2VjLL2OO9p5yk+BLZGJbWNTcmSROOamCnRgCdmT0JQy94iQAxkfIi9R+brPdJJVYiTSwIRIcRjG04rZpXFeY9SkCWxqVhjHVorijRhrduiyBISo9galljnmVYW5zxKAwSM1qDA6LgssyhcXcy0UXF9BqVicav3gVaexiZn806q692WdFIV4gSSQEQI8VhCCPMCUTi33mE0rakaFzuc+kAIAWM0nVaK94HRtKHTSjFaMZo1tLOEwaSiVo48M8sAJktjVqOxd/qKpMagU0XdOMraUaQJ3VZK45x0UhXihJNARAjxWCZlw2Ba0i4S0sSQpwmNjXUb1jpGs5qydgzGFVlqWO+2WOsVvHV9h147IwSw3pMmGoXCmLiDxjrPSiullalYjJokOO+wLlBkCa0sIUk0tfVoH6STqhAnnAQiQojHstgxY+Z1GUopsvkuGLKEdpGxMy555YU11ubLJoNJtfyauom1IXq+v1cRl3ImrqG2njw1GK3od3Iq60iN5pXza5xd7UgnVSGeIRKICCEey3LHjPPoxNz1eec9WWpY67aWW2r3fM28D4mfF7RC3CWTZ4Y8MdTWY33Aes+ZfmdP1kO26Arx7JBARAjxWDpFSr9dsDWa0evoPVmJexWQ7vmadkaeJsyqBp3FbbqNiztq1rsFO+Oabjvjmy6eotPKJOshxDNKAhEhxGPZPQBvOKlpF0ksVHX+ngWke75mWtPKE+rGMistSkNiDK08ZTyzdFop77+wzko7P8LvUjzL9g9QlGW+oyGBiBDise0egDeYlvjKorW6bwHp/q+JA/AsASjmNSZSgCoO22BS3vm79WHPAEX5u3u6JBARQrwniwF4j3JledfXaEVg3plVrkzFIRtMSl6/cpuqdnsyeTJA8WhIICKEeM+UUo9cQPo4XyPEe7Xof1PVjl7nTu2RTgy9jmY4qbm6OaLXziUYfkokEBFLsl4qhDiuntT5aXf/m/1fr5SSAYpHQAIRAch6qRDi+HqS56f9/W/2kwGKT58EIkLWS4UQx9aTPj89sP+NDFB86uQn/Zzbv16aJgatFGli6HUyqtrN54mEB9+ZEEI8QYdxflr0spmW9q6vW/S/6bcLGaD4FD21QOS//W//W5RS/OW//Jef1kOKh/Ao66VCCPE0Hcb5adHLJs8Mw0lNYx0+BBrrGE5qGaB4BJ7K0swXv/hF/s7f+Tt85CMfeRoPJx7Bw6yXusoynFZSxCqEeKoOq57jcfrfiMNz6IHIeDzmB3/wB/l7f+/v8df+2l877IcTj+hB66XTsmE8rfjGtS20UlLEKoR4ag6znuNx+t+Iw3HoSzM//MM/zJ/8k3+SP/pH/+hhP5R4DPdbLy2rhls7U6wLGA1ZZsgSzdZoxutXbjOYlA/1GCEExrOanXHJeFZLvYkQ4i77zxPeewiBLIlLKN77u27/Xus5Fr1sVlcKVmSe0ZE51IzIP/pH/4jf/u3f5otf/OJD3b6qKqqqWr4/HA4P66mJuXvNC7HW8e7WmLrxZKlme1ShdU2eGrrtO0ViD2r6I9uChRAPsv884bzHuYAxCucDk7JmWjWsdQvaRXrfeUbi5Dm0jMjly5f50R/9Uf7n//l/pige7gXn85//PP1+f/l26dKlw3p6YpfFeul6t0XVOLaHM67dGjOrLD54rItvBJjVlq1hiTHqgUVii213W6MZeWpYaWfkqXnkjIoQ4tm1/zyRJZrJrGE4rZiUDZ1WyuleG4DbwxnbwxlV41jvtqS1wDNChUPKk//cz/0cf+bP/BmMubOu55xDKYXWmqqq9nwODs6IXLp0icFgQK/XO4ynKXYJIXB9e8w3rm5xezijaixZokGpmM1QiiJ
LcCFQpAl5avjw+86yunL3iSCEwO+9fWs+Ij67a0T8cFKz3m3xzS+dlqsZIZ5Td50nUGwOpsyqhizV1NbTyhJO99sQYGtc0m1lfODCOloprMwmOraGwyH9fv+hXr8PbWnmj/yRP8K//bf/ds/HfuiHfogPfvCD/Bf/xX9xVxACkOc5eS4jvx/WYbRk3x6WsSbEKBIfgxCtFMrEgrHaeorMUNaWLDX3LBKTNspCiAfZf56oG0fVWNJEo5UmNVA1jsZ60kTTygw74xlfefsWpbXLIYnrKy0unulJduSEOrRApNvt8uEPf3jPxzqdDqdOnbrr4+LRHUbtxeKkkGeaSakwOq7PKq0IBJRSWOfwXtM4TztPaecJ41l9VzC0f9tdCIHGenyImRVjYpZF2igL8fzaf57wIRBCQOv4vlYKGzxlbRlMLNOyYVo1bI1KjFYkxmC0YjSt2ZmUfPh9Z5bnvxACk1nNcFYD0G1lUpB6TEmL9xPosFqyL04KWWbQWpGphFndUDWWABAgAN7XZKlhdaXgK+/cPjAY2r3trvGB4bSiatwyEEmMIk8TaaMsxHNs//ZcrRRqvhRstMKHgA+BwaQkBKitwwdIFIQA3ntSY/AhsDWa8Y2r23zb+89yfXvC167cYmdc4bxHKUWeGs6tdXn1wppkTo6ZpxqI/Oqv/urTfLhn0mGOsF6cFDSQp4bRNGY6/L4qolhVpHjnxgClFb1WhjaKqrbc2B6zM5nxoRc36LcLbmyPqa3Dek9qNKnSuPm2uxDiCUgI8XxatA+INSKaLDHkacKsalApNNYTPHgdSHTMxGoFaRIvYJyH2nryVGNt4MqtAaNZyebOFLvrxKUVOO+5vDlgVjd7Mifi6ElG5IQ5zNqL3SeFPDHcnl99LB4lEP8dCJR1Q20trSyhbhwQcD7g59mPf1Vf5/0X1rh8y1HWlmKeZfE+YK0nTw1porl6a0Svky+/N2ksJMTJ8KRq1NZ6BdvjGVujkl4ro9tKqWrLtLQxY6og0ZqqcRAgSfSdx1Gxbs35mM21PjCt7F2P4QMEFyA4tkYzrtwc0ns5XqwdRq2deDQSiJwwD2x5rDV1U7M9mgE80kG16CkynlVc357g5lcUuxMiiyUapVikRhhN406ndp6Sp3HNdjirePP6gERr2kWKdR7XxF1TrTyl187QOm4Bvr49ZntYSq8RIU6IJ1Gjtvs+GuuY1ZayaiiyhE4rpXBxh95kVuN1IEsNtXXo+fnM+4BzcenGEJdx7icA1gdUY7k9mjIpG5z30ufoGJBA5IS5X8vjqrZsj0tmleWNd7fJbg0PPKgedAUQAlTN3VcVy8/Pb0OIFe0QC8wa5+NOmkTjfCwws85xut9eFqUuUq+oWJhWTSreuLZNCDzRehchxOF4EjVq+++jlScUVcN42oCC919Y59zaCrcGM37vnc1lN+Zp2WC9J1F6vmwc7tSuPWQjisYFplXDzrjk3a3RE6+1E49OApETZv+a6iKAqGrLrcGUqnG0i5R+J6ds3Lxmo+RbXtqgv1Lc90oG4PUrt5mUNQ+TQ/HENVylYjalcQ7nY3CktSZNNKNpxY2tCdrEbcB5aui1Ic8S7PwqCKVY7xZPtN5FCPHkPYkatf33UTeOnXEZi9l9YFI1fPWd23TyjNpahpMqnidCYH79g3NuT6a2sY9Wa1ZVjss3d6isZ32lQGk59xwlCUROmANbsmvN9vxALrKETp6yNSqpGov3nuGs4re/ZvnAxXXe3RrvuQKw1nFzZ8LWaEZqdMyEKPYUet1PAFSI2Q3vA7W1KGIQUlYNYV71XiSGMO/M2ljPeq9gMrMooHfAljrpNSLE8fMkatR230fdOG4PZ3eK2Y1eLu1+8fVrTGY1tY3Z2UUQAnuXix/HrLFc356QJQbvA712Rp4lj/R9iCdHApETaP8I67qpmVWWdpHSyVNG874eaaJJkwRtPYNJyb998yZFlnCq10KpuMtlOK0oa0vVuOVSTSw+fXi7r0W
mpZ0PoFJYFyjmB3dZufh8jKa2js2d2fLgN8mTHfEthDgcD6xRe4hj1jqPcx5rFDujkto68uUumIDSoAjsjGc4F9BqbxDypPgQSIxiVjU01nGq11oGI3LuebokEDmhdo+w3h7NeOPdbfqdnK1RiXWePDOo+QJLajRWa8o6VqErYhCy+0rEeU/VBLx37+mADwGsC/hgCQQIClSgcYHKOsy8mVmSwIWNHte3xocy4lsI8eTdr0YNHu6YndVNvFgae6omZkXrJp6rFkkW58OyWN4d0rDuxnrGZcPKvJh+OK3ZmNevybnn6ZKf8gm2GGG91m2RpYZyV3tktavKw4d4lWG0orHxNsNphfWePInbanfvkHkvK6KBuPwyKRvqJgY1eZrQa2d0ipTEaIzWFFnCaqeg3y7mPUX2nm2exIhvIcSTtahRe9xjdjApuXJzOG9GFiDEgKOZD9b0Pm7H9Q9befoeNdYzmdVoragaS22dnHuOgAQiz4DFyWFWNngfI/mFxW6WPIlzYWIdh6NqHKnRyy6Gcahd/JonVZrlfCBPY+ChlCJLDK0iwftAWVtSo7mw0SXPDMNJTWNj59XGOoaTWkZ8C3HMLGrUHueYXRapNo6NfovEKKy/s9slbq99Ct8DYFT8vyLWw1WNxXlPLeeeIyGByDPgzskhwfo40yWEEJdbrCMxmtWVgjSJXU3DvLh0uR9/flWSGI1We2s+3osATCu7fC6LtWF2rfku6l3Wuy2qxjGe1jLiW4hj7HGP2d1FqkWeHkkRqIJ4oaYUSjGfV6OxNp6frPNy7jkCUiPyjOh3Cr7lpQ1++2tx2WWxNbeVJfTaeezvMTO0srg8QwAXAmo+jE6ruIQClvo9XpbE7qvM134dw1ACsYNhABJjaGV6uRy0u95FuhsKcfw9zjG7u9B1sfyxOFfsd2gLM/OnF0K8gNNa0coTytpxZrXDR953lo4MxnvqJBB5hvRXCr79A+f43bc3l1t08ywuhQwnNSvtjBfWV9jcnjKeNUzLhizR5InBe89svt32cS1OKot7UCqmXa2LA6yUUiQ6XomUjWNWN6xSzG+rZJucECfIox6zuwtdfQjM6oZAnAPzlEpCltlgpeJkX6PVfIeh4X0vrLHSzp/OExF7yNLMM2Z1pcW3vHyGs2sr+ACTWbNMm54/1WU4qSmtpcgTzLxItZ63XldP8ISg1bIDPK08YWU+grtdpGgVm5ttDWfvKfARQpwcuwtdq9rh5nVp+885h52LWDRgRCm01hhtOL/e5dzayiE/srgXyYg8gw5Kmzrnef3qnZbK7SJlOku5vj2mco4iM6RJgg+WOMDu0R93944bM5+UqYhXQsbEoti6iVcfvU7OcFpJwyAhnhNKKc6fXmFzEBsountc9Rz2pYlSsNLK6BQJ3sfmZa9eWJPlmCMkgcgzanfaNITA77196662zFlqyJLYyTBPEzpFyvaoxGjNpKxxPjzySWERjCxSn1mqCYFl1mUx8C5NDeNpLQ2DhHhODCYlb7y7zWBS0jxi08T3Kk3ihZACtNEEH0iMYbXXkgF3x4AEIs+Be7VljmulijxbDJBiHkWE+da6x782SYwmMbDR75ClZrlLJ0sMKGisk4ZBQjwnBpOS1y/fZnMwRSnF6krOqGweeUbM41CARhEU9NsF3XZGbR3vP3+KjdW2ZEKOAQlEngP3asusVSwgdS729Yg9RmJTIWMe/uBU8yrVReFZYjTdVk6vnTOr5w3W9kz3jRXz692WNAwS4hm36B8yqRogkKcGozUrhWJc1ncFI2rXVpoAGGIWo3nM7GmiY0+jJDGsdQvS1OCmgSw1EoQcE3I5+hzYXa2+W5bEE8K0bOaBiqI9L2K19t7ZEKXu1IIogABGQ5Zozq6tcOF0bHh0/p6NjyqMVvQ6sY5FClaFeHYtMrJ5upjMHc8eidG0s7uvhROtYnNFFW+rE02eGfQBMcODwog4p0aRJJozq23yLJH27ceQZESeA4tq9a3RjF5
nf3bCx6uO+U4WYzQFKeNZtadqTM3/Exb7c1WMYhMTa0ASo+m2M9a7LTyB8bSmlaV7hvP5KnYvdC7gTeDtGztc3lT024Ws0wrxjFpkZPN5BsL7uJ0fQGtNouenFBU7HSZGUwePCoHUKEKA1BiyxFPuqy3JUkMIIdaa7aqWDwG0hiJNKLKU1W5BkSWSjT2mJBB5Diw6r06qmuGkpl0kGKOZlQ1l42jnKWliYpAwLyot0tjkJxBIkxhsLNohOx/XYZRaNAaCLNW08pSysXuuOFZa2XIHz854xjs3BygCnVaKme/m2RrNmFS1dDMU4hm0yMhqFYviZ1WDzhSK2DJA63h+SRMdGzB2crbHM0bTetl4TKnYBqC2brndV8Eym5omGq00RWbI04QiS2KnVB/otTJMommsY1paad9+DEkg8pxYtGXen51IjWa1U8wDkbDsLbI1mtJpaWaVXR6wIQTMvAX8snGZglaWYrRiZ1zi/eLgz7EuXr0opegUKe/cGOA99Ffy5X3qxNDraIaTmqubI3rtXE4QQjxDdmdku600Dt6s3Xw45/xcEgJZYljrtsizBK0VZe2pG0eWxiDGE7s/1zb2IFGK5WgKo1XMmqQJnVbGaxdPAdw539UWrRXrXdklcxxJIPIc2d9fZHs84ytvb7I9LpdNfvI0Wa7lGqPIM0OeGCrr5nUeijSJ6dIsNaTmzvbcWAyryJK49vq1q1vLLMe9du5AfNx2kTCYltJXRIhnzO6MbGwhkDMtG8raxq6mRpNmmjyNAYgPAaM1nTyNS8cBynkg0S5S2iHO1NJK0djYpTU1mk4r5XSvsyfQkNERJ4MEIs+ZRX+RwaRkc2dCQBHwZIkhBJhV8QThvMcHRTtPOdVrYV0cXOd9WA7Je+WFNV6/fJvhtFoWfrXyO7Ntdmc57rVzZ8EYja+s9BUR4hm0PyObZwlZamjnKedPd2llCddujZfZWq0VF0736HUybu3MGExLIC7BrHZanD+9QmIMjXU01pMmmjQxdwUaMjriZJBA5Dm02E5XN56N1RZbw5LaxiuTLNWUlcP6QKIV3XaG1ppMA8TCsOGk5lS3Tb+dk2eGc0UHrTVaqT1bdXdnOXbv3NGJues5SSW7EM+2Bw3KW2RO93/uwumeZDWecRKIPId2L5OkieFUTzGcVlSNI4Q4tTfThpVWSlV7jHbLwtLdxV7WB0KAIo/zY/bbneXod/L77NyRSnYhngf3y1Dc63OS1Xj2SSDyHNq/TJJnCadTs1xvRUFVWV4+t8pwUu9Jl+4u9hrP6ofOctxr587+4EaudIQQ4vkigchz6KBlEqUU2bxItbExA7K60rpvWvT+/UnuznIctHNHKtmFEOL5JoHIc+hRAogHpVIfNcvxoHViIYQQzxcJRJ5DT3KZ5HGyHLLmK4QQYkECkefUk1wmkSyHEEKIxyWByHPsSQYQkuUQQgjxOCQQec5JACGEEOIoSfcoIYQQQhwZCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHRgIRIYQQQhwZCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHRgIRIYQQQhwZCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHRgIRIYQQQhwZCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHRgIRIYQQQhwZCUSEEEIIcWQONRD5/Oc/z3d8x3fQ7XY5c+YM3//9389Xv/rVw3xIIYQQQpwghxqI/Nqv/Ro//MM/zG/+5m/yy7/8yzRNwx/7Y3+MyWRymA8rhBBCiBNChRDC03qwzc1Nzpw5w6/92q/xh/7QH3rg7YfDIf1+n8FgQK/XewrPUAghhBDv1aO8fidP6TkBMBgMAFhfXz/w81VVUVXV8v3hcPhUnpcQQgghjsZTK1b13vOX//Jf5ru/+7v58Ic/fOBtPv/5z9Pv95dvly5delpPTwghhBBH4KktzfxH/9F/xBe+8AX+j//j/+DixYsH3uagjMilS5dkaUYIIYQ4QY7d0sy
P/MiP8Au/8Av87//7/37PIAQgz3PyPH8aT0kIIYQQx8ChBiIhBP6T/+Q/4Wd/9mf51V/9Vd73vvcd5sMJIYQQ4oQ51EDkh3/4h/npn/5pfv7nf55ut8v169cB6Pf7tFqtw3xoIYQQQpwAh1ojopQ68OM/9VM/xac+9akHfr1s3xVCCCFOnmNTI/IUW5QIIYQQ4gSSWTNCCCGEODISiAghhBDiyEggIoQQQogjI4GIEEIIIY6MBCJCCCGEODISiAghhBDiyEggIoQQQogjI4GIEEIIIY6MBCJCCCGEODISiAghhBDiyEggIoQQQogjI4GIEEIIIY6MBCJCCCGEODISiAghhBDiyEggIoQQQogjI4GIEEIIIY6MBCJCCCGEODISiAghhBDiyEggIoQQQogjI4GIEEIIIY6MBCJCCCGEODISiAghhBDiyEggIoQQQogjI4GIEEIIIY6MBCJCCCGEODISiAghhBDiyEggIoQQQogjI4GIEEIIIY5MctRPQAjx/HHWcv13v8Z0e0B7rc+5D30Ak8jpSIjnkRz5T0gIgUnZYJ0nMZpOkaKUOuqnJcSR2x90lIMR7/zCL6HfvU5aVwSleH39FC/+e3+GV//gdx710xVCPGUSiDwBg0nJ1c0Rg2mJ9wGtFf12wYWNLv1OcdRPT4gj88avf4m3f/4L6Bs3MNaivKdVl6wohQJy70hCIHl3yuC//1t88Wtv8h3/tz931E9bCPEUSSDyHg0mJa9fvs2kashTQ54atFJsjWZMqprXLp6i3ykkYyKeK85afuf/9f9m8oVfJm8aqrU1fJaxcu0Kp12DBqqgmWiY6liqtuocg//PF/jGB17m1T/4ieX9yBKOEM82OaLfgxAC37i2xeZgCgSmJSilyNOEbiulqh1XN0eEELh2aywZE/FMWwQNl3/jt5j9iy+xsXOLtQAjDea2xxrDaWtpaUUKtAl0Q6DxgVlQTBR0neXtn/k5Xv4DH+Ptf/k7e7IpLkl4/exZXvx3v5fsxYvMrl2noxVnX3wBc2YDpaX2XoiTSIUQwlE/iXsZDof0+30GgwG9Xu+on85d3t0a8aXXrxECMROiFd4HGhuzHr1OTmMdidF4D+0iwRiNc55packzs8yYCHGSfeOf/Qve+Zmfo3/9Gme9JQW0CpReUaJwOqXA08ZiFCjAqZTbyRlWm02SUGMDjIPiRtGm+FN/guGv/DOS2ZSm14Msg7qGrW1qa6mKNl0VyDW0ul3OfvRDnP2+P4K5eOGofxRCCB7t9VsyIo8pZjlGWBfoFAlaxasxoxU6U1S1Y1rWVI2jlaWc6reWSzE6MfQ6muGk5urmiF47l2UacWJ98e//DOYXv8BL3tLVAaOgCTHYSFSgAJwyKJVgvF32DEhDQ8ePyEMDCoyCNASGdcnOr/5z0tmUZuNOpmOKYtxYPuJqXOXYOneecVDcHo6pf+O30Ds7bPxf/qwEI+JEkWV7CUQe26RsmFYNqdGEQDzrzikUaaKZlhYfPL12TtU4tIofV0qhlKJdJAymJZOyYaWVHdn3IsTjcNbyr//Rz5P/wi/QCwFPQBGDEKNAKQ0oVPCshBnOg9LQ6BwCJKEhCxVexYZGAdAKznmLu32TSW8NgKleJ/FTtrZu8JK3qETjvMdaR9Zuk55e4/LtAe03L7P6L34Lff4FWaYRJ8JBGx167Zz1XotWlj43gYkEIo/JOo8CiiyhbCx6HlyEEPAh4LynshZC/GNjqtBKkaeGXjsnz+Iyja8s1vmj/naEeKDdhaOja9fZ+dLvsPHG1zhLoFLQVorGFCS+QWEBRZWt06lvo1VYLskk3tLoHBVqdrJznKreRYcaT7w6zJXiom/Y3rnFZLTD+PQKLrHoqmItUUyBhJRRfZbe5E1oFXRXWlyZlZz/+htkt26jzmwc7Q9LiAcYTEpev3KbqnbLZftZ2fD2jQFvXt9hpUjJs+SB9YTPQkZFApHHlBiNMZq20VjnqaxDA43z2PmbD7C4MMs
Tgw+BWW1prOdUr4XWCq0ViZGrN3G87d6Gm81mrDYlRYAsQDVfVnGmRW06ZG6T0nSwKmW1uQWKGGQQMx8Gh/FTAE5XV7E6p1IFHTtEzUOYTCn6IdBzNWvX/w1vFm0mwZN5T8d7OtqSTN+lV+1Qj2DLGN5NW9iyIszKI/xJCfFgIQSubo6oakevk6GUoqotg0mF9x4UOB/IEn3XDszdDsyotHLW+ycroyKByCPYE3nOf+Hb45L1XsH2qGQ8q/EhLFdpknmgUdYOYzSpiVt7K+sYTirSxHCq16ZTpEf6fQlxP2/8+pd456d+OtZsdLvoyRiIQUVXexKlSBQ0eNrNJijo+Alh13qlCnDQuTAJlsRZPDreWsWTkg/x/mdBsaI8r5RjrNN0tKelwaHoVO/SBMgInHWe3DrqyRTVkuJvcXzsf90IwGhWc3s0pZPPg4QAw2mNdZ4iT/A+UFsHKHqd7MB6wkVGpawsaaIIBGal5dbOlDevb9NpZRQPkVE5DiQQeUgHRZ55aoDAeNrgvMcYTa4V1gXA0S4yFPN6krKhU4AxGq0Uo1nNxmqbCxvdYx+tiueXs5a3f/4Ly8JRypKWd8xMQu4siTboEJcWMx8zEVOzgiZQuAkBcMsgRBFPw1DrghAgCRUAhjvLk4pYK9JSUAA2KPoKPpp4CgV6fryszA8bG+KS6Gnl2bqxyde//Dqd07ek54g4crtfN8raUtWWQMyoT6uGunH0OzG4qBobawhRKBWwzjOrLZDQys2eesJFRmU8ramtYzpsaKxb1isapUgTQ7+d3zejclzIUfoQDlrLc84zGFeUdazxmDUNCvDGUKQJWsfCVAJkiaGydp4Z8WilyBLDi2f6x/YPQzw/dtd+FL0VCIFyNKG91sdbi75xg6bXQ2lNsA4VAso7TqvAJF2n2+ygcMv7y/1sGZxAXLYZmx4GT9vFbIoLhiudV1mtN5mZLmert8l9DErGSY/t7AW6zW1Wm1skCgjQNjGM8QFmaY8rrW/iwuyrtJshaIUK0BmNuPp3/wHvFgWvb2zQ/30foXv+nDRDE0/d7teNxMSll8Z5lGKeOVdMq5gpWQQXWut5ANLgXGB7NGNkNFliMFot6wknZcOt4YRJVVPWDuc8yz4cAWwIDKcVvU5+z4zKcfJcHpWPUtxz0FoeQDNPndXWkaWalkoxWuF8IBBQinkE7LDzP5LgLSmxWLWVp6yutJ7idy3E3fbXfrRtDcA0yahbLXxeUMxmlKurANi0Q5Wsct7eptCKVnM7fpwErzRWJWigCLEGZHFUddxwz6jvVphwfvJ1htlpzpZvkocGq+ISZceOYrAzv63efUfzf6fNmCQvqclYiZltAEwIVJ0O2jlOf+NrJN94nVtpQd1q8frZs7z0p7+PV77rY0/2hyjEPnteN9oZt4YznA+08gQClI1dZv4a65iWDUopGuspqwbrA4lRJDpG4dOqRinNrG5YpaCxjvG0pqodzu8KQoiHSiDWmOyMZqwU2bHfofncBSKDScmVzSFbo9kyEFnvtri40TswOzEpGwbTknaRLIOQEAKDSUVtHWmisDYsi06zNPYQaayjbmL0qua7BYyJf2hbo5ILeSq1IeJQPGxb9N21Hy5JaNmabF4o521NTYt8sMNqU3JzexvfbpPUNaebLdpG7YkNpmaFKumwWt/ckw1ZWGzPBUWlCxqds2J38HZAFhoArEpoVMZ2cYFLk99DERil68xMhzPlZQA8hsudD3J28jq9+jpj06Xf3EIrsEBQhnQ2o1OVy8BHe4dvtUivXuWdn/ppAAlGxKHa/brROL9n2QUVs+S1dWilcdz5/HjW4H2YZ01gVtu43BICaQpbgxnn1lZorKd2Huc9BDAalpsvVazJCsC0stTWkSTHe4fmcxWIDCYlX37zJoNJRWwoGxfURtOanXHJh9935q5gxDqP9wGza2fLZFYznMb7aADnPel8V0wrS0iNZlJ6drcXWWzv1Spu7x3PakIIxzJNJk6
u/UPmFm3R92cC9tR+nD5N+9o10hAYJSkKaDtLMZ0yeeEFwjtv8dJwm+lohy6BrmH5Im9VilUJPbdDFWYEpTGh2fO3v+CVYZSs029ukfoaqxJafoKa388oWadWCbezFzDecrp8m8SVGHXnCs7g6NktTPD0m220iVeWhHiBcL14ER+mZGHKxCQkeFreMQWajQ3SzU3e+d++wEvf+VFZphGHZvfrRt245bLLglaxnUOvk1PWlklZAwrv57st58GE8yHWWAGhcbx1Y5v1fosiTZbBit5/oIU7GULnY5CjVHKsd2gez2d1CBZzYbZGM7z3pIkmzxLSROO9Z2s04xvXttjf8T4xGq0Vbh5JVrVle1TGPiJqUVinCAFq65jNI1A/j1K1UqDUslgvTQztPGVWWW4NZk/95yCeXcsMx9Wr+Hab5vRpfLu9zAS88etfWt72+u9+bVn7QVWRe88w7QIap3JGaZuWazA7OwC0VGANf9eViw6WRmVxa24AFQKNyhkka5SmAxAzICrHqYTUz4BAo3Ompk9p2mynpxknfQJgvCP1JSZUXO18E8PsDNOkzyTpca31KgDr1bukypFjWXfbGEARqIMiqzfp2gmlMSgUk2Qdq1soa1Fa03S7qOs3uP67Xzvk34Z4nu1+3VhchHp/57XFzy9CiyxhdSVnrdvi3HonvpYQsxuNi0GIVpDMm/BMSstX3rnFrG5oZfFoDAHmicz4/q7n4QPsjEsGk4p+uzi2WfjnJhCZzGpubk8AKPIEo2OazGhNMV+3u7k9YTLPVIxnMUtCCPRaeeyS6j3DaYULATPfhtXM+4V4H3DOUzaWsrZADELyzNAtUlZaOZ0ipV2kZKkmhEDV2CP8iYhnySLDkSzaordaKGNQrRbNxgbJbMo7/9sXcDb+zU23Bxhr4wwX6zDe4lRcQNGhwaocHTwrk9F8WUVhgEztPWl4DNv5OaxK0VgUgVK3GKansfOwxauEiVlhptuAYmq6oBRpKMlcSRIaUl9SJx0GxTm+ZfDPOVe+zenqKtpVbGfnyN2Mwk2Xj6vmb7vrRxSg/JTUTWicA9fgVQtncoIx8UZ5jnaO6fbgEH8b4nnXKVL67YJp2RDmrxfVPDMSQqBxnjw1JEYxqxydPGMwjq8tqVZ7trr7EIMS62I2fXs046vv3GalSNHz+qjF234KqBrHrGrorxzPQlV4jpZmhrOayjqKNF4p7aZQGKMoa8vbNwdxT/esWlYiJ0bhvGdrWDKr4lpe4xxN45czMtw8pcbujEoIZIkhTcyex4uV03FKrxBPwu4Mh9KagJ43EPPLTICeZwIufOSbaa/1cUkSB8klhqAULTfGz8OOthuQBlj3lkJDopj3+gjL5ZjCz/DacLp8mzQ0OAyzpENAs17fJPcljUppuTE5E0qzQsuNqVQOwDRZRemAR9P125yZvckwO7M8OnvNbUrdYuBGJKFhvX73wO99caGZqoB2HkPgNIFMK5LybWbBYEYt6ixl1li8MbTX+of+OxHPL6UU/ZWcK7eG3B7FzHesG4z1Gnma0MpTRtOGLNWgwvwCVtH4g0KKO4zW7ExKzCIKuY/EaPrznTKDccX5U8ezHOC5eiWMqyN7fwnWearG0ViL84HXr94mTxI6RULVuLg918f21GliqBqLcYrg4x+bIuB84KC/ncbDpKxZaeXLtTkfPHXj6LVzTvUKxrP6RLfmFcfDIsPhs1hPUakOaaiAuAuGPEePx8tMwLkPfYDXz54lvXqV5vRpZiZlxdZM5kWobedYUZ62Vssq/Eq3sDpjxe5Q61hLZVVGadrkTRO3DaqYaezZAdvJaUChaUiCp+OGOGWYJV1K3aLwUywGgyNg0FhSX7GTbrDabHK1/RpFM6Bbb95VczJOVjHBUrgxSsVjzQM9PG2j5iluhQdKbykmYwrvMEozefFFzn3oA0/j1yKeU4NJybXbI1KjUXlCY+NIkNp6nPMkefxrXu+2WOsVfO3K7eXW3gcFF1Vj8fd
oELibVnHi+6l+C+eD7Jo5DrqtjDQxNNZh0rhmZ51nVjXL7U9KxV4EjXXc2K5RalFkukiN+WXA4X3skmc9e4IQBaRGz+fNxHTatKrpFCnWBirrSI3mwukuX728tadB2knogCeOpz0ZjlaLIoz23qCq9mQCTJLw0p/+vlhTcusWZatNPm7o2jgJNw2QqxiELNapCz8FP8XOa0IALAYdHE6lTJM2jSpo2xGNyum6AcN0HR9yCrvJOOmRBEfuJ+S+jLtkTE633uR66xVSP6NjY7ACQHC823o/07THmSrumrnSfo2VZgerUxJfE9DcKi5wfvy7aBRFEp+znz/vWYjHo/IeVc5QnRVO/bt/XApVxaHZvXX3VD+2aGisX3bdHs1qeu2c1y6eYqWVsTOOXbkb59CKXR15DuYXk621onEHRy2xrkTjXGBWxVlo07Lh2u0hK62cXiuj08qOzYXvU6kR+Vt/62/x8ssvUxQFn/jEJ/iX//JfPo2H3WOllXF2dYUQ4pqZ856ytnH7k4orzlorskTPi03jqpvRi5NxmHdMDfNeIVC7cFcQYkxc5tFa0cpiZXNjPYNJzaSK2xTTxPDWjQE3dsbkqWGlnZGnhq3RjNev3I5D8oR4BOc+9AH82bOkwyHB792iF7wnHY0I587uyQS88l0f48Uf+vdpLlwABbM0o9YahyJVcQthHaBRCcPkFMNkHYiFoTp4GpWThAYdLIRA4aasNZukwTJM+ngUK80Oa80NDJ6WnVC4CSYEEl8SUMx0l0G6wZX2a7zT/ibebr9G6iochsqs4FRCCIHb+Xk8mkkSC1y79U3adkDuJvSa25h5DYsnbuMdq4x32h/Ez78+V4oqQHZ6nZc/8NLT+8WI587+lg9KKbLUUGRJHGLXyamtW37u3a0R41lsTHavwGI/pbhrY8VuPoBzgWllub495u0bO2wOpvzO16/zm797mX/2b9/mX3393WPzWnPogcjP/MzP8JnPfIYf+7Ef47d/+7f56Ec/yvd+7/dy8+bNw37oPZRSvHphjfVuaz5gKPb6gJjxyFNDajS1i5GrXgy+IF5ZLS4B9bz6eRGV7reoelbzQtUiNSRa0ykyzq2v8PLZPlrDrGqoahuzIfN2vL1ORlU7rm6O7vtHJsR+iwyHbbVJNzcJ0ynBOcJ0Srq5iW23efFPfd9dmYBXvutjfPeP/99Z+9N/AvXJPwT//r/H5P/0J9lG0wQYepiYLrmbkIa4zDMzK+Q+rnsrPEElVKZFrVo4lZKFkiw03M5fYJyuLjMcQWk2swuMdI9xsoYjZSc/S9du8b7R7wCKa+3X+J3172GcrtGxA16efJkz5WVutt7H1fY38YHhb3G6vMyVzod5Z+XDGByr9c0YNJk2XscCvkYXbBcv0ATNdlBsKs0UhVFKhuKJQ3VQy4fdjNF4HzPsV28N+frVrQOX9u8nBLAPaAni5pPgy9qxiG9ciBfGk7Lh8s3hvJ3F0R8Ph56f/O/+u/+OT3/60/zQD/0QAH/7b/9tfvEXf5G///f/Pp/97GcP++H36HcKPvy+M1y5OeTGzhjr4jbeVp7QzjO2RjOaOq51Kx3nV/h5lbPWcYuUmzebUeytSwWWExNDYFmgaucRS7+TUWTpMrNS5AnWeobTmo00iU1olDr2HfDE8bXoE7LoI6LHY7wx2IsXePFPHdxRdH/fkZ0kwa+soEwCrqatApndJlUsc8Zduw1ApRJK06Lb7ICCcbJG3myjgMJNUaFhYrpYUlBwI7sIviEzhq1kHadyWm4SW8Lj8WheGv8uWkHuJqw022gctWlxevgOo/wMlzsfZGa6hKA5XV/e872M0zV0sPTrG/TdiA8Ov0RL1+QBZvOTdpZnMhRPHKo9W3f3bVQA4se1wmh4/fJtnPfLOqzDtOjk6kN87WqsY2s048rm8Mhbvx9qIFLXNV/60pf43Oc+t/yY1po/+kf/KL/xG79xmA99T/1OQe/lnI2dDr/3ziZFZmjlKYo4iG4
6Xz6BO9XFi+xETIfF5ZciTZhWzf5NMrgQ0CYu8ZSNo5lPXNwelxhdL+cFtEyCSjRVY+dt4uMfrDHHuwOeON5e+a6P8dJ3fvSRO6s2vV4sdK1r0p0dCmdJCCTq4KK4SudoLDp4NA4dAv1mc5klNKGhUV12shdo+5KJ6lG4EV5ntOyQ3E7Zzs8wNavUKmOUrDEozvHB0W9RmxYBxZfXP8m3bP0qZdKl19zm/Owb3MovkPqKrfwFUl/veU4tO0L5BhXAEOj44XI2TRtPqjWh30WdPvXkf/BCzC227m6NZvQ6es8LfAiBaWlZ77aYlpbRrCbRmoqncL6f73mPfa/CvKW84/ZweuQXvocaiNy6dQvnHGfPnt3z8bNnz/KVr3zlrttXVUVVVcv3h8PhoTwvpRQbq21uDTpsjWa0ckBBt5UymlZ4At7DovVAWHS6U2ADQKBs3N0ZERa3j1MTrYuRbitPyeadV6tmvoVLa9JUzwtg79zRIlo+rh3wxPFnkoQLH/nm+95m/1TduOWX2HckyyjeHqH0YmfYHbXKGKanKdyEthuS+QqvDDvJGsY31LqgcGNS3zBK1zg//Tp5mKFVRlAJ3js28xdpuwkdO6FRBZc7H2KcrPLB7d+k32zyxd6f4FX3JbJ535CzszdI5wPx2nZIS2nW6pt7riCtSriVX2S9ehf87E7wpO4U9nmteOfmDme8x2g5vsThUEpxYaPLpKoZTuo4KFVrZlXDrLIUWcKF091ld+5H/VNcXBA/qhBiDZWeHzmLMoO6cUd+4XusjsbPf/7z9Pv95dulS5cO7bEWfyx5ZhhOahrraM0bjul5s7IQFhmQ+KtbrMlZx3KQHcQgZXebXevj57MkZlsSE6Nio3XsY6Lj0CM37yeid2VepqU91h3wxLNhf98Rj6FSPQIKypJcBVxQNB7GpkujMkoddwCkvqTjhtS6oFEZw2SdhgyPYqy67KSnGWSn6DVboKHWLa613o9Tho4dMMxO827rVd5a+RCbrUtUOmOcrnOqvhYLvoPldhGDigRH5qtlMNTogtvZOYbpaYbZBjvpBgC1bmNNRsuPl8fiMN3AqpxcK1yScr23RrW9I11VxaHrdwpeu3iK9W6L8azm2q0RN3emlLWlcY6rt0bz7twK96BtMnNm3mHVKPXYL9xh3v5dzfcJBzgWF76H+uinT5/GGMONGzf2fPzGjRucO3furtt/7nOfYzAYLN8uX758122epN1/LFXjmMyamFbr5KyuFKy04m4Wo9UyAjV3NfaPv1w1/yNJjSI18XZnVlt0iozG+vmvPC5NtdIEH2JFc2I0idE01jGc1OSZ4cJG99hsqxLPpj2dVQGNIwvTuCNmOiNnPqxRgVMmBubKkIaavt2i0i1mpksaalabTTpuwLbZQKlAa749VwdHy81ouxHr9bt07RYGz1p9k5bdQXnoVbcwznF29iYtF7ccm2BJfUnia0rdxmHYzs4yTlYp3BircyrTolffwswH5rXciHOzN0nm7ytgmvSoVc6W1lw+8wKDbl+6qoqnpt8pOH96hcRoiizh7FqbF06vsNKK9YibgymtLMH5h4tE0tTEzRZaHbxT4iHdaUERl2fWVlpHfuF7qIFIlmV87GMf45/+03+6/Jj3nn/6T/8pf+AP/IG7bp/nOb1eb8/bYet3Cr75pdN86/vO8i0vn+H3feA8n/jgRV7c6C+DkZjZSFhbyQ8ORIizAZyLhapxToBnOG0oMkNi9K5xzbFnyKKzap4axrOaqnGsd1u8dvGU9BERh25P35E5TWz/njQ16byVuweMtyShoeXGy6K6zJd0bSxMHSereGXohDFoTWU6jEwPCLxbvIRVGaeam4T59ttz5Vv07S3e6X4LtS7I/YSt/Bzb+QsA9JpbbMwu45ThGyvfxtCscaN4mW90v52d9DQrzTaNysndhO38PJVuoQh07CB2dzUrBODM7E2CHbPlYODVXb1UhDhMIQSu3RrjPZzqtUiMiRelAXr
tjLpxrLSzB2YjFkWm7SyJ74W40zJL9LJ84FHFJpyKTpHyvhdWj/zC99B3zXzmM5/hz//5P8/HP/5xvvM7v5O/+Tf/JpPJZLmL5jhQSt1VqNPr5EzKhuG04hvXtujkKc7HbU/3EgvjwnIOxmLmTLedUdaOqol1IyHEP6qPvHqWbitnNIsvBosmM0Ictj2dVfMcNV+oDiGQ2xKIA7csirYfL69Yap1T6xaFG1PrHOMsKniSYNlo3sU3cCs9j9KKQXqaioJhuk5jCqamR8tNOFVdYaO6yqXxl+nbLd7ufgTtSird4c3OR6h0m2ASStNmmJ3Ba83E9PnA6F/RsdvMTIdZ0kUTWKuuk/iKQbpBt7mF1Rm3iktcnPweGs9IG7o+UNYl6WyGvXhBuqqKp2LRTyQxilvDGVVjl0WiWWLIUkPdeF69sM7rV7aomrszI1rFnlSdIiXPkrhzcz6O1z+g58hih8xBFJAmmg+/vEFiDDvj8ki7ex96IPLn/tyfY3Nzk//6v/6vuX79Ot/2bd/GP/kn/+SuAtbjZhGc2Pn0RGM0twbTWNfBvbdaLVrvJloRfJwfoIC1XkFhDSEEaus5s9qhk2dcvjmU7qriqdvTWXVzk6bbhTxHDYf0rWccFLkOJPOJNYu/+MxX6HngYUOsk8r9lCTEoLtWLVCad9uvkbspg2wDW7U5O3uL0qyggmOaxIzEt2//CqDQvmGSruKVwipDpQtKXZD6mnOzr9GyQzaU4VR9jUF6mlnS55sG/xJFoGu3cCgG2WkKNybzM9arq8vv03lPBnRHI2a93oG9VIQ4DNbFpplVHceHpIlGa03TOAaTMm6CmA+4O7va4d2tcRyKx3yDi9rV4yrRjGYNrczE/TVhcRvmpQN7m2suPrYcireYx5RoUmPIEk2aGG4NZ1y5NTry15+nckT+yI/8CD/yIz/yNB7qsYUQmJTNXXNfFnvCB9OK8ayOe7AfcF9xexTU3hFszIyMyjqm4EKc/pslhtev3qaqXayqNhrnPFujGZOqliUacegO6jtirGWmFJtZwcvNlCZdAxoKFydXBxQqeJwyFH7KxMTMROrGAHhtGGbrrFY3Wa+vE9DkfsKKG+BrwzTpUdQ32CxewviGzJdoBedmb7HW3ETjudJ6jdKscHH6NbayMyTB0m9uA9C2I3Tw8zk60Y3Wq7zbepXzk6+ggI4dLk/m6wQmSmPPn+fF//OfOrCXihCHIdGKqrY0ztPKExSxdUM13/UQJ/FCliaUtSVN4jTeEOJMMq00wXumZcNoFjPx49md+9d63rNEKZyPF7gw7/CtobHzd+a7PrPEcGatQ54mWO+4uT1Fj+JwvqN+/ZFLA+KAoqubowMzE712TmYM17fGe3bK3M+ijGTR9CwQt+WGEFgpclKjeeP6NlliONVrLVNhOjH0OprhpObq5ujIm8yIZ9/+viOzrW22f/YX6E+GgCJrdsgVhHl93ChZIwk1VmV07fa8KNUu76/txmShYbW+yU62QepLRslpLF+nZUcUfkrLT2l0xpvdj3KqvMIoPRXvp/GUuk1tWrFbKoFT9Y3lMeeJGZOu3drzPfSqTXTrVd5tfxP9ZpPuPGhpAtigaF77AJ/4f3yWJJNlT/H0LBpyLwbZBWL7hkU37TDfHp8aTXulYFo2hBA4s9YGFMNxyeawvvf9h0VzsniEmPlSTJ4l84nw9s7FtFKstHK6rZxAYGt7ig+BbjtbNt88ytef5z4QGUxKXr9yj8xEWbPWLbg5mNBY91AtZxbLNonWeOdBBRKlaBcpzgeyVNNr51y9NYoD9faVP0t3VfG07e474qzl1/6/v8bGaBuvFNOgaUxOi4YkWLp2a/kX61HMzAodu7Pn/s7O3iLxNVv5Obayc4yTPkn321mtb/BC+RYeRbfZYpis0bYDNmZvM0tjYfrN/CLb2Rn6zeby/pr5pN9St/lq/xP8vtv/BDMPTwKK262LfPPgN0FpcjfdtXSqcImh+MTHJQgRT53
zgVaWUDaOysaBdtY51Dxg0PMgwYeA0oq1bhGbi80sK0XC9nhv6/X9NR8+xBfwVh6LWJvGUVtHO0/otjKG04qqcXdGiLQzUFA3jmlpaRcpebo3BDiq15/nOhDZPSWx18n2ZCby1HN9e8w7NwfLtu4P24NXz7dXLVJvSWrI0wTnfWxoZh1Gx652u7uqLkh3VXFUTJKQXDiPfvcd3Hzw44yMlIDBoZYb0WNjpEW79xvFS6S+wqqUoAyFG3Nu9iYT06NvN8l8gwKutl/Do7lZvMRq9S5WZwRtGGUb9JvbpMHSabb3dGwqzQqlabNeXuX85Os4P28CFUCpQMfu0HJjzLwH/bznIJUx3MzbXHz1fU/xJyhElBhNPh90N6saplWzzLgnRt/JRMxfd9pFSlVndNsZ26NZHA8yt9itqcIiAI+s8zgXMEbRKlI6OuPUSptZ05CnCY2L2ZdeJydN4/T50aRGa8VqJz9wG/BRvP4814HI/imJC1VtuT2cUc/TaOohI5BFSZ8KzGfUxN9znsQ9B3re3n3x7zg87+77lu6q4ih1Lp1n9kWNBxIVSO0Qm3XJbBkb++mcMunSb24tazGcMgSdx3kzfkbHblP4Ke8f/SuutT9A5kt6zS0mySppqOmMt8n9jGFymqnpM05WaUg5W75JbQpqXfBO50NcmHyVlWaLzM9QwXFu9jWWC0HzQrx+vYkiZiwVwHy3z02dMLsgu2TE0djd6v1Ur0W7Srk1nJLo2Duqatyy6zbE836eJXzTxVO88e42t4azuHVX7xqzsK+rqjGa1W6LIjWUtWW92+aDL55iOg8kZnXD1nDGcFoxnsYAZG2lNZ8Sf/Dry1G8/jzXgciBUxIDDKc1jfXzX36IJ7jAcvDdfooYzVoXMx7LwXhqXqU8z3gsGshkSayCnlb2roB09yyCo24yI55PZ77lm9j6OU3mPTtK0fKw1gypicuMaahIm+rOGjix2LTROSt2QMsO0YuujcFxufNNfHj71zB4rM55N3+FFydfYcXGxmKNyhiZVW61LnB+9ia5m7HS3GYWesssR+EmLFbLKx8w8xSlRrHZukTH7tBrbuMDeAI1ilFWyC4ZcWR2t3ofTRtauaGVpczqZj5w9c5yyZ7zfiuj3ynijpd916laqXiRO38/0bGJZlk78izhwkYXrfVySWWVgnNrK3s2YrTzhK+8c/uBs3Ce5uvPc32EHjQlsbax34cxitqG5XZcWM4MWp58A7E6OTUmzrMwevnHo7XChBBnWgQIKtBYv4yAE21oZYqytvG289qUaWmlu6o4Uuc/8s38//qneOX2DTaIk6gNIRbDEUcYbHtFT3k682VIPa+g2snO0OgMp1LadsR2fpZXxv+Grt0hCQ1r1bs0OqXb3Jo/muJm62XW6nfJ57NlNsp30DhWXJw1FeYHXDI/ttJdIxgqpWjK29RuwmS+XdEqzThA8Qe/W3bJiCO16N692AwRl1hib5DdyyX7z/sXN7r8zjcSprWdByNhuZ1396yZxBicD6x3W/fcdntQn6y7ZuEc8evPcx2IHDQlMS6pxDXoZU8Qo3F+vtSiA8HvLRfpdXLK2uFDoFNklI1FK81KK2VaWcrGEnzMjrTyhOG0ZqWdcf5Ul8G4irt1qhiQ3O8PSoin4Y233uUrSZuXlCZVHgfUKIoQ0CrQoLmGInhFe7keGVuz95otMhdbxZtgWa1v0plnPqxKafkJL01+D4BheppBcprV+gYb5ZXlEugiCwJ3jrMw33WgUCQKqgBbQTNDk7khpYLGJNg0Z5xm1Ghe/cS3P70fmhD30O8U9Nr5Mitx0HLJ/vO+MYZvfmmD33njOtbNm2TOl/sXO3FWVwq+5aUzrK4Uj9yIbH+AdNSvP891IHLQlESIJ73GehKjsG7e3c4orA/4eQGRUvHjZr6W9tLZFdZ7rWXqbWswYzirKHxgFgJKQ5HF+9/9yz5/6uD+JUIcBe89X/ilX+dcM6Na7XN5MqPtPBmWSYhN+to
68JqOwckkxKZnhVKY0NBbZjqiZB6E1LrgRut9XJx8ZRlwNDpDE3uE6H170gKKWmd4DARP5mdYNDtaMUUxaa1AVTNYWyf1nkyByTNqbUhv3cJffEFqQ8SxsTsrcdByyUHn/Q9cPAXA7761SdnY5RFitOLs2gofffXsewoY9gdIz3Rn1eNuf2TofMBohQpx/e72cP4xo0h07EtgtKKVJ6y0UkJQfPDSBhur7eUvcP8fmtFxk6714a5f9kFpMyGOytVrm9x6/Rt8ZwqT1hrl+hmuunOsTr+GSjSj2nJ28yYXcrhZdNGVJfOBNTejrQLp/O/acefkohQYH5dlFkGIJ078fWH2jeVY8t0qXXC9eIms2cEqTbd6FxtggKbvHM1sSsdZujffZau7iltbxVUV6Wgb225LbYg41h72vP+Bi6d45YXVeQfuitQYLm6s0G0XTyRgOC6vP3KkcnDq7PLNAVXtaOUJs8riXNzhopWi285Y6xZUtWe919oThCwcl1+wEI9iPJmhqopcwdAYtIJOOkb3VpjOSvTODhvak3jN+nTMOFvHJy2a2RUmATIgUyx3g5nFlngcHbtzZ6lFKRqd312sTfzabVsxLW9h/QhvMhoUGzj6FioFo7XTTOqK9eEOG6MdBtWMumhhL17gxT/1fVIbIp4ZxhhefmHtqJ/GoZJAZG5/6qydp1zdHBGIRaYuBNppSr+dzfeFOykqFc+clU6LkOdU0xGJczSJIg8jprOSyY1NvsnVtDRYBWOlqesBbV2RJJraeVQIJEA5b7ZUKGh8rOnYNOucZ0IRKjQOE2omSY+OHdwJSAJc9xqvwIQhTimCr+jPd6JZYJLksNIhqBVura6SX79OubrGB/7i/5Xz3/pByYQIccLIEXsPd7IkfXbGJZuDCWVt8fOe/lJUKp5FF85vcPq1V3nri7f4oJ6xY1YIwNbWgJd8Q6YBpakCOBV3nY3dhMxo/EqXN8uGM60M+5Fvxf6L3+KVee8RFLh0hdpZclfhAqS+Infx8/sHiQag7SyV0RA8KyrQBEWpNVunXkXrnDyMUMZQra1hZjO0MRKECHECyVF7H4ssyUor48Lp7rEo6hHiMGmt+b4/9l383NvvsPbuO5yyAwYmRU+nrCUepRXOGDazl6hn18hVg9aGkXV0ypKNdpv2qy/z7le+RqY12yiSAJkKXKiuUiiHne9GW6niHJnxPAgJKm55zwkMlabSho5rWAkxG7JpDKP1M/iVLmrX0DvyHD0eM90eHMnPTAjx3kjrzoe0CEpWVwpWWpkEIeKZ9YH3X+L7P/3vcf0j38ZbKsUNh5zD0TGaut3l5voGOlOc0h7jPd7Hret9AxdevsjW1pBkNqM6d46dJKdSilvacFMFJj7QhLDMgDjgZutlBkmHaVC8a1JuJwmt4HEmYae1wlurp3krLRhunIPuCh1/mzyM7zzhqsIbQ3utfyQ/LyHEeyMZESHEXT7w/ku8+pkf4uqVG7zzb36PX/+5X6IfZpSdFZo0pUhrtOqwVpYE5wje0z6zQfnRj1L//BfwvR7KGG5sfCvh1pv0mi1KrRmowCk8hsDMK3aUYorDOEetNMO1U4RWi+bGDfQn/yAvfdd3sPHa+/jnP/aTpFev0hQFSutlTUnwnnQ0wl6UVu5CnFSSERFCHEhrzaUXX+AP/IlPkv++b+dr3tCpSwiBMs240Vvj+vppvp60aFbXaP3B72K8toaxFubTbvNWYNrPGCcZaQhooA6KaVBM59One+U1SgM7p06jul1UXdMUBZe+6zu48JFvJisKXvrT34dttUk3NwnTaQx+plPSzU3ZrivECSdHrhDivg6qGylbbaY+MB5N2Eg16x94H+nv/w7aW0NckkBdQ6tFEQbQ7TDttJiWJXo6Zds2VN0+rZ1tqpUVSFOYZzruleFYbMd9++e/gL5xAz0e442R7bpCPAMkEBFCPNCibuTXf+6XGH31K5waDFhRilZ/hTMf/RY2/vgfxly8wLlzZ3n
97Nm4jJLnKB2TrkprQlFgRiPspUu8/Cf/GJf/nz9DOpvSpCmEEDMco9E9MxyvfNfHeOk7P8r13/0a0+0B7bU+5z70AcmECHHCqRAOmEN/TAyHQ/r9PoPBgF6vd9RPR4jnnveeq1duMLt2nY5WnH3xBcyZjWXAAfDGr3+Jd37qp0lmU5puF/IcqupOkPGpf59XvutjvPHrX7qT4XAObwzh3FnJcAjxDHiU128JRIQQT9zDBhnOWslwCPEMkkBECHHkJMgQ4vn1KK/fclYQQhwKkyRc+Mg3H/XTEEIcc7J9VwghhBBHRgIRIYQQQhwZCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHRgIRIYQQQhwZCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHRgIRIYQQQhwZCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHRgIRIYQQQhwZCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHRgIRIYQQQhwZCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHRgIRIYQQQhwZCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHRgIRIYQQQhwZCUSEEEIIcWQkEBFCCCHEkTm0QOStt97iL/yFv8D73vc+Wq0Wr776Kj/2Yz9GXdeH9ZBCCCGEOGGSw7rjr3zlK3jv+Tt/5+/w/ve/ny9/+ct8+tOfZjKZ8JM/+ZOH9bBCCCGEOEFUCCE8rQf7iZ/4Cf6n/+l/4o033nio2w+HQ/r9PoPBgF6vd8jPTgghhBBPwqO8fh9aRuQgg8GA9fX1e36+qiqqqlq+PxwOn8bTEkIIIcQReWrFql//+tf5H//H/5G/+Bf/4j1v8/nPf55+v798u3Tp0tN6ekIIIYQ4Ao8ciHz2s59FKXXft6985St7vubq1av88T/+x/mBH/gBPv3pT9/zvj/3uc8xGAyWb5cvX37070gIIYQQJ8Yj14hsbm5y+/bt+97mlVdeIcsyAK5du8YnP/lJfv/v//38g3/wD9D64WMfqRERQgghTp5DrRHZ2NhgY2PjoW579epVvud7voePfexj/NRP/dQjBSFCCCGEePYdWrHq1atX+eQnP8lLL73ET/7kT7K5ubn83Llz5w7rYYUQQghxghxaIPLLv/zLfP3rX+frX/86Fy9e3PO5p7hjWAghhBDH2KGtlXzqU58ihHDgmxBCCCEEyKwZIYQQQhwhCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHRgIRIYQQQhwZCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHRgIRIYQQQhwZCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHRgIRIYQQQhwZCUSEEEIIcWQkEBFCCCHEkZFARAghhBBHRgIRIYQQQhyZ5KifgDh8IQQmZYN1nsRoOkWKUuqon5YQQgghgcizbjApubo5YjAt8T6gtaLfLriw0aXfKY766QkhhHjOSSDyDBtMSl6/cpuqdrSLBGM0znm2RjMmVc1rF09JMCKEEOJISY3IMyqEwNXNEVXt6HUy0sSglSJNDL1ORlU7rm6OCCEc9VMVQojHFkJgPKvZGZeMZ7Wc004gyYg8oyZlw2Ba0i6Su+pBlFK0i4TBtGRSNqy0siN6lkKI593D1rAddLvhtJKl52eABCLPKOs83geMOTjpZYzGVxbr/FN+ZkIIET1sDdtBt8tTw6yyhIAsPZ9wEog8oxKj0VrhnEcn5q7PO+fRWpHcI1DZbfeViNEKBVgfZAeOEOKx7alhyxN8CDTOc3MwYVLWvHYpBhIH1rpZz/WtCc57zq13SOfnOJ0Yeh3NcFJzdXNEr53L+ekEkEDkGdUpUvrtgq3RjF5H7zkYQwhMS8t6t0WnSO97P7uvRKraMqstCsizhCJLJA0qhHhkixq2srIYDbeGMxrrgIBSimnZYIzi2149t6fWbXEes/FeUApG05o8vbMELUvPJ48EIs8opRQXNrpMqprhpN6TupyWljwzXNjo3vdqYfe
ViDGKsnE47wkBQt2QGM2N7TE7k5KXz63SzlPJkgghHmhSNtwaTpiWDZOywYeAmWdoU6OwznPt9oi1buvAWjc/L0hNE03VOBrrydI7mV9Zej5ZJBB5hvU7Ba9dPHVnbbWyaK1Y77YemMXYveum2065PZzhvKdIE5z3TEpLWU8xWrE1mnJje8xqJ6fIU8mSCCHua2dcMphUNNYDgTSJS8TWBXzw5Kmmajzv3h7FWrd9mVutVAxMQqyHm9UxR5IlBtSjLT2LoyeByDOu3ynotfNH7qy6e9eNdYGqcaRG43xgOr+CCT7gvcL7ePKY1pYiT6VYTAhxTyEENgeTeXbVY4xGL85HKuCcY+Y8Wikms5oiS++qdcsSg9GK0bQmENgZzRhqRWoMrTzB+sBGv/3ApWdxPEgg8hxQSj3yOunuXTd14/AhkMzXbpt5utMH8MQKdq3Aec+sajjVazGaNlIsJoS4y6RsKGtLkSaMbc3i7OB9LFZ1PgBx6aUaOTqFp2osZ1Y7KB1vXTWWxrrlko5SUDeWyaxmZxJ31PRaOcNpJRdDJ4DkrcSB9uy6UQqtFHXjqPetuSoVr3BciOnSqnFYF/YUiwkhxMLiIqfbzkApGudpnKe2bh6E3BGAcVmzPS55Z3PAeFbjfWB7XGJdoFOkFFncxltbj9IKDaSJYVrVvH7lNoNJeSTfp3h4EoiIAy123UxLS2IUWaIpa0fwgd35DaUUIYAKcV3Wex+vUuaFscNpJR0PhRBLidE475lWTcykukBjPf4+pwfnA2Vt2dyZsjmYMKss7SKl38mxLhBgnplVmCTef5El0kH6hJClmWfUe524u3/XjdYaHzwB5tkPUMTqdaUURsdCs8TEk8G0bBjNar5xbStmVKTjoRDPvRACw2nJZNZQNpYiS4CYzTiIVsRdesTzi1aOxmramaHbydgalNTWLc87gbjEU3lH1TjZxntCSCDyDHpSE3f7nYLzp7q8fvn2/L7uvk0IoFSgcYCK+/qdd9waTDFa08nTeIUiHQ+FeK4NJiVXbg55Z3NA2TQ4D7PKYsy9L5D2Z0kSY6gaS42i8QHrPQrQSqPmF0dBBbwPTKuGTiuNtSfWMZ7Vj31hJg6XBCLPmIeduPswGZPBuOTtGwMCsNopGKmKoKCqHd7HoIOw62QRoKwb3r0dI5ZTvTx2PFTS8VCI59nivDSZNfMtu/PaMj+/iHloAaP0vIVAQydPsFoTCChiRiSEGLBY66lqi/Oet27sUNZW5tEcUxKIPEP2T9xdvNDvDwJCCFy7Nb5vxmRnPONffe06g2m5XFqJJw5QxDVZwqK2/Y7aBsCSpZrBpKKsHb12Rp4l0vFQiOfQ7vNSCJ6ybvDz5d1HrdyorUOhyBJDbWNQE3fsBQJhuVSczutQBuMKH5gX0KestFKcl+zscSOByDPkYSbu3hpO2JnM8P7eg6IAfucbN7g9mqEIeKVwjWd/k8LFSUTtex/iVQkhMKsaGus41WuRZ4l0PBTiObM4LxkNNyflMgjRWoEP9y1S3U3N34yJazBKKYxRBBSNtbgQC+mNhnKeCamsRStNYy3TMraCX+0W9DqZZGePEQlEniEPnLirNZNZQ5ElnOq3DsyYXLk5pGosO+OSEALGKPw8E3I/i4wJML8CiVvyWnlC3XiG05qNNNnT8fC9FtQKIY4/63wcLVE1OBsHZxJipuRRUiKBeG5pzZuZVU284FlbKdgKsWiVEJZLPsx35CjjaVzc3VfWlklVc2a1I9nZY0QCkWfIgybuzqrYjGz9PhmT26Mpo1kNxCuP4AONu//ZIhBTo2bebCieDKCxniJbzIOwVI2lrB3r3RbOeX7v7VvvuaBWCHG8JUYTiEGA1hqtY4Dgg8I/QiSyOGNVjZtfuGRY69geVyigSA1V7XDzC6JFnNPYQJYqzLyWxFrPzZ0pF06t4H2Q7OwxIH1EniG7e3/s3zcfQmBWWVKjybNkz8frxsVUZghUtaVpHHka2y7XDwhCdnM
+Xo0YHU8aLnis83EnjfNxSmZm6K/kvH71NlujGXlqWGln5KlhazSTBkRCPGM6RUo7T3HzAnelYkb0XslPfa+k6Lw43s4bn53utWjlKT7EzqujWUPtYj+SPcvELHb3xaaLidFY57g9msk8mmNCMiLPiMUyR6+TszMpGU4q2kW6Z+JukSVxqcXHYKGqLcNpRTVv4U6IwYQnHrUHbdd98POA1Bicih1ZnQs0zkKA1ZWC972wyrVb4wcW1Mq6rRDPBqUU5093uXZ7RGMdzsXsqVZw4IaZe1z7hACOeO5aFKguOjv7RaPFebBydxG9Xxbdq/kDl5Xl7Foi82iOAQlEngH7+4Y473EuMJ7VGK3vTNw93eXqrVHMRGSerWGJ9Z7UaBKlKGsXr0ZCXMZ53JSl8x41P+idd7gARZqQGs2stg8sqJV1WyGeLefWVrhwqsfV20Nq63b1/4hZCrurYvWgOESxGCfBstZjaxwLYH24U/CqgHudtarGkSV6WfWqtWKj35ELnmNAApET7l59QyazBmMUL57ps7rSWkb907phazTlxtYUCGSpiZ0IG4fRirVuwdaopKyah65m321RUEaIu2wWAYnWis3hlJ1pRWMd7XtchciuGiGePUopXr2whvWO4aSmcQ7nfVwSnndkzrShmmdP968IL5ZX2PX/8aTCGAC9DF7u18k9AI2LF0lZalhdKVhdkXq040ACkRPsfn1D+itxmWM4qblwusdwWi2zJmUdC0chUDaxyc9iDbW2jjxJ5unLR49E1Px5QSxSS4wi0TpOyvSBLInru856dHp3Qe3uXTVCiGdHv1PwTZdOc3VzxM5kxrSK556GWEemjSJVBudiRvdBPICDcM8cyF5GA/PsbKeVcrrXkWWZY0ICkRPsYfqGDKYl17fHXNkcLrMmWudMyoaqscx3uS011tPY+rGejwLaeULj4sApRbwCqZ1fFsU2TpMYzXBWs54Ue553CIFpaVnvtuQEIcQzqN8p6LVzrm+PeePaNonRFKlhUjaUdSyyf9AuPYA0iTv67jGi5kAKhTKKPE/otnMubHRlWeaYkEDkBLtX35AQ4jRL6z1V47g6D0IWWZO4phrupDoBoxVKxYFRj7MkA3G9N88SVOPQSlE2br41N64DK8V8ySUu9g4n9Z7lpGlpyTMjJwghnnHbw5IQYL0bL0Y6RUZtHeNZxbtbkwd+vbrT3/mhJEaTJhqF4kyvw/vOr0qbgGNEApET7KC+Ibt3wjjncT4wLWvWuncamC2aCe0/jEO4e8jUo8hSsww44pJPTIcul4yUwrnYdKjIE7pFznBW4St7p6BW+ogI8UzbnckFqOe79rRSe2o8zH1Wh2MTRIPy7oHhSCtL2FhtU9WO1W7BR145g9ay9HucSCBygi36hmyNZvQ6mrpx3B7OsD52KnUO8jROqxxMqmUPkQB3ZRxCePxMCMR28e08IzUK6xWz2pNovXfpZb4OVOQJIcBLZ/uglHRWFeI5ssjkWu/ZGZfL9gFaxdYCEJd5tVZ4d3DeY5HFfZhF5FZuqK1npZ3xygtrEoQcQxKInGBKKS5sdJlUNYNxRdks2qUrrPWkiaHXydkezbDOMZxWnJ5nLRZdUOe7dR95+NRuiVasFBlGK1pZStXEwVTOe1B6mUa1zpMkhlPdFrX1WB9YXcmfxI9CCHFM7R/lkOh4bhgOKnwIpEaTKo0Pd7qcBogdUhcnqH2MVrhwpwuJmn/MmPn92Ds9WyezBueh15JzzXElgcgJ1+8UvHbxFG9e22F7XBLnSClaeRqn3qYJs8oymdWUtaWxnjTRFFnCrLaEEFOg2misvXMSeFhGK9JEM5xW5En8c1o0Glpc4YT5/Ick0ZxZbWOMRvsgO2OEeMbt73GktaJbZFSNpW7mxfMqngeMUhQpzCob69h8rDtTau+Scew/opbnq8UHF03SYso3/t9oOLe+QpYaplXD61duy8TdY+ipvBJUVcW3fdu3oZTiX//rf/00HvK50u8
UvHSuT6+Tc2atw5nVDhv9dmzlrqDXzkgTTW09tbUEoNvKMVrPGwXFnAXsDUIWDYfuJTYjiieJPE04tdqi287JExOH4BHHceepod8puHC6x0orY1pa+u1CdsYI8Qxb9DjaP8rh1nDGrLJxaaXxOO/nGVNHWTvy1LDoO+bnDcwWk3e1jrVx8WJHxf4jqSHRsZeIdYFmnlVJjaLIUpLEkKUJvU5GVTuubo7uGoEhjtZTyYj85//5f8758+f5nd/5nafxcM+lNDHxADaadN/AuzxL6K8UMC6xLjCe1mituHC6y+3BlLKJvT1g71ZeYxRGKZr5/Ib9jFJorem3c3qdfDnD5vRqG4htlbPU0M4TWnka07GTWnbGCPGMu1+Po3aRsDOJ9WtxIKajatxyWSbRajkcT8fhNBitaOcJ7SIj0fFramtRSlGkCdOqpqzjfSymhhdZvNDR88eWzs3H16EHIl/4whf4pV/6Jf7xP/7HfOELXzjsh3tu7S9c3d+fw7nApTN9XjzTx82XRTpFymBS8ua1HTaHEyZlQ2Pd/MCdz2+Y73oxKtDKUlp5gnOedpHSOE87T2nle4tMlVL0V3IGk2rZV8TOaozsjBHiuXC/HkdGa1Kjsc5zut+mbhzb4xkQs6chhOXXuBA41Y3D7bLEzHf+x07QZ9dWIMD2uOR0v411gVlt2R7NKNJYoLr8usVjS+fmY+lQA5EbN27w6U9/mp/7uZ+j3W4/8PZVVVFV1fL94XB4mE/vmbK7cPVe/TkubvTotvcWbK2utHjlvGLWWFJjMImmaRzTsmFWNzgfaBcpa92CIk2YVY68Y3hhvcvbN3Yo8njVUe+pfPcMpzWTMta0p4mhlSWcP93l3NqKZEKEeMbdq8cRsKxRG89qrHPM6oZA3GaLgqr2tIuMbivl+vaE4bSmnad4Ykfm3ecziGMrRtOGdpGQZ2bZwyhLDL12tifNK52bj6dDC0RCCHzqU5/iL/2lv8THP/5x3nrrrQd+zec//3l+/Md//LCe0jNvUbi6LA57iP4cIQSu3hrhfeBUP/YaWTREm1UNO+MKrSD4uNSyuC+jNZc3VQxYqma5BS/MK9/jzhxNr52jtWJaWq5sDmnnqWRDhHjGHdTjaCEukaTMasto1lDVliSJu12aJu6s6bUz8izhdL/NzrhkUjXoWh14Ptt9zlsEGioo1rvFcrkYpHPzcfbIgchnP/tZ/sbf+Bv3vc3v/d7v8Uu/9EuMRiM+97nPPfR9f+5zn+Mzn/nM8v3hcMilS5ce9Sk+1xYtlHdvl7tff46DUqiLoVCxviNlUjW8cn6dXjtf3lcIgTxJuHp7iFZx50yi1Hx5Jw6W6rVT8nReMNuJs2+ubo7otXPJigjxDHuYpeLz610CgSu3hmADWus7u/3mAUS7SHHO88r5ddp5euD5bP85b1Y3XLk5jIM8jduVGW7ixVEn3lb6Fh0fjxyI/JW/8lf41Kc+dd/bvPLKK/zKr/wKv/Ebv0Ge710K+PjHP84P/uAP8g//4T+86+vyPL/r9uLRKaUeuhDrfilUAJNodK1o5+ld9xnwMQviQ+xLMm8Rv6hwh0AgxBkPUigmxHPjYZaKX72whlGKadWQGE2WmGUdyIJzHmNiZvV+54zd57xVCtp5uicz7HwcpOdN4O0bO1zeVPTbhdSrHROPHIhsbGywsbHxwNv9D//D/8Bf+2t/bfn+tWvX+N7v/V5+5md+hk984hOP+rDikNwvhQr3XlO9vj1mc2c6nx8TqMtmvt0uxGp4Y3A+LvFk8ym7UigmxPPjYZaKQwic6rbZGs1IC70nCHkvSym7syQ74xnv3BygCHRa6TIg2hrNmFS19BU5Bg6tRuTFF1/c8/7KygoAr776KhcvXjyshxWP6EEp1INOBINJyTeubVM2jlZmaGc6TtltHI3zpMbM+5bEupEFKRQT4vnyoKXih8mcPO5W/zhML+WdGwO8h/7KnSVhnRhZLj5GpLPqc+5RTwSL/gDWerI
kBi5KKbLEkBrNcFozqy1ax5oTvevrpFBMiOfPg5aKH6fI/mHdbxuxLBcfH08tEHn55Zelm90x9SgngsWB3W2n+OBj0DEPRpRStPKESVkzrSwrrQxjFI117/nqRgjx7HrUIvuH9cAaOFkuPhYkIyKAhz8RLA7sJDH02jmN9VTWkRqNVgqtY8OiIkvIU8Nk1jyxqxshxLPrUYrsH9bj1sCJp0sCEbH0MCeC3Qd2niWc6rUYTqvYojl4QoAiS/i2V8/RbedP9OpGCCEexePUwImnTwIR8Uj2H9h5lnA6NTQ2Dq+alpaNfptz69JBVQhxtA6zGFY8ORKIiEdyrwNbKagbT6eVcvFMTw5ssRRCeOJr/0I8rMMshhVPhgQi4pHJgS0e1mBS3vk78QGtpZGUePoOqxhWPBkSiIjHIge2eJDBpOT1K7eparcnJX6/RlKSPRGH5TCKYcWTIYGIeGxyYIt7WfSbqWpHr5M9VCOpe2VPzp9eITFGghMhnlESiAghHsv9sheP2kjqXtmTG9tjLt8a0MoSjNaytCPEM0gCESHEfR0UcAyn1X1rPx7YSEpr6qZmezQjhMCVzeFd2ZPGB2rrKOdN886sFjgvM0KEeNZIICKEuKeDlkvyJGFWN4TAPWs/dvebUUbT2DipWSuF956dScWssrzx7jZaKyazmtWVYs8ogeG0wnpPkRnqxjEpG/LU0GtnDKcyI0SIZ4UEIkKIPRYZkMXUUufuTC211nF9e4zznnNrHdJ5t8r9tR8ffPEU/XbB9a0R1ntq64gTHgLWxVEPK62M/krOrGooG8fOuCQxsTdNYz1V49AoqsbFAYrDED+fJrRymREixLNCAhEhxNLuDMj2qKSxjpVWRssnpIkizmmPgcRo1lBk6XJ0++7aj2llSYxiZ1xinUdphVEKFwLeB9LE0M4SnAuEAIlRNM4znNZspAk+BKzz2PkEZ4VCAd4HJmVN3ViKLJEZIUI8AyQQEUIAe7fbJokGAnlqKBuLHXpO9VoEYhiSJZqqsdTWkaV3ZngshohdvTXkK+/covExUAg+4AkEwGiFUrA1KjHTCh/CcumGsqLfyYFAYx3WxbEBgRiYoEArqFA4H0i0LMsIcdLJpB8hxF3bbc38BT5JNHlisN4znFYoQCsFShFCiMHDLs55rHN8/eoWVWNJtCJPDVlqlvcJcXhi2TRopSjShCzRBA9l5dgel2yNZtTW48Mi/xLFGhNwPlDWDcNZ9RR+OkKIwySBiBDiru22WimUUngfUEqRGk3VOFCQp4bGOmAelMyFEJjMGmZ1w7Rq8CEGDLV1sWh1flPn4/KMAnwITCtLbT0heFwIbA1njCb1Xc8xwPz5gFLxfq7dGhH2BUNCiJNFAhEhxF3bbbPEkKexaDQQd7uEEOs5VlopzgWcj8snfv7/4aTGEygrRwhhUTpCCGB9oHF3AoYAoBRlbbHOxUyLVmgVgxPPwcFFIJakGBODpdGsZlI2h/iTEUIcNqkREULs2W6rEwMKeu2Mxjqq2qHnyyplbRlN62V24+bOlNRoOq2UU702w0mFn9+fdXFp5V6cCygT60qcD6BikKFgz3rMMqDZ9f/gA1prmBe1CiFOLsmICCHoFCn9dsG0tMuljjxLONVrUWRJ3D7rAzvjEoAzqx0unelxdq1NkSUkRrPWLbDek82DmgdZLLW4eSFICODmNSG7V1sW/1zcow8xe5IYQ5IYkns0TRNCnAySERFCoJTiwkaXSVUznNTLRmV6XmzayjsooLKO9ZUCNQ802kVGK08ZTmrevT2GEGjlKePZ3TUeBwnhzoZgdUDwcdD7SoHRMQBZX2nRKdL38J0LIY6aXEoIIYA4Ufm1i6dY77aoGsdoUjEpG1p5ygunVggEiiyhso66cXuigzRRDCYl1gfaeUKaaO5XQ7pImHigth7rPM391nFgvmPnztf2OzkXz/Sks6oQJ5xkRIQQS/1OQa+dc317zLVbI6ZVw6xqeONayXBaxVoSBaB
IE0ORGhrn5t1PPYlWTGaKJFH3KDdlXpC692P3C0E0MWBZ3CY1houne7z/4rrMmhHiGSCBiBBij+G0Wg6h6xQpznl2xmVsOuY92miCj23gQ4ht14vMLDMV06rGl/e+/wckPu6+PTF4UUrR6+R84Pw6L59bjcWqQogTTwIRIcTS/sZmCsXOuIq9RBJFbT3ae9JEoVSY9wrxTKtYZar0owcaD/e84nPzPnBjZ0JZu+WkXyHEySaXFEKIpf2NzWrrYodUo5a1GC7EACSEO8sszof5TJgnL/YYmfcPUYo8MWyNZrx+5TaDyX1SL0KIE0ECESHE0v7GZj6EuJ1XxcAjndeI+Hlzs90ULCfrPmkKRZ4lWB9bnfU6GVXtuLopnVWFOOkkEBFCLO1ubAYsW73HKbmxvXpiNK0sJUk0Rus7/T24f9Hp4wrEpmd5Fofr+RD2TPqVzqpCnGwSiAghlvY3Nlu0enfzTIf3sTg1Tw2p0XcNvXsUj7KM08oSCPMGaPOuZ8ZovJfOqkKcdFKsKoRYOqixWbeVUlYNVROXaNJEz+tB4ryXe82FuR+twGi1Z/7MPZ8TMK0anA8Yrdgez5hWDa08mXdYlespIU4yOYKFEHvsb2xWW89KO6PfKcgSQ914GutpFymn+22S5NFPIw/RAX4pEJueOR+W036nZc2twZQ8NdJZVYgTTjIiQoi7LBqbTcoG6zyJ0bTzhBs7k2WjM6UURisunOpybWuMtS5us32I+w+o5XLPw9IKrHOEKpAYM+8NL11VhTjpJBARQhxIKcVKK9vzsRfWu5xbW7krQPlXX7vOO5sDrPW4XXUjavc/dgUp3geMBvsI5R0hzAtinScxhtO9NpW1TMrmrucphDg5JBARQjySgwKU919cZ1Y33BrMwDq0utNvRAFGazp5Qtk4GuswWuFDOLDd+70YHXfwaB37iiSJpm6cFKsKccJJjYgQ4j3rdwq+9ZWzXNrokSQaF2I9R5Zoeu2cC6e6bKx20Dq2aT+z1qHbzshTg3nI1RWtFcbEGTYhxJ4nUqwqxMknGREhnmMhhD3LLJ0ifexptv1Owe977QVeOL3C165sUVtHr5WR5wneBwbjCj3PpkxmDc5DK09JjGY0u7sXiCaWgCxWerRWcYXHQzCBqnGc6XekWFWIE04CESGeU4NJydXNEYNpifcBrRX9dvGeZrgopTh/qkenyJb3PZk1aK1Y7RZoDbOqwXpPnpi41JLGGTZ14wjMW7qreT3JIgiZx0Y+LFrJa/LE0OvEgtr3EkAJIY6WBCJCPIcGk5LXr9ymqh3tIsEYjXOerdGMSVXz2sVT72mg3L123fybb9zk7Zs7FHmyDByUUrTy2DTN+VjvEYi1I0pBojWJid1enQejIUs1jXO8fWOHy5vvPYASQhwdCUSEeM7cNWF3HhDoxNDraIaTmqubI3rt/D1lGQ4qaj292uLyrQFN41BpXG5ZdEttFwlGKyZVg5rvt9Hz4lSCAgN5aijyhDwxdFrpEw+ghBBPnwQiQjxn9k/Y3W3/DJeDtsW+l7qS1ZUW/U5ONd/tYp2fZ0RSeu0MpaBdWV5Y7zKaVZRVQ209SkG3leNDYFbZQw2ghBBPlwQiQjxn9k/Y3c8Yja/sgdti32tdSadIOd3rsDWaUmQJAea7awyBwHBSc6rb5tXzawB7Ap4QAl9+6+ZjB1BCiONJAhEhnjO7J+zqxNz1eXePbbFPoq5k9yybctf9NM4xLS15Zriw0V0GGrsDip1x+dgBlBDi+JIN+EI8Z/ZP2N0thMC0tPTbxZ5tsfvrStLEoJUiTQy9TkZVO65uju66v4Psn2UzntZUjWO927pvMLM7gDrIvQIoIcTxJhkRIZ4zB03YXWQ3DspKwHuvK9nvoF01D6o1WQRQW6MZvY7ec9tFALXebUlfESFOGAlEhHgOLbISy3qPyqK1Yr3bOrDe473UldzLQbtqHnT7Rw2ghBDHnwQiQjynHiUr8bh1JYfxnB8lgBJCHH8
SiAjxHHvYrMRxWhZ5nGUdIcTxJYGIEOKBjtuyyKMu6wghji8JRIQQD0WWRYQQh0ECESHEQ5NlESHEkyaBiBDikciyiBDiSZLOP0IIIYQ4MocaiPziL/4in/jEJ2i1WqytrfH93//9h/lwQgghhDhhDm1p5h//43/Mpz/9af76X//r/OE//Iex1vLlL3/5sB5OCCGEECfQoQQi1lp+9Ed/lJ/4iZ/gL/yFv7D8+Ic+9KHDeDghhBBCnFCHsjTz27/921y9ehWtNd/+7d/OCy+8wPd93/c9MCNSVRXD4XDPmxBCCCGeXYcSiLzxxhsA/NW/+lf5r/6r/4pf+IVfYG1tjU9+8pNsbW3d8+s+//nP0+/3l2+XLl06jKcnhBBCiGPikQKRz372syil7vv2la98Be/j4Kv/8r/8L/mzf/bP8rGPfYyf+qmfQinF//q//q/3vP/Pfe5zDAaD5dvly5ff23cnhBBCiGPtkWpE/spf+St86lOfuu9tXnnlFd59911gb01Inue88sorvPPOO/f82jzPyfP8UZ6SEEIIIU6wRwpENjY22NjYeODtPvaxj5HnOV/96lf5d/6dfweApml46623eOmllx768UIIAFIrIoQQQpwgi9ftxev4/RzKrpler8df+kt/iR/7sR/j0qVLvPTSS/zET/wEAD/wAz/w0PczGo0ApFZECCGEOIFGoxH9fv++tzm0PiI/8RM/QZIk/If/4X/IbDbjE5/4BL/yK7/C2traQ9/H+fPn+d3f/V0+9KEPcfn/396dRzV1pn8A/97EkARIkE0lQFkEtxElSKGCU7H6E5ejVVtb64Ki0qlg0XHDrVo7CmOVUxGtS12mjjOKy2ARWyh1HfeNoCyCbKIsLriA7CT394eHO03BCpJ4CX0+5+Qcc3Pve5+8F2+e+77vfe/du5DL5foKt90rKyuDvb091WMrUB3qBtWjblA96gbVY+s1VYcsy6K8vBwKheKV2zNsc9pNeFRWVgYzMzM8e/aM/khageqx9agOdYPqUTeoHnWD6rH1WluH9KwZQgghhPCGEhFCCCGE8KbNJyJisRgrV66k23pbieqx9agOdYPqUTeoHnWD6rH1WluHbX6MCCGEEELarzbfIkIIIYSQ9osSEUIIIYTwhhIRQgghhPCGEhFCCCGE8MbgEpFjx47B29sbUqkU5ubmGDNmDN8hGayamhq4u7uDYRioVCq+wzEo+fn5mDFjBpycnCCVStG1a1esXLkStbW1fIfW5m3evBmOjo6QSCTw9vbG5cuX+Q7JoERERODtt9+GTCZDp06dMGbMGGRmZvIdlkH7+9//DoZhMHfuXL5DMTiFhYWYPHkyLC0tIZVK4ebmhqtXr7aoDINKRA4fPowpU6YgMDAQKSkpOHfuHCZOnMh3WAZr0aJFzZp+lzR269YtaDQabNu2DWlpafjmm2+wdetWLF26lO/Q2rSYmBjMmzcPK1euxPXr19G3b1/4+/vjwYMHfIdmME6fPo2QkBBcvHgRSUlJqKurw9ChQ1FRUcF3aAbpypUr2LZtG/r06cN3KAbnyZMn8PX1hUgkwk8//YT09HRERka26FEuAADWQNTV1bG2trbsjh07+A6lXfjxxx/ZHj16sGlpaSwANjk5me+QDN7XX3/NOjk58R1Gm+bl5cWGhIRw79VqNatQKNiIiAgeozJsDx48YAGwp0+f5jsUg1NeXs66urqySUlJ7MCBA9k5c+bwHZJBCQsLYwcMGNDqcgymReT69esoLCyEQCCAUqmEjY0Nhg8fjtTUVL5DMzj3799HUFAQ/vnPf8LY2JjvcNqNZ8+ewcLCgu8w2qza2lpcu3YNQ4YM4ZYJBAIMGTIEFy5c4DEyw/bs2TMAoL+91xASEoKRI0dq/U2S5ouLi4OnpyfGjx+PTp06QalU4rvvvmtxOQaTiOTm5gIAvvzySyxfvhzx8fEwNzeHn58fHj9+zHN0hoNlWUybNg2fffYZPD09+Q6n3cjOzkZ
0dDT+8pe/8B1Km/Xo0SOo1Wp07txZa3nnzp1RUlLCU1SGTaPRYO7cufD19UXv3r35Dseg7N+/H9evX0dERATfoRis3NxcbNmyBa6urkhMTMSsWbMQGhqK77//vkXl8J6ILF68GAzD/O6roT8eAJYtW4YPPvgA/fr1w+7du8EwDA4ePMjzt+Bfc+sxOjoa5eXlWLJkCd8ht0nNrcdfKywsxLBhwzB+/HgEBQXxFDn5IwoJCUFqair279/PdygG5e7du5gzZw7+9a9/QSKR8B2OwdJoNPDw8EB4eDiUSiU+/fRTBAUFYevWrS0qp4Oe4mu2+fPnY9q0ab+7jrOzM4qLiwEAvXr14paLxWI4OzujoKBAnyEahObW44kTJ3DhwoVGzwTw9PTEpEmTWpzJtjfNrccGRUVFGDRoEHx8fLB9+3Y9R2fYrKysIBQKcf/+fa3l9+/fR5cuXXiKynDNnj0b8fHxOHPmDOzs7PgOx6Bcu3YNDx48gIeHB7dMrVbjzJkz2LRpE2pqaiAUCnmM0DDY2Nho/SYDQM+ePXH48OEWlcN7ImJtbQ1ra+tXrtevXz+IxWJkZmZiwIABAIC6ujrk5+fDwcFB32G2ec2tx40bN2L16tXc+6KiIvj7+yMmJgbe3t76DNEgNLcegRctIYMGDeJa5wQC3hsY2zQjIyP069cPx48f526712g0OH78OGbPns1vcAaEZVl8/vnniI2NxalTp+Dk5MR3SAZn8ODBuHnzptaywMBA9OjRA2FhYZSENJOvr2+jW8ezsrJa/JvMeyLSXHK5HJ999hlWrlwJe3t7ODg4YN26dQCA8ePH8xyd4Xjrrbe03puamgIAunbtSldVLVBYWAg/Pz84ODhg/fr1ePjwIfcZXd2/3Lx58zB16lR4enrCy8sLGzZsQEVFBQIDA/kOzWCEhITg3//+N3744QfIZDJufI2ZmRmkUinP0RkGmUzWaEyNiYkJLC0taaxNC/z1r3+Fj48PwsPD8dFHH+Hy5cvYvn17i1uHDSYRAYB169ahQ4cOmDJlCqqqquDt7Y0TJ060/J5lQlopKSkJ2dnZyM7ObpTAsfRA65f6+OOP8fDhQ6xYsQIlJSVwd3dHQkJCowGs5OW2bNkCAPDz89Navnv37ld2KxKiS2+//TZiY2OxZMkSfPXVV3BycsKGDRswadKkFpXDsHTWJIQQQghPqFObEEIIIbyhRIQQQgghvKFEhBBCCCG8MajBqqT5ysvLUVxczE0ERwghpOUEAgFsbGwgk8n4DqXdokSkndFoNIiIiEBsbCzfoRBCSLsxduxYLFmyhOYL0gNKRNqZiIgIHDlyBKGhoVAqlRCJRHyHRAghBquurg7JycmIjo4G8OIxI0S36PbddqSsrAzvvfceQkNDERAQwHc4hBDSbuzZswcbN27EyZMnqZtGx6iNqR1pmGFRqVTyHAkhhLQvDefVhueeEd2hRKQdaRiYSt0xhBCiWw3nVboBQPcoESGEEEIIbygRIYQQQghvKBEhhBBCCG8oESGE6ISfnx/mzp3LdxicthZPW1NdXf3K16/HQ0RHR8PBwQEdOnTAggULUFpaik6dOiE/P5+/L6EjEyZMQGRkJN9h/GHRPCKkSRqNBoVFD/G8ogqmJlLYKqzbzUQ+arUaDMO0m+/TFJZlUVFdh3q1Bh2EAphIRGAYhu+wXqm2thZGRkZ8h8ELVqMB+6gUbFU1GKkEjJUlGD3+jXp4eCAjI+OlnzMMg9zcXDg6OiIlJQXz5s3DDz/8AKVSCTMzMyxfvhzvv/8+HB0d9Rbjm7J8+XK8++67mDlzJszMzPgO5w+n/Z6JyWu7nX0Xm7YdwrqovdiweR/WRe3Fpm2HcDv7rt72eejQIbi5uUEqlcLS0hJDhgxBRUUFNBoNvvrqK9jZ2UEsFsPd3R0JCQncdqdOnQLDMHj69Cm3TKVSgWEY7krtH//4Bzp27Ii4uDj06tULYrEYBQUFqKmpQVhYGOzt7SEWi+H
i4oKdO3dy5aSmpmL48OEwNTVF586dMWXKFDx69EhvdaArzyqqkXHnEW7m3Uda/gPczLuPjDuP8KyiWm/7nDZtGk6fPo2oqCgwDAOGYZCTk4MZM2bAyckJUqkU3bt3R1RUVKPtxowZgzVr1kChUKB79+4AgPPnz8Pd3R0SiQSenp44cuQIGIaBSqXitv2949NUPG35yl19rxC1R46iJuYQag7FoibmEGqPHIX6XqHe9jlz5kzI5XJkZWUhLy9P6zV48GAMGTKESzLi4+Ph5eWFESNGwMbGBgCwc+dOzJgxQ2/xNVd9fX2ry+jduze6du2KvXv36iAi0lKUiBAtt7PvYueeOKSm58DCXI6uTrawMJcjNT0HO/fE6SUZKS4uxieffILp06cjIyMDp06dwrhx48CyLKKiohAZGYn169fjxo0b8Pf3x+jRo3H79u0W7aOyshJr167Fjh07kJaWhk6dOiEgIAD79u3Dxo0bkZGRgW3btsHU1BQA8PTpU7z33ntQKpW4evUqEhIScP/+fXz00Uc6//669KyiGln3SvG4vApikRCmxkYQi4R4XF6FrHulektGoqKi0L9/fwQFBaG4uBjFxcWws7ODnZ0dDh48iPT0dKxYsQJLly7FgQMHtLY9fvw4MjMzkZSUhPj4eJSVlWHUqFFwc3PD9evX8be//Q1hYWFa27zq+DQVj729vV6+e2up7xWi7sdEqPPugJHJILC1ASOTQZ1358VyPSUjAQEBqK6uxs2bN+Ho6Mi9TE1NcebMGcycORMA4OLiguXLl+P8+fNgGAYBAQH48ccfIRaL8c4773DlaTQahIeHw9XVFRKJBJ07d8a0adO4z1NTUzFixAjI5XJ06dIF8+fPR21tLfd5QkICTExMtLqDUlNTwTAMl2Dm5+eDYRgcOHAAf/7znyEWixEXF4eCggJMnToVnTt3hlQqRd++fXH27FmunIKCAkycOBHm5uawsLDApEmT8OTJE636GDVqFPbv36/TOibNQ10zhKPRaPBT0gU8flKGbi72XFO+zNQY3VzskZV9Fwm/XEBXZ1uddmsUFxejvr4e48aNg4ODAwDAzc0NALB+/XqEhYVhwoQJAIC1a9fi5MmT2LBhAzZv3tzsfdTV1eHbb79F3759AQBZWVk4cOAAkpKSMGTIEACAs7Mzt/6mTZugVCoRHh7OLdu1axfs7e2RlZWFbt26te5L6wHLsih8WI6aWjXkJkbc8RN0EEJuIkBZRS0KH5ZDbizWeTeNmZkZjIyMYGxsjC5dunDLV61axf3byckJFy5cwIEDB7QSOhMTE+zYsYPrktm6dSsYhsF3330HiUSCXr16obCwEEFBQdw2zTk+TcXT1rAaDeovX4Wm/DkEdor/HRdjKQRSBTT3ilB/+RoEChudd9NYWVlhzJgx2LVrF8aNG8ct37t3L8zMzDBmzBgAL1qn+vfvj1mzZmHy5MkwNTXFsmXL0K9fP63yIiIiEBMTg+3bt8PZ2RmFhYW4desWACA5ORkDBw5EaGgoNm7ciHv37mHixIno2LEjvvjiC26d3r17a51bVCoVFAoFrKysAAApKSkAgHXr1iE8PBxOTk4QiUTw8vLCu+++i7i4OFhYWODUqVOQy+UAgOzsbC7+ixcv4vnz5wgODsbChQuxY8cObl9eXl5Ys2YNampqIBaLdVrX5PdRIkI4hUUPkZ17Fwobq0Y/VAzDQNHFErdz7qKw6CHs7TrrbL99+/bF4MGD4ebmBn9/fwwdOhQffvghhEIhioqK4Ovrq7W+r68vd0JqLiMjI/Tp04d7r1KpIBQKMXDgwCbXT0lJwcmTJ7kWkl/Lyclpk4lIRXUdnlVWw1jSocnjZyzpgGeV1aioroOp9M2Mw9i8eTN27dqFgoICVFVVoba2Fu7u7lrruLm5aY0LyczMRJ8+fSCRSLhlXl5eWtsY4vFpCvuoFJrCIggsLZo8ZgJLc2gKC8E+KgXTyVrn+w8KCsKwYcNQVFQEhUI
BANi9ezcCAgK4Y2Jqaor8/HwMGDCAS+ru3LnDrd8gMTERo0aNwqBBgwAADg4O8PHx4fYzZcoUrF69GsCLVpbAwEDEx8dziYhKpeIuFBqkpKRoLVOpVDAxMcHBgwe5bqMRI0bgnXfe0Wppc3V15f4dHByM4OBgraR40aJFWLhwoda+FAoFamtrUVJSwl0QkTeDEhHCeV5RhZqaWhhLm74akBpLUPPgMZ5XVOl0v0KhEElJSTh//jx+/vlnREdHY9myZUhKSnrltg1XT79+ZFJdXV2j9aRSqdaJXiqV/m65z58/x6hRo7B27dpGnzX0kbc19WoNNBoWQmHTV85CoQCamnrUq9/MzJD79+/HggULEBkZif79+0Mmk2HdunW4dOmS1nomJiYtLtsQj09T2KpqsLV1YCQvuQIXi8E+fgK2Sj9daoMHD4aDgwO+//57LFmyBNeuXcONGze0uihu3LgB4H+tlABQVVWllSgCwOjRoxEWFoarV69i/Pjx+OCDD2Bubo5bt27h2rVrjcZfGBkZoaamhnufnJyM0NBQrXVUKhU8PT259ykpKRg9ejSXhNy5cwc//fQTkpOTm/x+d+7cQVJSEs6ePat1V4xarW7UVddwTqisrGy6soje0BgRwjE1kUIsNkJlVU2Tn1dVVkNsJIKpye//iL8OhmHg6+uLVatWITk5GUZGRjh+/DgUCgXOnTunte65c+fQq1cvAIC19YurxF8//+HXAxpfxs3NDRqNBqdPn27ycw8PD6SlpcHR0REuLi5ar9f54XwTOggFEAgYqF+SaKjVGggEDDq8JFFpLSMjI6jVau79uXPn4OPjg+DgYCiVSri4uCAnJ+eV5XTv3h03b97U+pG6cuWK1jrNOT6/jactYqQSMEYioLrp/3OoqQEjEoGRSpr+vLX7ZxhMnz4du3fvBvCie8vHxwc9e/bk1lGpVI3+7q2srBqNsViwYAEyMjIwePBgfPPNN3BxcUFeXh7S0tIgEokatVKlp6dzyU1FRQVycnK0Wj80Gg2Sk5MbtYj4+flpvTcyMmrUytYgJSUFFhYWuHHjBlQqFfe6efMmTp48qbXu48ePAfzvnELeHEpECMdWYQ0XZ3sUFT/Cbx/KzLIsikpK4drVHrYK3f5HvXTpEsLDw3H16lUUFBTgP//5Dx4+fIiePXti4cKFWLt2LWJiYpCZmYnFixdDpVJhzpw5AF408drb2+PLL7/E7du3cezYsWbNB+Do6IipU6di+vTpOHLkCPLy8nDq1CmueTckJASPHz/GJ598gitXriAnJweJiYkIDAxssz9uJhIRzIwlqKyub/L4VVbXw8xYAhOJfp5F5OjoiEuXLiE/Px+PHj2Cq6srrl69isTERGRlZeGLL75olFA0ZeLEidBoNPj000+RkZGBxMRErF+/HgC4Vq3mHJ/fxtMWnxHCWFlCYKuApvRxk8dMU/oEAltbMFaWeoshMDAQubm5+OWXX7Bv3z6tsThA010mSqUS6enpjcrq1q0bFi1ahGvXrqG8vBzp6emQyWRQq9VaLZV5eXmIjY3FpEmTuPcajQY9evTg1klMTERpaSm377KyMuTn52s91FMkEqG+vv6lrRgikQjl5eVQKBSNElZbW1utdVNTU2FnZ8eNRyFvDiUihCMQCDD8//rDwlyOrOy7KC+vQL1ajfLyCmRl34WFhRzDhvTX+fwbcrkcZ86cwYgRI9CtWzcsX74ckZGRGD58OEJDQzFv3jzMnz8fbm5uSEhIQFxcHNcHLBKJsG/fPty6dQt9+vTB2rVruX7oV9myZQs+/PBDBAcHo0ePHggKCkJFRQUAcC0xarUaQ4cOhZubG+bOnYuOHTu22flHGIaBrbUMYiMhyipqUVevhoZlUVevRllFLcRGQthay/Q2n8iCBQsgFArRq1cvWFtbw9/fH+PGjcPHH38Mb29vlJaWIjg4+JXlyOVyHD16FCqVCu7u7li2bBlWrFgBAFx3QHOOz2/jKSgo0Mv
3bg1GIEAHL08IZKbQ3CsCW1kJVq0GW1kJzb0iCGSm6ODVT6/ziSgUCowYMQLTp0+HWq1udGdYw3H4NX9/f6SlpXGtIl9//TX27NmDjIwMZGZmYunSpbC0tISPjw+8vb3RsWNHLF68GLm5uThx4gRGjhyJCRMmYNiwYQAAS0tLMAzDJaoXL17E7NmzIZFIuJaUlJQUCIVCrS4ib29vmJmZYdasWcjIyEB6ejq2bt3K3VXn7e0NuVyOgIAApKSkIDs7GwkJCU1OdPff//4XQ4cO1UmdkhZiSbuRkZHB9uvXj83IyGhVOVm3C9iob2PYzxesZz8NjWA/X7Ce3bglhs26XaCjSIk+PX1exablPWDPpxWwZ2/eYc+nFbBpeQ/Yp8+r+A7tte3du5cViURsZWUl36HoRf3de2z14SNs5cZv2YrIKLZy47ds9eEf2Pq7997I/o8ePcoKhUJ21qxZWsvVajVrbGzMxsfHN9rGy8uL3bp1K8uyLLtq1Sq2W7durEQiYa2srNj333+fTU9P59Y9c+YM6+HhwUokEtbZ2ZmNiIhg6+vrtcpbvXo1a25uzr711lvs1KlT2bCwMNbT05P7PDo6mv3Tn/7UKI6zZ8+y/fv3Z01MTFhzc3N2+PDh7NOnT7nPL126xPr5+bFyuZyVyWSsh4cHGxUVpVVGVVUVa2Zmxl64cOGldaSr8ytpjGHZ37QHEoN169YtTJ48GXv37tVq4nwd7Xlm1T8C1kBnVm2wZ88eODs7w9bWFikpKZg9ezb8/Pza9YRTb3pm1dY6duwYFi5ciNTUVIM/N2zZsgWxsbH4+eefX7qOLs+vRBvdNUOaJBAIdHqLLnmzGIZ5Y7fo6kNJSQlWrFiBkpIS2NjYYPz48VizZg3fYekVIxDo5RZdfRk5ciRu376NwsLCNjtZXHOJRCJER0fzHcYfFiUihJA2Z9GiRVi0aBHfYZBXaC8PFWyYRZbww7Db0wghhBBi0CgRIYQQQghvKBFpRxoGjDU1syghhJDX13BeNfSBuW0R1Wg70vAciJdNd0wIIeT1NJxXDekRAoaCBqu2I3K5HGPHjuVGfyuVSohE+plFkxBC/gjq6uqQnJyM6OhojB07FjKZjO+Q2h2aR6Sd0Wg0iIiIQGxsLN+hEEJIuzF27FgsWbKEumb0gBKRdqq8vBzFxcVt8vkahBBiKAQCAWxsbKglRI8oESGEEEIIb6iNiRBCCCG8oUSEEEIIIbyhRIQQQgghvKFEhBBCCCG8oUSEEEIIIbyhRIQQQgghvKFEhBBCCCG8+X+V9mljpEqBywAAAABJRU5ErkJggg==", "text/plain": [ "
" ] @@ -221,7 +221,7 @@ }, { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAiIAAAG/CAYAAABlpLwqAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAACeJElEQVR4nOz9eZRk2V3Y+373PlOcGHOorKy5pG61htbQMg0CWjYGgwQyWIh3jbHBvqBn63nZwgazvGxYXutiL3Mt7jPPi7V8bcA8IxsbMdi+ICHTgK4ekoCWQLRAU0vdXSp1DVmVc2bMcYa9f++PE5ldU4/qrMys+n1YCXRmZMTJqIgTv7P3bzAiIiillFJK7QO73weglFJKqbuXBiJKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jd7HogsLS3xN//m32R+fp40TXn961/Pn/zJn+z1wyqllFLqEAj38s63trZ485vfzDd90zfx8MMPs7CwwJNPPsns7Ozz+n3vPVeuXKHVamGM2ctDVUoppdRLRETo9/ucOHECa599zcPs5dC7H/3RH+UP//AP+f3f//0X9fuXL1/m9OnTL/FRKaWUUup2uHTpEqdOnXrW2+xpIHL//ffzrd/6rVy+fJmPfvSjnDx5kr//9/8+73rXu255+yzLyLJs97+73S5nzpzh0qVLtNvtvTpMpZRSSr2Eer0ep0+fZnt7m06n86y33dNApFarAfAjP/IjfPd3fzef/OQn+aEf+iF+9md/lu///u+/6fb//J//c/7Fv/gXN32/2+1qIKKUUkodEr1ej06n87w+v/c0EInjmK/+6q/mkUce2f3eP/yH/5BPfvKTfPzjH7/p9jeuiOxEVBqIKKWUUofHCwlE9rRq5vjx49x///3Xfe81r3kNFy9evOXtkySh3W5f96WUUkqpO9eeBiJvfvObefzxx6/73hNPPMHZs2f38mGVUkopdUjsaSDyj/7RP+ITn/gE/+pf/SvOnTvH+973Pv7Df/gPvPvd797Lh1VKKaXUIbGngcjXfM3X8Ou//uv88i//Mq973ev4l//yX/LTP/3TfN/3fd9ePqxSSimlDok9TVb9Sr2QZBellFJKHQwHJllVKaWUUurZaCCilFJKqX2jgYhSSiml9s2eDr1TSt0dXFmy/NiTjLa61Gc7HLv/PoIwfN4/V0rdvfRMoJT6ipx/5FEuvP9h7MoKQVniwpAnFhc5+51v456HHnzOnyul7m4aiCilXrTzjzzKxfe+j2g8omi38XEMeU60tMTF976PjS+eY/jxP37GnwOcfdMDulqi1F1My3eVUi+KK0s+9s9+kmhpiWJhAWOfTjkT74lWVwnKAhfFt/752hpFqw3NBnZtbXe1xD/Haolu8yh18L2Qz2999yqlXpTlx57ErqxQtNsYa/EEgGDxGGtxScLCeMByo4mxloIEA4Rk1c+DkGMrS2xtpWRzc7dcLbkxGNFtHqXuPFo1o5R6UUZbXYKyhDgGqhCkMCniPTIaYbOMWMBjALA4DK66rQjpaEAskDWbmDTFBAEmTSkWFgjHIy5+4GFcWe4+3u420NISvl6nOHIEX6/vBi7nH3n09j8JSqmvmAYiSqkXpT7bwYUh5DkAAQVxb5n60hLzq8scHQ1IjTDT3YT+gICSgGlgMR5TdwUTA0QRjuoLqFZPWi3M8grLjz0JVNsxF97/MOF4VG3zPI/ARSl1OGggopR6UY7dfx9+cZGo16tWQfp9ZjbWaZY5hTWUBnpiaHvPzMYq9AdP/3JRkAqMgwDyAhmPEH9NulqSYJ1jtNUFbt4GcsSUzxK4KKUODw1ElFIvShCGnP3Ot1GmdaLVVVpbG8TimVhDzXkKG7Dc6tCzAS3vaW6t48sSGY1od
reJ8DRLx8LWOkdXLtFa+jLS71d3nmX4IKA+2wFu3gYylAjB0wdzQ+CilDo8NFlVKfWi7SSIXvil/87C1UsgkHphO2oz7MwQtWK2azXc1gYtV5KvrCDWEohniAUDoyAgAJplTryxzrYXosmY8tRJjt1/H3DDNlCaYvFYJk8fyA2Bi1Lq8NAVEaXUV+Sehx6k/obX4gBnIBRouoyZratIt4dpteidOMVGnGK/4c+Tz84xSlKWjxwlMwEN5zAIo8DS8I5jG6s4azn97W/ZLcu9cRvoWuI9Ub+PHFvcDVyUUoeHrogopb4i5x95lOyRT7AgIEBghNRPCIDxxipL2QSiCIwhW18n2dygSFNotdgwMUmvz2zepYkQVgU2dCdjLn7wQ4Ch1mkx2urSeeC19DY3p/1HWpAkkGVE/T5lvc6Zt79N+4kodQjpu1Yp9aLtVLOkkzEC1I1nhKUArEDDeF4x3MZNgxT/p5+iZqCbjdgeDBjNzDLszNNZ65ELDIAIcDagfv5LbP/0/8kkSRFrq2ZnaUrRamHHY+xggA8CylMnOfN27SOi1GGlgYhS6nm7saupdw57dZnQOzyQEWJtiHE5AUJoDBHgjUw7iEBoIBShUU7YXl/BY2jjsQY6QGCgkY8AMAir2YitU2ehKIh7Pcpajda3fjOtE8e0s6pSdwB99yqlnpcbu5r6IESMYa6/TccIGLB4+vFRZvMVauTTVmZgMMjOqohUJ57YwBHxBAZyX23plEHCWAw1MiyQC8x7z2BtjaLRpGg0iAYDep/+HA9893doAKLUHUDfxUqp53TTcLu8oNndZKEsaFvBYMgFwNPOVwglx5gq8DBAZusUBNR9v/om0y0YU62QlKbKD8mcYxIk1GTndw2hDTk1GTGYjPDGkFvL5OJFlh97kpNveI3OnlHqkNN3q1LqWe3kgUQ7XU2HI2a2NqiJIzZVoGENJAaMGDLJcVSBhpl+BVLggJX4JHU3pO22q5K9aiGF2Bi6tsnQRIQ+oyro89QM9MImpuiRWQPGkjpHsxxx+Y8+RTYY6ewZpQ45DUSUUs/q2q6mGEPYGxGYkFRKEgMOg8HgTITFkeDYaZK6szWTBSm1ckir2KLmR3jAGHBUWzVOhNINqWEIggYRHjv9/Wa5zQSwhJRYJtbTdsL443/MxT/6E6LJpFqleR5D85RSB4/2EVFKPavrupqOx9S8UHcFNWMZxvNYG2AQxkEdg2CAPGzQD2eBaoulUfYIcNT8NAnVVAGIRZiIUAIdhBnjafk+Ik8HMQCBjciDFoJQ80I3DJntbhEPBjp7RqlDTgMRpdSzurarqSlLOmWftnUEeBrFNohQEtAqtzFUzcZqbkiz3N69Dws4kzCIFwgBI1CKVAGHQADUTJXAurPdA9WKiQBWCtrlmIYrKaxlVGtQF0+R1DDWUhLhmLZ/19kzSh0qGogopZ7VtV1NvbEkCBYoBAIcYgKGYYdrZ9YBILKTl4oAkWTMFOu7QYYXKKnKdQ1Pb9N4QEyAE4ufJq2WAm2ZMA4CtucWEFvdi0+S6b1ZzG6BMDp7RqlDRAMRpdSzuna4XdrrYnY6qAKDoI0XS7vYpBstUJhkN/jYiTi8SJUHAghCIZCLIAYygS2x9LEI04IagdJEDMMWpQjLYlgzAT0M/c4c0qgTZRkjY6ssWSAkw14biOjsGaUODU1WVUo9p93hdr/4KxSrY2riCY3BuwwrGQLMlOuYaRgi7AQTHRr5OgPbQLC0fb/quoohMjAWQ24N1lv6YZ0Aoeb6iJsQlhOcgUggM1AYg/GOcG2NvNViEs4S9/sUaYqxT19T7cyeuXZonlLq4NJARCn1vNzz0INEacJTP/V/EuU5i2VGy2YE08RTEcEaEIGRF+q2oJ5vYYCWH7GzRFJS3b5lqhbwgRjKICbH0nFdSql6ixgDQ28YBZa290wwWOd3W7oDVW8TnT2j1KGm71Kl1PN24vWv5typU5ilJb7cbrO4uc4xW62CeMCZiO1wh
pl8DQARNy3TFcBjAC+mCliAxBpCga7Ppv1DhGTafMQCTQtj51lJasTf/Je492v/3E0Ny3b6iOjsGaUOJw1ElFLP206+yMX3vo94PGJtZo6wu0ldhNRCKAVHynUw1cqHna5siFTbMWICXFQ1KJvgqWMIgAbCwEQ4PBGOYOfxgLbxuCxj7coycPO8mzf/i3/M2hNf1s6qSh1S+m5VSr0gu/ki738Ye3UZJxAaYewhw9KxMk1mFbyp4Y0josQAgiOVCRnCUCyJEQKq37dhg6Ds7wYuhQgOQyaGI8bR/vSfcOlzn+GSDXBBuDuRd6eT6n1/8ev282lRSr1IGogopV6wex56kLNveoArn/0i53/6Z4l722TTLqupCIGp2rZPghpp2a/mxkyraBKfsW4SRmFMq+xXfUMEZt32btVMzyQUQJuMGBiJITVwvMwpsQxNzsbcAsSRdlJV6pDT8l2l1IsShCE2CMAYljqzDMIETEBpE4xUk3N90aXEk0nVdyTzVS5JTQoKG+EExgJXsRS++v97vsotadhguloCkam2aWJgO6i2c+q9LtRq2klVqUNOAxGl1Iu20/7dz8wwOnWKtYVFrgZ1PNAXw4iAbdsil6AafmeEXCAxnvlsi54J6GFJpn1GEKgZoUmBN7AVzuAFkul0XsFgsYyDiFACZDzRTqpKHXIaiCilXrTr2r8bQ9BIKWKZTt8VYnEgJX3jKUWmVTPVnvAosGy22mzVmziqJmc1WwUcuRgiN6JWbOOBbrRAacJptY3gAW8jTFlUB6KdVJU6tDRHRCn1oh27/z6eWFwkWlqicB7jHT4M6WGIBVLjMW7MxFqWTETqHBGABwkMR/rbNKb9RwqxCH46yxdcVWdTJbLmm1jjp5NsDCGOsOwzCBvVgWgnVaUOLQ1ElFIvWhCGNF91H8G5c7SGfUoDxbQbaoiwQUi30aJsNaBWo9cfcGxjlYYRZktHOU1wzafTdovp/40NFDaisCkU23j87m2MeGIPgzCGNNVOqkodchqIKKVetPOPPMrw439MEoZ4D4l3RNOqGQSywFZBSJLAZEI0HjEKAgbGMmh3aPW2SV3JKIgIxFH3nkxM1XlVMho+IwcmYsgNtBE63jMwllG7A+OxdlJV6pDTd61S6kVxZcmF9z9MNB4xOXGCMWAmEygdWEO6tVV1SB2Odrue5rOzmE0hn5mpmoUQMwxjjJTkNmVbPLOMWLaWgVjq4hkZizWehodtY+hHMT6MCLMJvtROqkoddhqIKKVelOXHnsSurFC021XlikkxjQaRjAEYW0swGjP7jr9MOjdLfbbDcH2T1V/4rxDHmOGw6qsqGQBWSvpBjU45ou6EYWDJBbqNFlGe040iGm97Cw/+P96mnVSVuoPou1cp9aLslu7GMQCRVLNidiUJZjAgnZvd7Xq69Jkv7FbZSBgS+SHG2t3E1MgP6GEYhSGpK4mBwDnyl529btXj5Btec5v/WqXUXtFARCn1olxbukuaYqY1LbtuUclyXZXNkSOMg4hmmTMKDALUvKcfJgyPHSNfXSXrzPCKv/u/cuINr9FVD6XuUNpHRCn1ohy7/z784iJRr4f464OQnUoWObZ4XSXLztC8Mq0Tra8zSevkxtB0Ba2yoDCWSVon3twkb7c5+/1/ndNf9XoNQtSecGXJ0me+wJMf/US1WqedefeFvruVUi/KtZN4o7U1ilarqo7JsmetZLluaN7KCpMwxpY5GBhHUTW5VxNQ1R47/8iju6/BoCyvG6Cor7vbSwMRpdSLdmNQsVMd81yBxM7QvOXHnmS01aXWaoAx1HsDTUBVe+78I49WAfR4RNFuV3lOea4DFPeJvtOVUl+RG4OK5xtIBGGoSafqtru27LxYWMDYaYZCmlIkCdHaGhc/8DBn3/SABsO3iT7Lapcryxf8YaIUaFCh9t5LdX66sey8JCG3Dep+c3eAop0OUNTX9O2hnzIK0P1SpdTB9VKen24sOxcMmWmTsomBaoDiYKADF
G8jDUSU7pcqpQ6sl/r8dGPZecSEWffU0zfQAYq3nZbv3uV29kvDnf3SNIUgwKQpxcIC4XjExQ88rGVtSqnb7lbnJ/MVnp9eTNm52lu3LRD5yZ/8SYwx/PAP//Dtekj1PNy4X+qxZKaFYHf3S810v1QppW6na89P2IChPYpMP7Ze7Pnpul42a2vIaIQ4h4xGRGtrOkBxH9yWZ/qTn/wkP/dzP8cb3vCG2/Fw6gW4cb/U4olk/HSXzCQh6PdZ/vTnNYlVKXVbXXt+MgiFqVGYlFiG1Q1eZD7Hiy07V3tjzz9NBoMB3/d938fP//zP8xM/8RN7/XDqBbpxvxQg4OllTru1TWs8pPfb/zdjYzSJVSl129x4fppxF6+/wVeQz/Fiy87VS2/Pn/F3v/vdfPu3fzvf8i3fooHIAXTd7I8kebqmHpBej2O9LTzggwBfS8GaF5wkpmXBSqnncuN5YuGVL8eXJT6pkWxukh07hgmC3dvv5HOUp06+6HwOLTs/GPb00+BXfuVX+NSnPsUnP/nJ53X7LMvIsmz3v3u93l4dmpp6pjbdMp5wan2VhvH0xXJk0MMN+4yDiFG7QzRNEnuupj9aFqyUei43nieM91xyJWUQUHOemWJCefEpNlsz+NmZ5xwjoA6XPUtWvXTpEj/0Qz/EL/3SL1Gr1Z7X77znPe+h0+nsfp0+fXqvDk9d456HHuTMO7+X4uRJ7HhM7epVTqxdZdZ6AqBuBGsszgQ0y5yZrQ1cED5nkthu2d3SEr5epzhyBF+v766onH/k0dv3RyqlDqSbzhO1lNZkxEKe0Z5MmLRarLZnAVjob1Nbvoodj6t8jh/4Xr2guQMYEZG9uOPf+I3f4Lu+67sIrllKc85hjMFaS5Zl1/0Mbr0icvr0abrdLu12ey8OU13DlSWf/m8fZPjB32Fx1KdlPEMsBiiDJkZKhuTEXhgEEXmccOz/9f3c9xe/7pb39bF/9pPVls+1bZSZLqmurVGeOslf+Ikf1asZpe5SN54nMIb65cs0y5xhENBwjkEYMzp5EkRIlpeZzMxy79/5Pqy1TPpD3e49oHq9Hp1O53l9fu/Zv9w3f/M389nPfva6773zne/k1a9+Nf/0n/7Tm4IQgCRJSJJkrw7pjrMXuRfdP/sciSspDBRVn0HGNsWIo+XHgKFvA5ouZ5P4GZPEbiwLlul9GUTbKCulgJvPE+NJzKz3ZEGAwbIdd3BBE8Zj4jjCpynpxjpf+Jn/TGMyJCody2HIEydOcPa7vl1XRw6pPQtEWq0Wr3vd6677XqPRYH5+/qbvqxduL3Ivdk8KSQ3JJ/RsncgYAimJfIEHaghDPDWBstli4ZUvZ+kzX7gpGLqxLDiTlGAyICyGEAZIHGsbZaXucjeeJ0qJGYdHCN369BYlqS9Iu1uYsiR0JTMI2eYyQ29w1mAK8F96kqWf+QXg6QR6V5Zc+cwXWP384yCGo69/FSde/2pdOTmA9F/kENqrluw7J4Wi3sAPDCITUrG0KDDVggYGCLxnZAzhy8/yhz/+U7cMhq4tu5OyZGb7EnVXYEXwxpBbyyR65hUVpdSd78by3IaskZbreCM4DO2yz6x0GQA9MSyIEFoQwBlY82BCS12Eo4MuT/2XX+XUV72Wz/3332L4P3+bY5MR8wgew8b7DU8ePc7L/tb36MrJAXNbA5GPfOQjt/Ph7kh7OcJ696RgDeMgYr7IaBpHNH0IQ3UCCEUoxCCf+zwRVMFQFGH7A9KnnuLyz76X43/7+/CLi9SeeopaWRCLZxIEOCBAmC1LugKTrlZGKXW3uql9QFojs3aaIyJ0xOMFNgVqgaEp4IA+lqx2lEa2yaAsySNL6ISF5Sv80Tv/IfdOBtQtmMDgBQqE2HuaV6/ctHKi9p/OmjlkbtxTdUQ4IuDFtzzesTuDYTAgi2NaxhMZIfeQC5QCXoSJQCywMOyT1xtQFNSvXmW2u0mzyDg+2Gb93/9/qd/7chJX0PaOiTV4DCFCzXl61
pKHIZf+54dwZYkrS5Y+8wWe/OgnWPrMF3S2jVIH3Ev1nu088FrEGJKrV2E0YtTu4IGjpSOkCkJiG9AQw5XG/Qx8lV+4UTuFrR3huPXMe0fDCMet55XTIEQwFAJiIDKQWqhbz5FBlwu/8Vt6jjlAdGvmkLlxT5VpM/ad1F+JY6KtLS49UvVueSEJrDs9RS78x//Cic11rAGPITZQBDXEO0qfEwH1wFOKgbVlsClePJPA4oDSe47kGasf+RiYgM3QkHiPJ6A0EYPQMZ6ZRQKLXV7h07/2m3Q//XntNaLUIfFS5Khdex9xnlMvcxprK4zCmF6tTlkWzEuBdUKMMMYQlX0KGxCJp10OSCTDCRRAy1QfaOE0CMGAmISrtdPU/Jgj2RIgNMUTXr60myivDRf3nz7bh8yNe6oB+W4QQn9Ae2udlivZ+Mjvc+4Tn7zlyeG53njGezpGiGyVGOIERsTErkdiqzc4gBeoI3hr8a4glwBvAgpryMRTKzIi51ifO8IoLwDIGvNEKRhjwDnirU2GD3+ISOQlzXdRSu2NlyJH7cb7mMzMMJlMSLa3cVFE4y+/ldf8+a9m42f+I9tfvozBIF44O7mEBQJjaIyfQATGQIghMQLGYIDc1lmqv5JWsclWcpLTw88j09+rW2hkE0ZbXW24eEBoIHLIPGNL9v6AmY1VWt6zGUZki4tQFDedHJ7tjQdw8b3vIxkOKKkCEDD04yO0ym2ioMoR2VGbJo15P0CAXGK2gpCaH+GNJbMBi64g2ljDGYM3hvFkzGhmBlotmEyoFzlDDNnx4y9pvotS6qX3UuSo3Xgf3sZYn4MxZM0myWBA908/i/26NyK9Pq/IxxipGgBYU+UT7DQEEAMmjFhpvJqZ4Reo1mQBYyiCGpSGZrnNTFFV4Xiq3285z5WPfQJz7hyRc3oRtM/07H7I3Kolu8RxtRLiPT0bMJidgyCAPKeo1Uj6PS78xm8hruTSL/7a9VcykwnJhQss/fv/iItCgixDBGqm2u4xRmiUPa5NJzLAxNYpCElcDy8QGKFjSibXJKamZY6lepH1g4AAaJY58cY6215IBn0AstlZjLWURIAhJNdeI0odQDfmqJUkVO/ZyfN+z954H71shvmNc8zkm1iRqtfQl57g/P/+b6htbtLAY0y13RJSBSPTRVk8EOFolFtMbwRA4ka8uvsJANKwz0p6D8fG56uzmIGG8Sx86k+IgM0wZuwTosDqRdA+0Wf4ELpxhHW0tUXLlWyGURWEYEkvL1F3OVYEENyXnuDSe9cJ83z3Skb6ferb26Qup5WNSYwwFkPdeIyNcUBEgTcBgVRbKzsngMSPsIREVCcGMETknBRhBYsRqImwPf2NjisZG8sosDRLx5HNNbbqTUYisNvELsCSP/2HvsgR30qpvXFjjpozMRPbpuOWqhs8j/fsaKuLLQp86QhWVzmRbRD7MR7BT08wR0Wwm+t4qsaKRdCm7Xo3BSFVfpxwJFva/T7AE62vYSZfYTG7QOKHjKWF8HTlX2IhdUI3sDRcQX9Uo2BC1LB6EbQPNBA5pK4dYX3pkU+y8ZHfJ1tcxIwn1LbHJBJS2hKHIRDHjPPUtza40qlWH1x/THNrSMPlZEHAWEraBhLjEanerJMwJSqLaqsFSz+YATwtV5XcliZAbEDks90tm8BA6iHwJYFUtf7jsEnHT1iQgtLBhGqbJnjwz5F/+rO7+S4hk+v/yK9gxLdS6qV3Y45aLAMCd83Fw/N4z/aXrtKejAjHA5oiJAZKEZwJyW1C6kfEBsw0tHAEfKn95zg6eYpjkwu75xoDbNROk7ox7WIdj2ElvYej46eouy4ybX4U+4yjkwvAdCuZ6oNvJhC8F3qBZXayRJY5svpilb+mF0G3lZbvHmI7I6xPP/Q1FLUa5Dnp9hbNsktJRoll2lSdyTSjvDYcIM7R2F4nwjIKQjyGRKo3tscgBqwUu1UupUnoRfOElIRSPn0iMAYrHmD6SGCNYcZ6juKp4
zEChbVsWsO6CZhgyA2MwpiFN76uKhfu9RDvr/vbdkZ8y7HFFz3iWyn10tot8Z++Zw1CSDUf7Pm8Z88/8ii93/t9rAiRsHuGqlmQIGUzPUO8U/0yLbuNjWM+u4wjxrBzngrYSE6TuiFWqryQ6kwnFDbh9Ohxjk2eAqrA42p6L5lJqdaHYTNawGNpGWHGeyLXp1mOYDyuDlQvgm4rDUTuADsnh2Rri9Tl5EEAGCbhDLlNqHnPMAgZGWi4AtvtUXcFwggz/R8f1CgxWKAUgzFQKweEUhBKxkyxRs0NSP1o92RQ8xMCKRkGLUpT9TKxQGgMkTFEGMaBxfoJYMhtSC+ICIF6mZN2Wpz9zrdRpnWitTVkNEKcQ0ajaiiejvhW6kDZyVF7Me/ZnSTVcDxmfX6BfNrbwxpwYqj7ISfyZSzspJwC1TllPl+hH88zCGem3/OMwhaJG1MvtxmGVcBwbHyemh/t/q5QVfdZybnQvJ+xqSMC3XCGQTRHYCBCqImv8lPKUi+C9oEGIneAnZODiyIaXqgGKnvSYpuZYkBuLIPOHMMgJhUweZU7svNmt1ISmJCBVFcP42iWkW0QSL67r7rzdS1Dlb0eSUkoBULV9KycLplERmg6Ibc1DBCKIxDHNUsq3PPQg5x55/dSnDyJHY8JNzZ0xLdSB9iLfc9el6TabtNN51lN7yUyhpqFrfo9RDhEBCdPn28EwzhocWx8nsLWgGr148zwMRI/5mr9FTzVeB1b8eJNj2motouPTi6xMLmIR7hSO8vMZIm43MYDRoR0miSLc3oRtA/0Wb5D3PPQg2w/dZHB//h1at4hGLzx1QjtmRlMq8W4KMj6BUmeVUuqImAcqfdE9OlhsAKtcpsQwZobQ4/r7cQTiR/jAWcSHGAlQ6YJ7DPGUThP6rMq4x0hw9K3lklvsHvsO/ku2lRIqYPvxbxnr010rRU5M8WIIErx06udtOwBhtgYShMwDlJSN8AgrCcnKW3MPf0/u+l+W8UmBSGX66+m5kbUXf+6nwtVK4IkX8NSbT8TREgxZgJkWNpGqrJg56uA6u3aR+R20jP9HeSBv/ZX+Niffpbk0iWyZhOiCGq1qkLGe0JXsrx4HBpNal8+R8c5htYwsgGRc8yaKrSoskbAS5Uudm08UhLgbEjk86q231SlvBObIOKwCI2yKgHOBFJjmPd9cqrl1olYBGg6R3/p6u797uS7KKUOhxf6nt1NdM0yOvmIRjmm7i4CBjEhqRsQ+zEYKGzKZnKKk6PHMQiL4/Ncqd+HNyGjoE3dPT2jqlVs0Co22Cq3dhNUdwhMe5CY3eX/k5Onqm6sIkywFMCmGNa+5k3c+x1v0YugfaBbM3eQIAw5+13fTt5qE02mFSgi1+3fnv3ev8p93/tddB/8alZqdTIbEntPaaotldjINKHLYE3VMGhsUyZE5AJOPBNTA6q1U0dV2lsvtgnEU3cDDFACkTEIsC6GdRuyaUO2wpDCVEFJ988+p/MelLpL7OSy1Xtd4uEIqIZnFkHC5fqrWK+dBqbltX7ITH6FnXXXTrnJa3p/hDcBV9N7mdg6AKUJudS4H8Eyly/TixYYBi0AxraOx4IxBDCtxIHMg5+evwoDzhiuHDvBm//J3+fkG16jQcg+0Gf8DnNjjxE7GOCDgPLUSRqvvI+LH/zd3a6qYqBZFiTAyBjGBiIRBEMy7Q3iBEpCIikYxAt0ik06rru7LSPAJGjSlC2sH0y7scLYQ8NWOSeFNQzDFpEUNIsRhQ3oNtrY1VWt01fqLhGEIWe+461s/7v/QKvICHb6jwmEUtDMV1lKX8Hc5BI1yagXvWpF45pFjsLGBJITSlUybMWRuj6FjcmDOv1wln44y+LkApmtEficmWIFj2DFTIfoCV0Tkic1rAhZs8nL/tb3aACyj/SZvwPdav920u3d1FXV9nqE4xFjYLPRZhK2WewtAyVHxBOYiEnUol1sYIG4WMdOQ5CdxkBWShrFFrLzDargo8TgxNMTi
7XQdBOMzxmEcTXwrp4Sbmxonb5Sd4nzjzzKxff9d85mE8JptYynyik7OXoCAxRBA2yEcRkeEAR7TZr8bL5CJ1/DstM2QFiYXKIwCZvxCe4d/ClOLFcbr2Qmu0roJiAwEcNWKRwJq3yRdYFGnFAeX+Ss5oPsOw1E7lDX7t+6suRj/+wnb5oPIUFANm0clEzGFM0akRTkxjDEMC85SbFRJaBSvel3VkIyW9XkJ64q50Wq7HQjMBKYAKVYrrZn8PV6VRYXhpCmVcOg0Ujr9JW6S1RD7n6JIxtrZAY2raHj/HT71yDVvDpmJlWH1p1qPaY/45qOqgZhO1pgpljbvV0kGcfHTxKII8BxfPQkVgoQWElOkUyuMMGzjmHcbPCZeIa3/s2388Bf/FpdCTkA9F/gLnDjbIfMNLDiCcNqOJ0Vz1GX0+yu0ECYESHbPRNUL5KdYVGeYPrm90Q+p9zpfihCjKGkuvq4ungcYwxxv4+v1cA+fV2zU6dfnjqpdfpK3eF2+ock/f405ywkxyBhQt0XNGSCneaoVT1FqvJdjyFGWA6PEItnxm1ijcEDvfgoiNBwXcBT2qrCZkckOSJVLtowaJGZOrEdMApiwmajqsg5dVKDkANCk1XvAjtlc0znQ0QyJmIMaUppLDMINQSHYdME5AJ1U9XV575aBSmCOv1whn7QYiQB4jJA2OmHmouh5+HxqM5KkmLbbc5+7199xsZHLghI7305y489qQmrSt3Bdi+EkgQrsJWcYmQS1sImV5KT142H2IyP0yWhELBGyAW8sfSiGQa2UW0HI5wYPY6zIZvJMXKbspacmtb6VQzgqC6o5gdfIHIDShtQdDrkAhLHNBvpfjwd6hY0HLwL3DgfwvJ0O3XZGacNeGMobUAPCMQRI9XSKWDdiMhNiPCMPMQWCqmSUl2Q0I0a5IwoTpyA8Ri7ukqt0+LMO7/3usRZ6xyBK8EHFB/9A8794Sd4YnGRs9+p+7RK3Yl2LoSKegM/6GGNpQzqJMUWmC5DgcALpcAgjnBhh3qxhhcYiqGRr+Fjz2pygmR0DoDIlNTyTYZEBDKiKVcYepkm2VeTwydiKAQGYuhHEdnMHDMBfDETFl51LydPLOzfk6Kuo4HIXeDY/ffxxOIi0dISRZLs5ogwHhOJZygGb8DZhLBq/8OqDcGX1Az0/HRbxngaBpq2+u+xVG94fFZdfTSamH4fMZagLBltdbnvL37dbuLs0h/9KZPffwQxhqLTqVZo8pxoaYmL730fgAYjSt1hdi+EAss4iDg6/DKjIAQMgS+qKbrTvLS8uMqgPYPdtiyIm86VAV/2aLkJYiBEqvYAklGTgoEIXdenERgueajVUuKyoI9hE0vYalOr12hOxiyPHJdOnOEdb3kIa3VD4KDQf4m7wDPNh7CjEQ0vZMayWm/TazZZnznKxsIipQkYYsikmj1TZbdXKyA7Tc5CBDGwjcXgmRl0ObKxxsL6Mq3xkP6V5d3HP3b/fQzPncd6R3H0KCZNMUGASVOKhQXC8YiLH3hYt2mUusPsDsrr9xl1OhTGUnclAQ5nBCPVuWRiDYPZeczMDFuzC0zE0jBVr5GJgUwmFF7wUiXFe6nySYYYktAwEYOPEpajGo8/8FWcfMs3cmxxgYYrKHs9njIRqw+8kXf8nb/Ofa84vd9Pi7qGrojcJW7VXyTKs2nGuWdu1MebAeMgYpLWSX3JxFi8ePpxijF1juYbhEYY+moqZl8sq0lKs8ipeU8WBDigWZYIwtaHP8r5s6e556EHb0qYdUSAEFBirKVotbDLK9pXRKk7zM6F0MX3vo9oPKLbbJOOBtRdQSowxjDBMgkTCALEOYhCNsOQsCwIDDRdSWkMV2wIpSeiqsyrW6hbw9jDKpYrUcrKiTO8469/FyfvOcmx1TVWLl5l6IVXnDjGyVOLuhJyAGkgche5tr/I0h/9KZOP/QGDIiMSGAcBAdAscxr9nHCavNoPE0YnjiFZwTBfwHqHN4Z6v0ceJ
cxMhoQIgyDEilD3nokN2J6dJ5qMufiBhzn7pgeumzMBUJqYSMZPH1ySYAcD7Sui1B3oxguhPKmRS8Jqq83MN3w9ndMn2Zg2W9xpwji89xWsvOJehn/6GeLNDYwIRZww6Mxwrj3PhdVNtpdXsVmOj2Nmjy/ysq96Le94y0O7Kx7hsUVOHrt5GJ46WDQQucvsbJM88Uv/g0iEjbmjzGxtUHeOibWMgoBOWdLAs2HCamBeEGDqAdRrVYb7aMTENwne8Hr8H/4BXoTUOQob041Csk4T02pRROHuKseNCbOJDK8/sCzTviJK3cGea1Dey77+q275M1d+903fN9aydGWNXn/IYDim2UhptxqcPLGgKx6HkAYid6HrtknSlI0goba9TbscYr2nMJCJZRwE0Ghc97vX9gA5+sbXsvLopygbTRChjOpILSU2Vfvla1c57nnzV986YRbtK6LU3eLZBuU908+e6funT+lKx51CA5G70I3bJLZZZ9xoUYy3oXRgDLXtLcokIVpbo2i1IEkgy6qAoV7nzNvfRtKs46MIwirpNAIgf/qBrlnluG6f+FnuUxsMKaXU3UXP+nehm/uKOKxxUK8mWspoRF6v0/rWb6b7Z5+7aXjemelsBleWL2iV49kG8p3ReQ9KKXVX0kDkLvSMfUW4PoB44Lu/A777O55xT/fFrHI81z6xUkqpu4ue/e9CLzSAeLZy2hezyvFs+8RKKaXuLhqI3KVeym0SXeVQSin1YhkRkee+2f7o9Xp0Oh263S7tdnu/D+eO5MpSAwillFIvqRfy+a2fOHc53SZRSim1n7Tzi1JKKaX2jQYiSimllNo3GogopZRSat9oIKKUUkqpfaOBiFJKKaX2jQYiSimllNo3GogopZRSat9oIKKUUkqpfaOBiFJKKaX2jQYiSimllNo3GogopZRSat9oIKKUUkqpfaOBiFJKKaX2jQYiSimllNo3GogopZRSat9oIKKUUkqpfaOBiFJKKaX2jQYiSimllNo3exqIvOc97+FrvuZraLVaHD16lHe84x08/vjje/mQSimllDpE9jQQ+ehHP8q73/1uPvGJT/ChD32Ioih461vfynA43MuHVUoppdQhYUREbteDra2tcfToUT760Y/yDd/wDc95+16vR6fTodvt0m63b8MRKqWUUuor9UI+v8PbdEwAdLtdAObm5m758yzLyLJs9797vd5tOS6llFJK7Y/blqzqveeHf/iHefOb38zrXve6W97mPe95D51OZ/fr9OnTt+vwlFJKKbUPbtvWzN/7e3+Phx9+mD/4gz/g1KlTt7zNrVZETp8+rVszSiml1CFy4LZmfvAHf5APfvCDfOxjH3vGIAQgSRKSJLkdh6SUUkqpA2BPAxER4R/8g3/Ar//6r/ORj3yEl7/85Xv5cEoppZQ6ZPY0EHn3u9/N+973Pt7//vfTarVYXl4GoNPpkKbpXj60UkoppQ6BPc0RMcbc8vvvfe97+YEf+IHn/H0t31VKKaUOnwOTI3IbW5QopZRS6hDSWTNKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jfhfh/AncKVJcuPPcloq0t9tsOx++8jCPXpVUpEGE4KSucJA0ujFgEwGOf0xzkA7TSmkcYYY/bzUJVS+0A/KV8C5x95lAvvfxi7skJQlrgw5InFRc5+59u456EH9/vwlNo33eGEpbU+3dEE7wVrDUkUkOWO7
eGEonQIkIQBR2cb3Htijk6jtt+HrZS6jTQQ+Qqdf+RRLr73l0j6fYokoag3ILBES0tcfO/7ALjnoQd1xUTddbqDCZ+/sEaWl6S1iGYaMcoKLqx0d1dHkigAhKx0LK31GWclr3v50d1g5FarKbpqotSdRT8JvwKuLLnwX36VIxtriAGbT/CDHuMgYtTpEI1HXPzAw4hzXPzg7+qKibqj7QQNRenYGow5t7TBKCsJrWVSlPSsYZwVZIUDwHlHXjisBWssIGz1x1xe69GuJ/RG2U2rKZ16jRNHmoRBoMGJUncIDUS+Ap/+bx/k2MoVAMY2ZGJTQnE0yzHJ5gbdZpvo4iWWfv4XibyjaLfxcQx5ftOKiVKHWXcw4fzyFpu9McNJziQv8QKhBRsZ8J7hpMQ5wQAy/T0BnAePxxgY5yUrWwPm2imX13pkuaNeCwkCi3Oela0Bl9a7pHFIYO1ucHJyoaVbOkodUhqIvEiuLOl+5A/pCGyEIQaLmAh8zigIqbuSdDQgdSXDKGF84gTGTouU0pQiSYjW1rj4gYc5+6YHdJtGHVpL6z0+/9Qao0lO6Tylr8IMA3ggLx0iXBeA3EgAmf6v3jDjylqfLHe0G08nsBZeyEvHJC+xxnB0pobzns3+mGGW88pT8xqMqENHt+01EHnRlh97knDQZ2IgAByG0I0xCGDIAkvbFQTAVhghgyFFVMcmEaH1GGspWi3s8grLjz3JyTe8Zp//IqVeGBHh6mafz3xphUlRVq99UwUcMA06BATBebDPc/dkUjg2+kOOdBq7QYiI0BtllN5Ti6ttmdJ54iig3bD0hjlLa33a9US3adShcctCh6NH6bzxdbROHr9rApM7+6/bQ6OtLgCDIKbpCjaSDqEvML4kFI8RT1M8HpiZ9LHjAeOwjWPCeGYG02pBkmAHg937UuoguzZxdJwXbGyPuLDaZTjJscbgpVrvkOt+B9z0G/6ZlkNuoTfKqScxtViIo4Ci9GSFIwosxkCelfTHGXEREIcBaRLQHU0YTgqaafzS/dFK7ZGq0OF9ROPR7ra93d7myLknCM49wXqUkKfpc+YT3gkrKofraA+Q+mwHH0VMooi432M+28QBNRFqRkiAyBpyLxgPozAgkCF150g21tkGCAJ8EFCf7ezr36LUc7m2DHeSlwwnOYihdA5rDd7LCwo0nkvpqtWWMAho1CIatRgv1XtpnBcUpWdSOIyBwFrqSUQSVSslSh10riy58P6HqyBkYaHatu8P6PR7WAQjYL3H12rPmk94yxWVhQU6X/UGWieOHZrA5GAf3QF27P77eGJxkWhpie3ZedrdTY66gtAIxXSDZiTVls2sEQrxOBMyDAwNV1Lf2qKIY8rTpzh2/337/eco9Yy6wwlPXN6oEkeTkNGk2M3nyJ3Hv5QRyDWcB4OnN8oYZyUgjJ3gpsFGaKt9IO89/XFOVgSM84IZNE9EHRzXrljUWg0whtXPPk50+TJFu42xlhEdZreXiMQzCiKseFJfMjKGYmHhlvmE5x95lIu/8EvUhwMwBhHB5jmtLz1B+KUnWI9qz2tF5SDQQOQFuHEJ7Mx3vJVL//mXSfo9AucYi2FiDDUBY4QtE+AsdCSgjaUrBd4EFMYw53KuhnXOvP1tBz5aVXcvEWHpmsTRovTkpZs2JSv2LAjZUfqqyqZ05U0/K1xVaSPTZJQCuLrepxaFRGGgZb1q3127YhGPx9TLqpOwt5ZFV7KZZ4xn58lqR7G2ThYUgGEQzRETE/b7lEDRbF6XT+jKkpX/9huc6a5R90J8zYZoidAXi/UOn6aHokJTPwGfp1stgbk0JShKWsWEowjeQCSGraBGhpCZ6sptICEdyWh5T2483hgmGGp//qED+8JQd49rcz8Ca6oPfi+EgQURuqMJ9VqImeaBiFTJp+Pc3Z7je5bvi7AbjJTOc2F1m/44r5JY04S5TkoaR9pvRN121+aAuDCkVubE4kHAiuCBtitINtYw9QlpPiALDKE4Z
sscGwYEwx7FeMDEhrjA7uYTrn7045y4fIGmd4Dhcu0e5vOr1P2Y2Bo64inLghE844rKQXLwjug2eKHJPc+UVHRy+QoCrNdSomxMbiAGrHi8CWi4jDZCYnICIDHQF8N6GFGkDe792j93u/5kpW7p2tyPLC8Z5yUGSOKQWhwShwGTvKQ+nQ+zk5SaZeVLmhPyFbmmNNh5oRaHeBEurHb58soWjVpMLQ6134i6ba7LATlyhPqVK8QiDIIIARqu2t6sLl4djckAjCf20PKeutliXHbZtOARWmWOOEN/6SriPe5PHqXhHYIhFxBxOBvj/BgLjKJ5YleQD0Yk9fqBr9C86wKR8488yoXf+C2ipSXCsmQlDHni5EnOvuMv33J14lZJReI9teEQTzW+uJFnZMZQ2oAMQ93l1D207HRZefqCmwg0jadRTniShuaGqD3xfNuiX5v7EQSGSeFw3ldbHXlJGof0RznDSb4blDjvKUtPcQCSQq1hmiNSBSIWQCAvHKOsQESqk70XkjDQfiPqtll+7EnsygpFuw1ZRigB3WSeqOyRB21gQNNN8FhC8dRcSWYti95Rs4IXQ2SEeYFCpHpdA9t/+jnKb/w6/MXLu58tpYkYRvMENqJedhGBPEhwQQM/uIoszB/4Cs27KhA5/8ijLP3ML3Bk2JumkwIFyLknWPqZZeDmPbRrX1DGWiamRbi9zFGXExjBApFJGJiIRtmjF0Zk1nJEHBYoqfJFCjE4DAWQINRHA1xZHshlMnV43WrI3K1WAq7N/WjVIzZ6Y5z31KIQDGS5Y5SVzLdq9EcZy5sDwsCQFwcjCAF2m6TtMtU2zTDLcV5IogAvVRM0AdqNWPuNqNtitNUlKEt8HGOGQ7yJ6SbHOVL2yIM6WdigPrrIdr1Fko2ZdTkIpNOXZC4wDJskCHN+iIiw6YTZLz3B4x/4XWbEVx2rDFgpCf2Ey41XszA+T4DnSHYVL/BUaciynMS7A12haff7AG6XnbkwC4MuFiEPQsZBRB6EWISFQZcL/+XXcOX1SXE7LyjiqjdB3FtmsbtMjSq4mGBI/Ii0HNLA0y4LYu+JgLGvTvgTU2diUgIjTDCsUTU7++LDv7cPz4S6U+2scGz2xyRRQLMek0TVSsATlzfoDie7tx1Oit3cj9LJNT06DAZDFFqyomSUlRhb5V9Mcof3ByMIgWor5trtoSCwRGGIc7L7t1hbVRN4EYwx1Gvhbr8RpfZKfbaDC0PIcyQMSVyPheETADSKNZrZKt4YylaD4fw8y7UGttOhlOpzJbWQhCmj2nEcUGCILHS8Y/CxR3AilBgQiI1wavgFXr39+0R4wmlTQWPglPW0V66Qbm0ixxYP7Cr8XROIXPnMF5hbXUYMjIKI3NRwJsFhGQURYmBu9SpXPvMFXFmy9Jkv8ORHP8F4cwtng+oF5T2N7U1CETLAINQRYgOpKYmMoWM8M/iqvbWBLpbLtsFy1GDThnSDiEkQEAKjlbV9flbUneLG6pYoDLDGEIUB7UZMllfTbWXadKyclt0GgcVPP6jtNSsEVW+QqnRWfLW94b3sNic7aASQaYKtiFA6T5aXZLmryhqnf1sQWLwX7Tei9tSx++/DLy4S9XqI85TG0nAZgmCkpOkmjIMIkoRoMMA0G6SDPh5DJlBisOI5OrkAQY3EGmasIbWG2WJMNs4op3uRXiAxwkzZJbims/HYCyMPR52jmec0Xnlw+4kczKPaA6uff5y2eHrWUrVgr2GBYFpOlRlD23vO/cpvcO6Xfx27toYtqo4gjTzDjYcMZ+dIXcEosMTOE0QtkBznJgRAbMDuNnmvlo6HgSWSPjhwJgAg9EIJ1BcX9ufJUHeca1c4btxyuHEloJnGhEE1MM45jzVmNwk1mP6u94LzwijLcAcmK/XZlV4YT7dlpvEWAkSBJctL4iio/l5rqoogpfZIEIY0X3UfwblzLA77ANSM0CxzxhhGNmCS1onW1ylqNVIRQnE4gZYFsQHLtRPUh
3224uPU3IBOsYYVISo9aa8HAYQGLFKtDmKoPt2q170FUmvYCGO8tWRPPHlg0wEO3hHtFdk5OVf/NxRH6MaE4mh4R4pQs3DmiccZWcN2UiMtc1qlo4YQGWG4uoI10LMQItT8iEBKxFQviB07ra5bFsQ7Nq2hnAYhgqflPWtxwgNv+QssfeYLh7o1rzoYrl3huJUgsPis3F0JaNQiOvUam/0xrXrVlXQ8HSa3kyOSF+7QBCE7ihuWbCzV+3F5q9pnFwxzrZTGtApIqb1w/pFHGX78j0nCEO8h8Q7x0DBCTTzbtsrFKk+dpPPA6yg+8EECL9WKfdgm9ROOj84RScFMsYaR6n1rgLYVAoSxq3IQY1NVs+0EIKXAhqkzCBoUZpPRyZOQZVo1cxAcff2r2Hy/pe4c/dASuyGhOGa8I0CwIjigNIaaCK/MhngMpQEHGIGO9YTG0BIILBS+rH4WJJR4rBQgMBRTlesiNI3gnGMjFGrO0xJhjCF77Wv5xL/86etb8x6CDnjqYLpuhSMMbvr5jSsBxhhOLrQYZjn9UUGaROSlZ1KUiIfClbtTdA+zILDVe7X0rHZHnDzS4uRCSxNV1Z65ttJycuIEE6A/mUDpEGOodbeZzM5x39/9Xznx+lfzpd//YybZBKG6oO0FDbKww0J2iUE4Q80NsVLSC+dplxsYIBeDp6qosVSrIt3kGGExwJUDXBRRxh1mxuvk29tkQUh9OOTL/9f/ZOmxJ1h801dx+mUnsfZgrAzelqP4d//u3/Gyl72MWq3G137t1/LHf/zHt+Nhr3Pi9a9m4+ixqtzWldjpSkhYjQfFGMjEMDFViS02pjAxk2lCUGirJyvzsrunHhlILBQ2Ae8Y2habts3IRDhgw1sKgaYRTjnHwrRSZxSEtD79GdILT+HrdYojR/D1+m4HvPOPPHrbnx91uO2scIwm5W4eyA4RYTQp6dRr160EdBo1XnlqnrlWCkAtCgisJbAGma4gTitkDxUDRIEhmG43eQFrq+2mhZmGlu6qPXVjpWUZNCkaRzHtFrbVJJubI5iMsUFAEIYs/c7vcdwIdSvUDETiKU21RtALZ9iMj+FMwEZygtJURRNOwBAwSRaQ6YpIZhu4qEUnNCz4Li+fnGPOCKd7W7x+a5V7fcbZP/sUs7/ya1z4x/+MX/yn/wdPnru0n0/Vrj0PRH71V3+VH/mRH+HHf/zH+dSnPsUDDzzAt37rt7K6urrXD32dIAx52d/6HlabHTzQ8I4mVZ8PD6zbJptBm4ZUUenQ1AhxINUJTabzzR3VzwPz9FTRhusR4qn5yTTJqOqaNwwtXbGMPKyK5Ym0zfmTZxADHV9SK3JM6TBBgElTioUFwvGIix94+KbqHaWezc4KRxIH9IY5RenwIhSlozfMSeLglisBnUaNV5+Z597jc9x7Yo6vuu84D9y7SFqLsAbCoArSD5OdxmaBrY49DgPqtYggMIQH5ApQ3blurLQsiBnbmadvkCRY5xhtdfnkL/wqpx//ApF5Oq8wkQmpGwBQdwPGQYNASo6PvkQoVbXXOJ7HBym92kmsjaYXDIKVssqLMtWWTUh1IZwYIQBqAdSt8ApKvvbJz/M//98/eyCCkT1/V/6bf/NveNe73sU73/lO7r//fn72Z3+Wer3OL/zCL+z1Q9/knoce5OTf+3+yfu99dMOYXKCHYSVM2GzEBDKmZqp95JrrE+CQICaLZ/FSJQMFNiQ3ARMxBNMT9M7enJGclutSk5J8mrRaM4I3hs2ji7hjixjniL1nOwyIREi3t3avYI21FK0WZrqXp9QLce0KR1Y4BqOcrHDMtdJnbOLVHU744sUNvnR1k4urXc5f3WK1O6rKX6nKYw/jDo0XyJ3gpqXJk7y6qEiiu2Y3Wu2Ta0t3AeqyTdtdfvoGWYYPAuJ6Dfc7v1tt3wu7K/ONsstMUVVUNsoec3nV46omI+zOqnrYYhg2OTP8PJFUF62dYo3EjQh4eiUzMBBRXRhnQnVBL
YZCYM563rRygYd/5w/2vSx/T9+VeZ7z6KOP8mM/9mO737PW8i3f8i18/OMf38uHfkb3PPQgZ9/0AJ//zQ+x9Gu/TpnW8Z02GEMx7BPK0xnHpYnIgiZHi3WMqZKATBAzJKBwQ+ZFdsulgmnFTAgMBAbWUneewAgb1iKlI5uERCXTfBRLFlgwKcXEEKfTAzzgHfDUwdZp1GjXkxfcWbVeCwkCi3Oe4bhaUZFDGoRca6fpU1F6kiggDnVFRO2tayezF0mCsXZ3e1O8J+r3KU+dZOviEkeKjNIYQjyb8QmMEcRYZvI1QimIJCcssioX0ZjpfCVhcfwUXiyRfTqA2EqOMze5fnWjMDG9eJ5OdpVudIRJ2GJx8mXGrvrwPx0I/79PfZqlK3+J06cWb9dTdJM9fVeur6/jnGNx8fo/cHFxkeXl5Ztun2UZvV7vuq+9EIQhr/0rbyE/c4Ygz4DqH3nYaFPNPqyWtvAFjXydgOlQLYHEjwnLAXWEQbxAQbQ7lMtQtZ0OEJrOk1IludZEOLq1zrGVLzOzvYqlWiZzWOplj6i45u+cRssHtQOeOviMMTTTmJlmjWYa3zIIeba+I616TOE8d0qnjZ1tmnot4srG4KYcGqVeSkEYcvY730aZ1onW1pDRCF+WsLVFeuUKLgg4/e1vYbK2QcjOeAJDL57jcv0+1pIzXGq8evf+zLS8XqT6HBKgGy/Qjausw512Ed3oKCvpy3dX6AXwJqAfzWMw9JNjXK3fR2FTDFTNOA20+j0Gw/Ftf56udaAuD97znvfQ6XR2v06fPr1nj3WrF0s502HDVG2hBQio8kEmslMmVS2hBUDdQKtYAymYeJj4p3NGEltVzAyADEtuLKMgwBtH6koSgUZZJcwKwLRkdydaPsgd8NSd4dn6jpROcM4fuiTVZxIGluPzLebbqXZVVbfFPQ89yJl3fi/FyZPE3S4Lly5wfHuDepERFDkXP/ghytGEErDTUQWnB5/nVd1PcHbwmaotxPS+PNVny9gJ+bTzqpGS0AjORLuBxyt7f8zi+MI0CDFsx0dJ/Jizw89hjdDyA84OPkfkx7ur/iIgQUizkd7y77hd9jQQOXLkCEEQsLKyct33V1ZWOHbs2E23/7Ef+zG63e7u16VLe5tEc+2LxY7HRJubDJIaV0zMJUJWTEAfw1hMVeILlAgtKyTTZCAJangbUQBjEXoeer5q/d6LYtbCmEgEg8FjGQbVbSNgxnlya5E4RkYjorU1ynqdM29/m/YTUXvq2fqOZEVZDb/bh+N6qVlTVRS10kS7qqrb6p6HHuTMd7wVF0YMo4TlmSNsnD5LMTNbbds89gW2gxCL300Ij6VAjOXay4CdoCEzlnFQZYm0iy0a+RqX6q8mM3FV+isFqR9hgK14kc34OLlNdvNFjk2e4mixPM0hMaQGBl4wr7yPkyf2t7nmnn7axXHMgw8+yIc//GHe8Y53AOC958Mf/jA/+IM/eNPtkyQhSZK9PKSb7OSMLD/25G5jsUm3x8UPfohgeYUwmxC4krwssQgJsjuYqMojiTHG4eMWzWKD8XRwUcMIE1fQbbQJRgHeNgj9Fh7LxFpi7+kbSxYlRJub+CCgPHWSM2/XPiJq7z1b35Gi9NwJn9XWQBIFOF8NvjMG7aqqbhtXllz84O8Secf4+HHIMsxohIQhxZEjROvrDOster0tFkR2P4xTN+Ds8DGgGn5XTBudZUnCnMvJgNxXKQDBZAURP50AWX0wOYFhNMf85DKhz3aPxwBGqhBnJqh6kHzWJHzb3/iOfe8nsueX3T/yIz/C93//9/PVX/3VvOlNb+Knf/qnGQ6HvPOd79zrh37egjC8qdvcy77+QZYfe5LhxhZL/+M3Kba3yefnOXrpAjN4Aqols4brVdUx5QhjDNF0GweBWIT2cECv0SSajIi8YCmr0l5jGL/1rZz52jey+tnHwQhHX/sqThzArnfqznNtZ9V2wz69PSMwyautCztd8z2sMYk1hloc4
rzgvCcvvHZVVbfNTj8RF4akV67ggzma2QYYYWxDsjgmzDMuvfLVRE98kVk7rZ4EetEcUbZJOc1nWvWWWhxjxwWhVIPtxMZsJIuY4fpuTsjOHURFl6TYRgzX5TAyvZ2IsO4si9/5bTR7XZ74xJ+SnjjGyVOL+xKU7Hkg8j3f8z2sra3xv/1v/xvLy8u88Y1v5Ld/+7dvSmA9aHaCk6XPfAE7GFC028g4g2iOvNwmkpJR0KjyRfyQaacREguBCLmAw5B6Rzke0Js5Qp7FGARvA6wIzXaDc7/yG7vdVc9/+KOc0+6q6ja4trNqb5jvVs2MJwWTwhGFdjrk7vBu0HiR3bkzo6ykUYu0q6q6bUZbXeLxmFqZE4twub6Al5xWscmxMsO6DCeGKxcvcqk9Q9DbrAIMDFfDRWbKnKQcMDKGPAyp9weU4bSQwoCTklaxRjYtx01tFWxMvLBmO8TBAMougRGG4SyToEkyvszEGDITkMSW5OMf58JHP0bmYaNWZ/iqV/PQO97Kfa/Yu/zMWzFygFPIe70enU6HbrdLu93e08dyZXnd9szO3JcnP/oJVn7+P1PUasxtbXACqfb0MPTjeeplj0Ty3agzswmZF2KfERgofNUOfiyGMYbcGIzARqsDcUw4HlO021Xzmzwn6vUo0zpn3vm9GoyoPdcdTlha69MdTaaD7jzDcUGaBKx3x4e6zbsBAltVEJ0+2uHUQlu7qqrb5tKnPsvm//6vicUzCCIESyCOWVcSIlgDBXCxM080GjGfTZiEhtAL3qQE5KTiqONJjLmusWDhhbEYCgxiquKIuqm6CI+kutComadXP7aCDr2gxTgeEScRc6MB0XjMubgOR46QGkjHIzZyx2eOn+Ed7/rrX3Ew8kI+vzUjkmpA0YX3P3zLuS/12Q64kqMba9QBu7t8JjTKbWK5oQOqgPdQUCW4JrZ6MYRUZbx2utQ91+8yihPGx09gdpbC0pQiSYjW1rj4gYc5+6YHNGlV7akb+47khePclQ16w3y/D+0rZi1EUcCrzhzhZYszuhKibq9rtjs9MEiOcXJ0mRAhM4Z0Wnjrk4ReWqdx5TKhMaweOUpaFhztZ7SdJ5gGFG7a9XunvcQwbCNSErgRAwEwWIQlZ5kNIRKhFBhiwfURk+PnTzLT38LkGT2BaGYGk8Q4YBBFLI4GnF69ym9/6BHuvee7b9s2zV2ftXX+kUe5+N73ES0t3WLuyy9x7jd/l8XJmKO2msAbGHbzQ4wvd/fm3LT5U+QzAqpRz6XsdF2tGtHEBryBlWabFCFwJew0qZkej3ZXVbfbtX1HFmbqpHHEYJKBgSQ6vKeIehLRqSfMNlMNQtRtN+kNGIUxmbU0XYm3TQhSHIYEYTU5xdAmGOdIagmXowRTemaLCXOuqGaiTbdqchMxMjUc088KYyjCBhvRUbrG4m3AphiGHnwSsdye44oJGE0Ht05swHhmlsQ74smEsvSMwwhfb7AdnJ2OMDEMkxovi2Dt8S+xdGXttj1Xd/Xl9rVTEouFhetXJoqCY+urNNeXqVlg2vxlp1NjIDAJUnITkLoBxkA3OkK96FP4rKqoCWoIQuZzNmyMBQJxYCwl1Wjo/njMpHGSmvQxTFdXtLuq2ifGGFr1ZNpuuvrwtubwdVi1BhY6KU60Skbtj/pshzxNyUmpjUacHHyeDkJmqmZivaBG3deRsOrlY+bmWV6+SjYuea0bEU3ffwWGL9VfR2kjFspVFidPYYEj+VXq3rAFbGLphglNY/ALC9jhgCyKqeUZHug22kg9xQ4HhFnGFjCemcMag5UCT0BASRmE1K3B5PltbXJ2VwciN05JdER4LFF/g9nNdRoIsYFCDJYqEdXZGqOgQafYIHFjnJ1m4As08k2Caq2EAEFsjHEDeiZgFLYJJKddDjBI1dZXBFuWpLJ1ffMo7a6q9lE1IM5izDVVgYcsEKnFIVkhzLe1Skbtj2tbvY9OnGDS7UJ3k9wYchuyMDrPIIwhnQOqZ
FPfbLJ6+uUce/LzJOIwGDIMg3iWyE+I3RCYVrIJZCKsNjtkrTadXpfy1Ene/M//MWtPfJnRVpf+0lW6f/Y57Ooq4cYGYqBI62w5g6/VSPC0/ZXdYw5dSeYFqcW3tcnZXR2I7ExJ9NMpiZYSR410e4tEPN5UlS+xgYlNyQQ6fkzDTXBmWt7oiyri9GDxNO20AytQc30G3pCFwe4QPY/BpXWy8ZBOWSI2uC4IuXYWgXZXVfuhncakcUhWlMRxQFRaJkVxIAbghRacf/a4KLRMm7UZrZJR+2ane/fF976PaH2dotlkEMS0ypxICorpdokx5unz/umT/PV/8H1c+Il/g1+5SmAgFM+rNz+C2IhEdkaSVMFIhsFGEc1ed7cZZlyrXdeOwv21v/J0IcZMm9knvsjo9z7OE70h8XznmtJ9oZFN+GIBC6+697Y2OburA5HrpiSmKQYhGm2QuoICQ5OqI6oDQj+hNDVKqQbcTTzT1ZJqquHIGKxUjWag6iVSSJUVXaUkCYnz0wi4RmFD+gFEwwGFNZAkkGXVi1G7q6p91EhjOs0aV9b75GVRTeGlWh2xVCfBMLBk5e3vMFL6aWtqqvyrNK76ROauxLlpE7M4IrSWM0c7WiWj9tVO5eNOMYQLLOLMddsljEbXnffj48c58Y1fz8av/gZ18dhpMIKvtlmcgVCq8vTcGCjdszbDvLFPlpvvcPSpy4y/cI619S3iVoO6NdTGI5Zzx6UTZ3jHWx66rf1E7upPultOSSxLrAi5hdBDYWv0bY35cosgMPSlQccPq1puqhP01ahGs8wxBraDkKYrKDFsJCnNfEKrLMDAxFgmaZ1ofZ2s0yH8ujdRPF5tD9nBQLurqgOhN8rwXggDO22HXlV7OUBMVYkSBxYvUOxDC1ZjIAostSjEWoOIEIUJYWipJxGBrRLAZ5r7Oz9DKbi5e/eN2yW3Ou+nb/56so/+EcnyEpGANYITM53yDpkXvmxj7Le9hVd87Vfttpt4PoJTJ1n4G/8L/uEPE3/6Mcb9PpkIy0md0QOv5x3fqX1ErnM7+ojsVM2E4xFFqwXOs7C+TCBQR0iM0MMSUs2HCWxIIgVCVRo1FsPVWoPs9Gk6b3wdrZPH6V9Zpvupz2DX1ognY+pFVQo5CmPyNEWOLe6+6J6pf4lS+0FE+MKFdTb7Y5LI0h/njLOyGgQ5bXAmIlhTJYFmRUnpZM9TSAJrqEUhjTTEOaFZj5lkjmY9Asx0cnB1Bdcb5sy1Ul5z9ohuy6gD6fmc993lJR77//x72k9doGmrWWce6HvhXL3DPe/+O1/RBat4j1tdY+XiVYZeXvLOqi/k8/uuD0Tg5j4irfEQK8J2XONl+ahKCpqW2dapTsRbJmC7lmIFWn/tu3jtX3nLdS+ka19otXYTRJj0hxpsqANtMM757JdXSKKAKAxAIC8dfhp85EXJ8taQmUZCHIUMJzlZXpKXjsK99KcSayCOApIwxIknigKyrCRNIrKixBrLbKtGvRbhnGc0KUnigFeemtdtGXXoifdMLl3m07/+2/QvLOFnOrzqbd/E6a96/YH/DNFA5EW4NnDoX1mm9+GPEo7HNCcjZn01MCuiyv1YDSJ6nTmiyZjy1En+wk/86IF/USj1fGwPJnz+qVWa9Rh7i9WE7mDC8uaAJH56W6R0Vadh5xylk5dkNk0SWZKoajsfWENZeoZZQRhYQmtZnG1QOs9mf4IXoVmLSOKQTr3GyYWWBiFK7TPtrPoi3JjQc/7saS68/2EGly5SG4+IBDatoVtv4RsNosFAk0rVHefZpvJmecn2MEMEAmOIw6DashFwXgjDADEenCe0lsL53aTSMDCU0xWT57ryadYijDHEUYChCnYmRYlINc8pTSKSKCSJq6Zlm4MJrTTmlafmaaaxbscodcjoJ+gzuDbB6PIffYrRpz9H0B9gvYPJRJNK1R3pmabyigjdYYZzjiQKpt0dITCWNDZkhSOKLE1jaNYTFmcanF/eZm276
nuAMVhbBSzPxlBVA4TWkOXT4XviKUqPnQYn7Xq8O0rUWEO7HpMVrmoKpUGIUoeOBiLPYmeV5OQbXqNJpequ8ExTeSdZwWCcE4chnWZCf5TvBgrWGqw1jCclCzN1Th5pc2Wjj8GQxhFOPN5XuVVQxRDWGpyvVjisMQSBIQ4CstLhvBCFhiS0TApHXjgAmmnMfDslia9/3wWBxWfltMJHKXXY6Cfp83Tj1o1Sd6pOo8YrT80/PZV3+iEfhQFHOim1JCIOA3qjfFo1UwUAcRhweqFNd5CR5Y7ZVoIXzzgrCWODF2E4rhqj7QYh1pBEAXEY4ERoxTEgTHJPPQlpxyFRYBllBTONGlEU3HS8znms1VbuSh1WGogopW5yq6m8X7q6STD9sE/ikIUo3K2ocdN8kCSKuDzqU6+FWGtp1xOK0lN6TxRY6rWQ0aQaFhnaasUkCKv8kTCwzLVSwtDSHWTcc3yW2VZKPQn54sWNarsotNdtv4gIo0nJXEtbuSt1WGkgopS6pZ2pvFB94K93R9fnjkxLa0WEXu6Ya6VVToeX6wKW+XZKb5SRFQ4Rgw0MVgxhEIAB7yGNQ9r1hCQOKUpHHAXMttLdx7/VdtG15brayl2pw0sDEaXUc3qm3JEbg4HA3lx1k8QhR6KAovTkZdUArRFH9MbT+7GWaLrS8UwrHLfaLrLWMNdKtVxXqUNOAxGl1PPyfIIBEbll1Y2Zdj4dZzDfqnPiSJMnlzbJcke9ZhGgLN2zrnDcuF0UBpbGtNRXKXV4aSCilHrenisYeL4rJy92hePa7SKl1J1BAxGl1AvyXMHA8w0ydIVDKQUaiCil9sDzDTJ0hUMppYGIUmpPaJChlHo+tAOQUkoppfaNBiJKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jcaiCillFJq32ggopRSSql9s2eByFNPPcXf/tt/m5e//OWkacq9997Lj//4j5Pn+V49pFJKKaUOmXCv7viLX/wi3nt+7ud+jle84hV87nOf413vehfD4ZCf+qmf2quHVUoppdQhYkREbteD/et//a/5mZ/5Gc6fP/+8bt/r9eh0OnS7Xdrt9h4fnVJKKaVeCi/k83vPVkRupdvtMjc394w/z7KMLMt2/7vX692Ow1JKKaXUPrltyarnzp3j3/7bf8vf/bt/9xlv8573vIdOp7P7dfr06dt1eEoppZTaBy84EPnRH/1RjDHP+vXFL37xut9ZWlri277t2/ju7/5u3vWudz3jff/Yj/0Y3W539+vSpUsv/C9SSiml1KHxgnNE1tbW2NjYeNbb3HPPPcRxDMCVK1f4xm/8Rr7u676O//Sf/hPWPv/YR3NElFJKqcNnT3NEFhYWWFhYeF63XVpa4pu+6Zt48MEHee973/uCghCllFJK3fn2LFl1aWmJb/zGb+Ts2bP81E/9FGtra7s/O3bs2F49rFJKKaUOkT0LRD70oQ9x7tw5zp07x6lTp6772W2sGFZKKaXUAbZneyU/8AM/gIjc8ksppZRSCnTWjFJKKaX2kQYiSimllNo3GogopZRSat9oIKKUUkqpfaOBiFJKKaX2jQYiSimllNo3GogopZRSat9oIKKUUkqpfaOBiFJKKaX2jQYiSimllNo3GogopZRSat9oIKKUUkqpfaOBiFJKKaX2jQYiSimllNo3GogopZRSat9oIKKUUkqpfaOBiFJKKaX2jQYiSimllNo34X4fgNp7rixZfuxJRltd6rMdjt1/H0Go//RKKaX2n34a3
eHOP/IoF97/MHZlhaAscWHIE4uLnP3Ot3HPQw/u9+EppZS6y2kgcgc7/8ijXHzv+4jGI4p2Gx/HkOdES0tcfO/7ADQYUUodarrie/jpv9YdypUlF97/cBWELCxg7DQdKE0pkoRobY2LH3iYs296QN+0Sql983wCCfEeWd9AxhNMWsMcmcdYqyu+dwj9BLpDLT/2JHZlhaLdxlhLSY3ShNRkgLGWotXCLq+w/NiTnHzDa/b7cJVSd6HnE0i4y0uUf/wnuMtXGPf6FMZiTxxnNH+Ei7/5u7riewfQQOQONdrqEpRl9eYEQiYEck2RVJJgBwNGW93nvK/dK5bNbep4JEkYmYD0xDFOnlrEWi2+Ukq9MNdtHbdaeOex2YTkwgUuvveXADh75hjFb/0OvaurfGl7yGpvRFAWzD/+ZerZmATL5MQJXfE95PRf6A5Vn+3gwhDyHNIUAIN/+gZZhg8C6rOdZ72fnSuW+tUrtCdjRq4kw7AVJay0Zhi+6tU89I63ct8rTu/ln6OUuoPsbh2PhrggoL22Qs07BPDGYDYmXPjFX+XEt3w9vaurfPLqFuNJTqtVJwobDHp9jvS7TIxlPMmo1ZvkpklNurriewhpIHKHOnb/fTyxuEi0tESRJE9fMVDtt0b9PuWpkxy7/75nvI+dK5bmsE8HT+lK+l6IjdApJjSH22z9yR/xwS99ma/79r/EkVZdk8WUUs9p+bEniS5dpJlNmBdHFs5iZUDpS8YGIoSTq1dYf+STXJKA8SRnbm6Gwjax0sdaQ2kgxTPe6hLW5xjZOWpuusL7AlZ81f7TT4s7VBCGnP3Ot1VLn2trFK0WJAlkWRWE1OucefvbnjFguPaKpZkm+H6fbYEwjBERjvgcPxkzB5xdPs/Wzz/Feq1GnqaaLKaUelaX/+hTHBmPqCNgYC2aJwqaLEwukXgYWGh4KK4ss9aYodWq40zC2M4Ruz6EIYUxJEA4GWM2lphJt5A0wRjzvFd81cGggcgdbCcQ2EkGs4MBPggoT53kzNufPVDYSXY1zQbxsM8VGkgcErshZXICOz5PhCBAbAw5nkmRk6epJosppZ6RK0tGn/4cCwAGcgzz4/NspvdSxLPMlFvUAbEGcTmdYR/XbmKZ0HCrGEDSFDGGlne8zDtsdxPpb5HbgFGSEpYl2Zkzz7riqw4ODUTucPc89CBn3/TAC66z30l2DRp1jHgmQZNUhsyUEyToERshBjxQCgzFkoinNhoxOnGCaH1dk8WUUjdZfuxJgv6AvrUcFcdmOAs2YrFYIZYcgMhA6T2BN7zaj/nSyjLZseNEZgyA6Q9oieBECAwYAw0v1KTAjHK6Yrnc73Phjz+tF0OHgH5C3AWCMHzBCVs7ya6mLBFjmcmvEntP2wgUqwiGXjxHWAyJyJDGWbJ8mdQVjLJMk8WUUrc02upiXck4rSOjPqmpLmiarkcABKa6XWAMxkAiwquyAZcvfpnt2QWCtMZcd4NS4Jy3vNzCnBHM9H6cGAoLcb+vK7OHhNZdqls6dv99+MVFZDAkTxJa1tAQjzVCKdWZoh/MAULuoeaHNH2OFQ+lq5LFipLlP3uMJz/6CZY+8wVcWe7vH6WU2neNsmCuyOhkIzwwX26yWKwQUgUh46CBm55jSoESsBiOe8fJjRXSjXWy0vFlB0cCy7h2nKEP2PLQ91Bg6ABBLSEcj7j4gYf13HPA6YrIHeorbXt8bbLrYNBjFmjZ6uQQGeiHM6SuT+RzhghpvkZsoI+BMMBubdOeDOn/zocYG6MdD5VSFBcukn/49wiKAhFh1RoWRWgYsAYE2I6PEQUZjXwN4zPsdOslAELnKXt9eljaSUjNO7bDWSK7TShjMpuyEi1wNrtAazRkrT2jK7OHgAYid6CXqu3xPQ89yMYXz+F+53cJnSPaOVkIRFJinCMwUMdQAgHCmACKgmO9LXJjKNrtqlpHOx4qdVc7/4d/Qvaf/ivx5gbee9pWaHuwxk7T3sEAc9lVnqq/h
tJYIjemJiMCNwRgjOVIKMwJiDhGXjg1/Dy9oIl1AYLBWkshEBYZURggzjHc2GLpM1/QeTQHlP5L3GGe76C751oxEe956nd+j/wjH8OI4ctJnWPZiESgYzwNN2DohcIYQgMR1dXMKPcc3VgjADabbUjTqpxOOx4qddc6/8ijLP/H/8qJ/hZ9L6TGIBjy2lFm85UqAqE6hyR+ROqH1N2ASdAg90JcDjGANzDCUAOa4ukFBi8wio4Q2SHNYp3W+MuMMTgvRP0+QZGz9mv/F8ujDOt0Hs1BpJ8Ed5DnO+hOnOPiB3/3GVdM3OUlrv7PDzH88Ec5WRSMDPQLQ1+gbaEUg0WIDRhjCI1MzyOG18VVJntfLLPDPqNJRnfuDEnDa8dDpe5CO+el1mQECEeMJzKGwkSsJsdoF2sEeATDJGhQcwNOjb6IBTrlJl5go3aCVrZOx+TkIlwRy71A20OJYT6/SoElRXACGGgitEddrIdifY1BUqPfmaEQdHX2gNFA5A5y46C7sekQ4IivGXQXX7zE0s//IpF3t1wxCTY2SL74RVb/5NPMFTkWaAGzNkCCmIYf7S6iloSs1k6zMP4yQpX5HEz3cgMRBCEwddLeGO8LaDW146FSd5nd85INqHuHswmr4Rxz+TJn+5/ZvZAZ25T15BSnRl8kuOb3jYHM1rE2JvQ5obGkBrrOQxBSIsz4jLapVkwCoGkEjzAuDT0xjL2j7kY08pJJZ4ZiYUFXZw8QrZq5g+z0/mA66A4sVordn0sc056MiLNJtWKSppggwKQpxcIC4WjI6Dd/i5XHniDMMhJTzX2IDIyTI/igVjUTmt5fiCP1E0TAydM9RQRDzQipCOJ7zGQrpNtbiMh1HQ9dWbL0mS9oVY1Sd7Cd81KcVdsruU2YhC0KE7NVO7Z7u8SPqLkeQ9u87vcNcHJ8jrYbEAJdoBlaMjE8XkKR1hmKIZ+ehxJT/U7pITJCgjBnBBs0KOPTHFtfodjcpmi1MNPVWbW/NAy8g9w46C6Vret+bvt9YoTNRhNjLSUJgiVijLEW06hT295gVHrqpiqbc9EMSblFVGwAfnflo9qKEebyqwziNsYXTLDEbshadIwZ16XmxoQilNaROmE0HBENB5SnTjLp9vjYP/vJrzihVil1sNVnOyBCo/TkBpJywIxZpR/PITcEHV6EXjRHPRtcd5UsAuvJCRqTZZoIpfcQhfzfA8v32TELAUTGECJYprkm1mAxFHGDohji/YSw7NExQtbdZCtawDqnq7MHgK6I3EF2en9EvR7i/XU/E++pDYdkGHyrevOP7QxeQEYjpNcnKD2BOGpeGAUGb2KWm6+hCOpEUhCI50p6Hxvx01cxAnTjRfrxUbAJIjA2IUbKag6E93ggFE+6vUVZr9N45X1c+sVfI1pawtfrFEeO4Ov13e2h8488evueNKXUnjp2/33VtjBCAUyAkW1gjeHEqFqNEIGN6BidfJ3ZyWVEqjoaAXITkWEYhh28jaghWAHfbHLvTIO2EXIv4D1Xaq9gYhO8VBV+eZDSr53C2JBAStJ8lRJDjKe5tYlYnUdzEGggcofYqYKpv+Ll+CAgWlurAgznkNGIaG2NrFajV0uhqLZrGt1zzFx+gvnVZRY2VpndXifykFC1b29QsDi5SDhtuzwOmjgbEcjTQY4Fjo/OM58tUS+rK4uZyWWcL5gAMUKndMTA6MgRTv+t72Hw+JOEOwm1N24PaQMipe4oQRjS+YaHmBiDFSEQIcmuUh9f3d3O9cAkalPaBIvHYxjaOqu1M1ytvwIJarx89AXaZEAVZBjveb2vtpDDaeI8PsMTgKnOTYkfMZsvUySLjOKF3dWSoYeOd5Stps6jOQB0a+YOcGPfEOsdoXOYbhcfBLuD7k7/5W9h5Tceprm8TD6ZUO9uE4tnEgQ4oOVKHNC0QuyFwAjz2WUCYBI0GEaznBk+BlRvZgMMww6NsouR6oRiDSQAImRYDJDg6WIhTelevnJdQm11xdMik
b5W1Sh1h3rgr/0VPvr7H+f48hWaRmjiKTEUAoVA3QqLg8cpBAYCjcAAFiNCK1tDfEku1QdWZKCGh9GA9vR8kxgYmZSZbJlRPMtmcpJT42q1pVFuY6wnrUIURlJdaDmg8cDrNFH1ANB/gUPumfqG0O3ig5DaX3iIk1/75zjaruMe/VPiUCjLjKQ7xAMbgaXmHTURJgJfrre4d9ynOR3PLQJioDQhRqorlSxoMAqaNMtutZ87XQnxwrTpWbWsGlJVzpQYnDHUL19iuLJCnOdMZmYAyE2TwqQk0q/+IK2qUeqOE4QhL/tb38PFX/iv9LrbJM4RG09ThNTAyMNALC0rRFSVL4kf0MwGAJQmwGIwSHVOwjAqHFhLaqs+RkU8Q2HAE5MHjd3Eei9Q9z28UG0NeWFgLJLWefnXftW+PSfqaRqIHGI39g0RGwOCTYPdviGjL53n6FvezMZ/ez+rX77E0qQECbjfF6QGTnsHBnrRPEfyDaJBj3UMp4NpUqoBZwK24mN4E3AkW8KZgNX0Zcx3P8Fstgw8XbrrgCqGMeTARCw5kIpg8wmZK4m9Z5JlUK+TyIBIJk//UddU1Sil7hw7SegX3v8wk6vL1MZDgrLAIjgM3sJYDHXjCY3ZDSQMMAjnKG3MkWwJj1AKdELDyMn0RoaZYh1nIiI/plVukNmEwqak5TZCNYdmKMITEjCX1jBnzui2zAGhgcghdmPfkJHtEEp+3TZHcHWFC7/862w8eZ5LztJqN4gtFBsZ9enEypGt0ws7NIoNFoKqNsaYpx8nEMfRyUUinwFC6AvsdHUkcWNKqj3agROWveVIAMNp58QgSEkoSXxGCxiUJTmQbG2R1WoYa7FU+SDiPVG/T3nqpJ4glLoD3fPQg5x90wN8+td+k+HDH2KjCFlOEmqjIQ1XUKOaO4MIYnZq86pcj1CK3R5GW0REQUJL+sS2WrUNpSCctitI3YBueIRh2CEtthmJcKEwzAaGY0lI0Wlz5u1v022ZA0L/FQ6xnfp8P+0bEvsRAUVVMTOZQJ6TTkb0njjHqrPMH+lgjCErOjTtWpW4JZD4jPnxBRzTwVLTSxGPYKdXJpHPKGxELzrKQnaJo5OnsAhiIJDqfgoPYiE3BmcshAsM43k6k4tAjhNoGs9ELD0gWlujaLWqWTRZVgUh9bqeIJS6w3U//XkiEbLjx6uLKJlnNBoTb6zQ9AXlNB+kH88xCOc4MT5X/V50hGaxThG0GYVNWn6AUOWnubABGBJXbed0ynWa5RaFCD1vmI8MgYFs8Rgv+96/qm0CDhA92x9iN/YNiRgj/T717W1SVxCJJwaybkbamccYQ0mCMxEWqfZbjWGtdpqRqXFq9CTTzRUwYMRQmhgw9OJ5ClN1NtyOFpgETUqzSSglTFdDlpwnCUMK42l6T05JNLlANB1YVRoIpx1I8nodmZ/Hrq1hB4PdhNozb9c+Ikrdya5dyQUYT2KSfJPEeGIEj8EBuVgu1l+DFc/x8TnAsJUcp+aGzJab9F2GBUZeqFnDanKKSApOjKtAxAOI44vOMpqbZ2Y0ZLSwwJ//qR8nrtX26a9Xt6KByCF27P77eGJxkWhpiSJJYDhkZmN9WgljCRx0jaXlPYu9LdajkLAFSb7NTksyK5CWA7IgpAxSIlddYXgMk7BJFjRJ/ITYjRnHTRpll0E0y2y2XAUhgBPhi7lnEoSYOCEpMhrGYfMNvIHSRBRBnUbZJQDWrcU6z8v/xndhw1AnYip1F9ldyc1z0vV1fO1eOuMhDT8gns7htQbGPqBdbHJ8eA4xYBFOD7+I9QUGwbohBBBPt3AKATPeIhfBToMZL0IQRbTHIybtNi/73r+qQcgBpGf9QywIQ85+59uqqpnVVWr5pApCrKHmPIUN2GjNMOpucUI8o+0tRo0GNijwVGsfFmgV62Q2xtkY78AIDON5SmNplJtEUmLFEfsJsR9Tdz0CqVZOvMB6KURhyFZcJ2i1qW+v450jMkIhhlE8hxVPH
WEkhu3ZecI8Y9Ifct9f/Lr9fAqVUnvsxknftVYDvGN+c4MAwU6+TElJbi3GO4xUW8KxKTg5fAx7TcJaKAXeQC5QsyAIoYFcDK3BORxCD0NqIDUgxnDMlwwKzySc3cdnQT0bDUQOud1M9F/67yxcvYQDIjGsJ/NMOnPEdcfGOKOVDTjicq4OBxRpnYk1dLwnF8NIIBqvEBtPAQRGaOSb5DYi9BlFUCMPO7SLTQDsNAhxIow9RNYwL4IvM8Jhj9B5nICjmsy7kC9P6/cNS60OxBHelVoZo9Qd7sYeRy4McUeO0C4KGuLZCEOMOAyGjICR9cS+yk+rGUMphgDIgwYWT+TG9J0Q26qde+Grar1ShEE8RyAltWKL0AheqiqcS60ZpJ4S9/s6cfeAui2dVbMs441vfCPGGP7sz/7sdjzkXeWehx7k5P/yHWzEKauzC2wcPcbw5MuxzRrGGKK5WZ40MX0H0XhCJxuzFadk8nTPjxKHICRmOkYbh3UTChEmtoGfvlR2Bt7t1Ocbqi6FT5qY0eIixcws4zCimG79jDAMMFwNIi4eWcTNzxP1+8ixRa2MUeoOttvj6IZRDrXLl+mUBZmBhnP0a6dwJsBKiThhIpbCV8GFmQ6wG4cttqMjbDthQwxfyoWLheepEq64ahsmNJbABKTTZPu+GLomgCTGNBraufkAuy0rIv/kn/wTTpw4wac//enb8XB3pcaROa7WakgSY9KU+jUD7+ppgp3pcGl7m0/XOhQ2QRbmeeWwy9dvLNG0VS1+YHZL8pHpHitAo9jAspvGigdyEa7mngGWi/U2c0dmqKfV3uvk5EnWlpaolwXjMGLSbOJbLUyeE62taWWMUne4G3scGTu95k1TsmYTm40ZW0thI6xYGh6a0/lYE6BuDCWQSdV99f/f3r2HR1Xdi/9/r71nzy3J5EYISQjhqsgREEF5CjynVjii9qtiLda7ouLx0qNWa5WjR2sflVq0tVqPtfXIscfW24/jXVSOWi94V6AiCiJXcyMkIZlkrnvv9ftjyMhwTSRhkvB5PU8enWHPnk/WZPZ8Zq3PWqs1uoUt3hDewhKwPMRiSXLjUZSCmM9PLNJObmILXhdsAyLaIGoYNAZGkLRSK6nKys29V49/EixevJhXX32VRYsWsXjx4p5+uoPWzoWr6Tc+qfU5chMxkqNGcO5lc4jEk+TmBKgoL2H9Ox+x6ZHHKGlpRDmaNif1LSTPSC2bnATWx102JR2SmOR6TEIG1HkscgMmZn4BFaFcUAYJlYNXt6EMg1hREUbLNhLBHMxoFKIxlM8rM2OEOAjsvMZRQgUxtY1JAiyLmAKf69I6oAgjuhXlxohpiHhM0Jqom5rxZ2qozgkRzy8g4LNQSqFdl4JIA8mqIWitsWpriVRUEInH8YTbGBBpJewxyXFc0BGUvwC271EjKzf3Tj2aiNTX1zN37lyeeeYZgsHgPo+Px+PE4/H07dbW1p4Mr1/JKFzdw/ocVaecQNXQiozHDZ92FL4vv6D27Q+oS7oor5dtsSSBWDvDtU2+obAtEzWglAKPgbetjVggQOX3jsZ+6x2SucHUvjJxD7brYBHBbxrkRNoIJqI0ak1cGbQYFk1lVUw5/zSGjx6WpVYSQhwIO69xlFA5GEoTdLeC30+76aXYTkAiiT8aRQFhj4UCgq5Nm8dHQyhEeWMDAyJhtubmgGuid7yezToRIHXN27qVZF4edjCIG2klz3GIGyYqx8FS336myMrNvVOPJSJaay644AIuvfRSJk2axIYNG/b5mPnz53Prrbf2VEj93o5LKBv19Z1an8PZ0kDDP76kTlnklheSNHIpd9tIJJLUh8PEW7ZRbIBKxohhoYcOYcTJJ+DLDbL23fcxtm3DH4lQ5CTRyotPxwkYkLQ1USCck4cvGKAiGsFfvZHnFv5/zJp7BqNGVh7g1hFCHCg7r3GU425Fb68zU4ZBNJhDPJwkGG4l10kQN0w8uCSNIsKmQyzkReXlUpe0K
WhrSdWVtbfv8XqWvuYlbWxlYGhNc2ExKi83fYys3Nx7dTkRueGGG7jzzjv3eswXX3zBq6++SjgcZt68eZ0+97x587jmmmvSt1tbW6mslA+sruhYQnnH6XJ7W5+jflMt0XAYXygflElUFZCn4vh9CnzFtObkwLZmfNOPYfCk8elzObbN6kCAiroabKW27+Brp3bDdFw8aFpNCzM/hKMUbZZFaaSNyi21vLzkXUYMn41hHJBaaSHEAba7oeKOqjPtungcm7rSMpTrUtRQh3YdXGWwzRcgkZeDP5haqt0tLKDVtgnOnEGoqmK317Odr3nh6lqa33gbKxol6fFk9Aw7pklgxDDqVn0l6xb1Il1+Fa699louuOCCvR4zfPhwXn/9dd577z18Pl/Gv02aNImzzz6bRx55ZJfH+Xy+XY4XXWd6PJ0uxGp3NXE3NefewSXH3Zre+wUgaBpEPR6Kxhy66zl1akl4tMZ0XUxD4dWpDakU4BoewmogeTSglKLd52eoG2XV6q+prmmgcnBpt/3OQojeozNDxUPP/QmW38vGu+/H8Xhwg0FUwMKvkt+eKB7HtTwMOmLMXq9pO1/z1g0dktEzbDgOpmODa5J88x3WLn2fNaWlVJ0i9Wq9QZcTkZKSEkpKSvZ53L333sttt92Wvl1TU8PMmTN54oknmDx5clefVvSQQPkgGv1BhkYjtFkWnh3HU7XGH41Q5wsysnxQxuNWPPUCxQ11OIrURlXaod0owmM30wLEDBOPCyqRQKWGibFND0FDoRIJ2tqjB+6XFEIccJ0ZKnZsm7WDB6em+PpTyw102J+hlB17Sao/WEbs7XfRSpHMzwevFxIJrOpqWVekl+ixfqkhQ4Zk3M7NTY3VjRgxgsGDB/fU04ouqhhcSvuho2n8x3JKI220+/zYpgePY5MTj1GXcIiMH0vFDr0X6979hPaXXqXcddlmGrQqE9fIQZl55NnNRFEklEHQTZAb+wa8eQB4HJu4q9F+L7k5gWz9ykKIA2RfQ8Wd6Tn5rlP9TY8nNUT010VYrkNy4EAcw4+HVN1K0ufDamhg03OLqTp6vAzTZJG0/EHOMAymzDqOZxqaqNxSy1A3StBQxF3Nl0nYXD6EWaccl67n6FgfwJdM0m4oUAagMNwIjo7jAEUG1NoOrmmAx0w9kdbkxGN8mYSSQ0dQUb7vXjUhRN+3r6Hi71Jk31k7TyNuN0sJuE14dbusK9KLHLBEZOjQoWit932gOOBGjaxk1twzWPzqu6xa8zUqkUD7vZQcOoJZ/zIlY4ZLxxs7XlCA2dSEzzVo9g8kP16LqR22GSZFrkOpAfUaksogmEikhngSTiqx+ZcpUqgqhEjrapF9Z+08jTjgNmPqxLcHyLoivYL0iAgglYyMGD6b6poG2tqj6QXPdk4Y0m9sv59IQQFGc5hcJ4GhHVylSGhIuhC1LKzcPHLb24hrTZ0vSGT8WGadcpxM3RVC7KIrRfadtfM0Yq9uyzxA1hXpFSQREWmGYexzJsuOb2yVl0cbENzWhKU1huui0LQYBs6skxl37BTqN9XS7mpGlg+iYnCp9IQIIQ6Yfa04LeuK9A6SiIgu2eWNnZdHJCeHSCwGySS+tjbiQ4bwz6eflPqGM0im6AohsqMni2FF95HWF12yxzc2YMViJEIhqmadKG9sAaS+dTpbGtI9YwHpGRMHWE8Ww4ruIZ8WosvkjS06w/mmmvrFr1G/YhXRcJi4C43+IO2HjmbKLKkVEgdOTxXDiu6hdC+eytLa2kp+fj4tLS2EQqFshyN24ti2vLHFbjnfVNPw2CI2frGWLTb4QrkEFASiERoTDv8oG7LbPYfkb0qI/qErn9/yDhffWU9UuYu+T7suyQ8+Ysv6zWx2DIoH5OMoHw6Jve45tO7dT9K9bKZt43g8rCktZcj/Ow5/fp4kJ0L0U/JuFkJ8J3vqvdBbG2lfu57qmE1eKAetPITNMgqcTSjFbvccWvfuJ6m6o2iEZCiUWvchk
cC/YQMtv7+fOq8fbRjp5ET2CBGi/5BERAixV7srOE1s/oZNz72yS+9F1SknUFUxEDsaI+pqCjwmBg4BdxuK1CiwbXoIKPA3N7P27Q9g9HA2PvNSKgkpKUlPsdS2jd9OEnIdnGSctvLBkEzKHiFC9DOSiAgh9mh3BaetGnzRGH7LxC0qTPdedCQI5mn/j9yAn4ChSCZtcg0w4tUkNNiWRW48iqelmSPiLuq5F9nwgqIoFmFbXj7KMNAYxHQORds249Wp/YyCjk1k2zbcYJDkgAFYW7fKHiFC9BPyDhZCZOgYckl8vR71yTLqq+vY4ih8oXwCaEbW12C6Dp8lvORgEDTNjE3E1r+xlKMnjWHo1xsJ19eRh43huNga2oEiDWGtafMFCZQOxG3eRq7r4oZbaPH6SIQGkUh4CThJHAV5rktAGUTjFt62OqKmRSwQRMkeIUL0C5KICCHSOgpGzbp6SuPteJJJElqRW1yC8nmxIxEc18XyGAyybTY0xyBQQpDWbzcRq6+nVY0jPxahwImhAVcrtIKBaBxgeULhKcqFeBy0S0yBX7sktjWjc3OworUEtItXg0dpIipI1MwjkGggZMfxtiWIeXyyR4gQ/YAkIkIIgIyCURXMwbRjtMQTFAKJpkbqPMUE7DCGhnYUhR5Y5xhEbYNgx5XE58MMtxF59wMC7RE8ChQKFLgalAINDLMULdvayLNM/HYbQQ0mLtqO44ZbyY9HKVAaU6UeE9RhimJhtAJbg601Lck4/rycLLaYEKI7yPKGQggc22bjs4vxbC8YNX0WCogrg4jHxFAWRE0wFK5KlZ2agD/RiD9R9+2J4nGC2ia4eROGdml3FS142OIpxgY0qUxksOni8xbSbuYQMT20Ax4FA3Ap3daIJ9aOBgyVuki1e/JxlImtwVQQUJp87bBl1VdZaC0hRHeSREQIQd2qrzDq60mGQijDIGrm0eyrwKNSyYNDgorwKgCipkXQcXG0xlEGprl9lovrYrW2EnJtvK6LQhE0oCU4ghZ/JQ6pC45lKCyl8EWryYlvpcCxyVUarRWWgjzXxpN0sABFqgdlm28QUU8+HgUuGnd7QtLy1rs4tp2lVhNCdAdJRIQQRJpbMG0bvF4AHAOwNHmGgeM4uEphahdcTSQUSiUJSYegAV7TQEciWA0NWCh8dgIAF0hqEwyLIbF15BmpHg5LpZ7TqzT5gPbk4ZCqIUmicHUqgdEo2owQGshLbqXBX0XSzMXVENEKjSLQ0kyd9IoI0adJIiKEIFiYj+PxQCKVRHhUAtuXxPD7KDDAtG1cNN5YhIEtTdiuRqMZ7SYo2bwRb0sLyfJy3KAfDSRJ9Vh4DA9BN0Ku04pBajhHbf+3QgNi3mLajDwsNFqn6j8UGkNBm6eAdqsAR0Mw0YzXiZDqH1EoBUmduikFq0L0bZKICCEYNGYUbmkpVmsr2nUBiFlemgqLsYM5hAyFBRS2hbGTDmstPxtLK6gvHEC75cPxWITGjYF4nKTHS7sLSTMXU0F5dB0AHZtadfw311QUJuopTtYRN/NwDQufAq+hAEWOvY2oJw/H9GHgUhFZjc9px7t9vCamFLZlESzMP8CtJYToTpKICCEwPR6qTjkBOxDEamhARyJoxyGatGl1YUt+EVvzC9nm9VNbPhhf5VACeXnowgKi5eUYrkPLOx9g2w6JYBDXUGzzlpAwAwBoDMKeIrQGV2tsrVGAozXKdQh7S4gYQTr24LQUmEqTZzfj0TZKpYZ1NKlekziKBAq7ooJBY0Zlr+GEEPtNpu8KIYBvl0vv2HjOaA0DmmRuHtboUbjLVuDm5OBPJmgzApg+RYBWAByvl2BTIxpNu/YyyDQpjG7Co1K9K61WMZtzDqMq/BlGvAnTSF18cgyFC5THN2JpByM10zetMFaD2n6HrSHqajzb79iWG6Jq1omysqoQfZzSHV9BeqGubCMshOgejm2z4snnaXnrXaxwGJTCG48z0I4TUanEIWkESShN0qOwb
JugmyTP1RgGeAHLAGv7pSVVyqFYHxxLnt1EbuwbEq4mz1SpXo5U2QctngEEnHZ8Opp+nKtTRa8Gqf86WtOuDTaUljH0vDNkrxkheqmufH7LVwkhRIaNH66g9Y13UpvQ5edDIklhe5iA0phak8Ag5LbjBUwXYq6mHQOPAc72lVBNF/QOXRsuBsMin5FQPkwFlrlDz8f2mo+4JzeVhDjgYNJmFRJKbiXmaOI6NVyzxbBon/o9pl1xAV6//8A3jhCi20mNiBAibeeFzfD7Cba2gII2bRBUkKtcElrhkMohvAqKlYul3dSMF0gPpySMAFEzl9rgSEDjdWN4VOob0PZJLwDEPLkMiFcTcCIoIGHmEPEUAKni1VxT4TfAQON+9jlLb7mLde9+cmAbRwjRI6RHRAiRtvPCZm4kBiqXmJHE77jETD9RTz5WbAseBQ7gUYqop4B2I4AvXktCWRiGB78bJWwVY2iboJ2avrt9YVVQKj3cYgLNVikmDq4yGRD7hoDTSiAaJumCRpPUoJTCNhVuIJDe6ReQ4Rkh+jjpERFCpO28sFlCB0mYedvX/tC04iepLBxPEMdIDY0oQGkHS8cJAHGrgIgnNSZckKjHVh4snUw/R8eQTFL52OIfCkDAbQM0PjeGqW0UEPHkoQ0PSoGrFO1a4d0+tThZUoInGmHTc4tlZVUh+jhJRIQQaTsvbOZTbeQlqvG4qSEXr91KIFZNtTWQFiNIwk31avicMIXJJmJWAQA5dmo2jUcnyUs24XPasLePw8SMII5OLUjm0Q4ARYl68pLN5CabMEndt807ENsMoEgNAzVaOUStYpRtp3f6VXX1srKqEH2cJCJCiLRdFjYLBIgbBhYal9Sy7AkUVrIeI7kN7/bhmY4N6hzDl1r3w3XQQMQMEfHk43VjmAocZdLgH0IcDw2+weQntqSfu93KJ2Z+u5tuaXQDQScMQJNh4Bg5aEyU7aTWG/H5MBxHVlYVoo+TREQIkbbzwmZEo0RC+SQBLxpDa9qVwksSjSYBoMHcvkR7YbIegI15h9PuKQQ0lk6mhm8AQzvkx2txdRLcCOYOQzahRCNol23WgNTuvtuHaAygyHEZmqinLFFHYWszwW++wWjehmuasrKqEH2cFKsKITLssrCZ4xD25xBPJsi3k4RcTVQ7tJgWWz0WVbE2rO09I0qD140QSjbicWP43Cg44dQMGQ2gybW30a6hIF5LOkMB2q0C4kYQn9OeriPp6GnJNyHmwjZXkTAh146T3xqndpCsrCpEXyeJiBBiF8OnTKTq6PHUrfqKSHMLwcJ8Sg4Zxsr/XZyx0Jnj8fC138+Q5kZyjNR6H7nEGRDbCBpcBW2eIkJ2U2qBsh1SjOD2ZdtTtyCU2IJGYWibmJmDf4eEBMBQigKl2Zp0cE21fQpwr12PUQjRSZKICCF2y/R4qBh3WMZ9R551Ks7pJ+2SoLxz1X8wfEsNPpWZGrQRYJs1gNxkIw6KuKvxGwqPgoSrcRSpdUWUwtI2DqnEJWF+2zPibj9Xo7+SULKRAbSx0VW4BQWY0Sh1q77aJU4hRN8hiYgQokt2l6AMPf8MNv/nQ1S0hzEUJByNqSBIFCu2kZiGqFJ4lcJFE3UBBY4DOQZ4jFTC4QKuMvFvn3XjotjqHURxohbHjpJ0kxjKYLMyKQ7mktPWKsWqQvRxUqwqhNhvw6dMpOLyi9lQWk4U8Bmp4ZZ2DQ1ukjV5RWwsHkRSQZ2rqDcMtnly2RoYTFhDx0ogShm0egficyOpTe6Uj0bfYLSGYKIBx03iUZocV6PjMSlWFaIfkB4RIQ5ijm1nDLMMGjPqO+9mm64reeMdtj33Em1bGqk3LMzCAoKmQW5zE44LKEUu0IxB3DTxGga52sUETO1Q3raKpE4N8WhlkuO0YANa7VBQojSBaBRdVSnFqkL0cZKICHGQWvfuJ+mZMaZt43g8rCktpeqUE77zsummx0PFvxyDc9go6he/hlqximi4jbjWbDUs/IbCA
po0eEngS9STNC1a7SQFyiUJeNBYKtVL4trtlETWpWbcaPApSGhNCFBeC/+IYdSt+mq/EighRHYprXWvLTvvyjbCQojOW/fuJ2xa+LfU5nahUGpJ90QCq7UVOxBkyJyz9nsPF+26OFsaqN9US7urCVgm8XvuJ9HWTtjjwcCg3TuQYKIBj05S6qQGaBptzQBLoXVH3YjCReFTGgNNkwNNXi9YXjBMHI8Hdz8TKCFE9+rK57d8hRDiINOxw67VscOuYQIaFQiQ9PmwGhrY9Nxiqo4ev1+9DMow8AwqpWJQaep5N26mekAR29rayXUcoh6DqFVEXqIOn6tpVgaWMmjEIeAqgoZGo4iS6h1pdxWtGNheL4bXIpmf/20CJZvgCdFnSSIixEFm5x12I6oAhSagt6X3cDG27+Gyu2mx37WuRAX85AwaSPOWbSQTMXIcm8rwZ7go2jxe7FAIE81SFWDyYcPwrFuHv7UVI5EkYRi0DSjBl0zgDYdJlpSgDSvVVxIwuzWBEkIcWPJuFeIg07HDrrt9h10TG4MddrD1+TDa2nY7LXZ/6krUgGJyRg6jdGMNa9q9FAd8KMdGezzg91MYbefLuCb/6COY/q+z0a6bkfC4jsO63z/4bQJlFGJqG38nEyghRO8kiYgQB5mMHXYDAXw6nHlAPL7babEddSXW9roSt4vDIsowsCYfxcB1m4h+sZaGSAxvXg5BQ+EPt1KXcNhcPoRZ/zIFwzDAMDISiq/efD8jgfLqCEq73z7BXhIoIUTvJeuICHGQ2WWH3R1o18UKh9GDSjOmxXbUlXi215U4gUKUaabqSkpK8EQjbHpuMY5t7/x0GczBFZSceRqV35tERShIbluYZEsLG5TFlvFHMOviMxg1snK3j81IoABLR/AQ+/aAPSRQQojeTXpEhDjIdOywu2nh37AaGkjm5YHPB/E4VjiMHQwy5OQTMuosdqwrwTBpNweQ42zBQ6LLwyLm4ArKLjqHgTvMqBlZPoiKwaWpnpA9GDRmFGtKS7Gqq0n6fKgdju1IoOzBsgmeEH2NJCJCHIR22WG3rQ3XNLEHVzDk5F3rPXasK1FoAu42TJLfHtDFYZGdZ9R0xndJoIQQvZ+8Y4U4SO1uh909zYDZua7Eq9syDzhAwyJdTaCEEL2fJCJCHMR2t4Hd7vSmYZGuJFBCiN5P3rlCiH3qbcMinU2ghBC9nyQiQohOkWERIURPkERECNFpMiwihOhucvUQQnSJDIsIIbpTjy5o9uKLLzJ58mQCgQCFhYXMmjWrJ59OCCGEEH1Mj/WILFq0iLlz53LHHXdw7LHHYts2K1eu7KmnE0IIIUQf1COJiG3bXHXVVSxYsICLLrooff+YMWN64umEEEII0Uf1yNDMp59+SnV1NYZhMGHCBMrKyjjhhBOkR0QIIYQQGXokEVm3bh0Av/zlL7npppt44YUXKCws5JhjjqGpqWmPj4vH47S2tmb8CCGEEKL/6lIicsMNN6CU2uvPl19+ibt9R88bb7yR0047jYkTJ7Jw4UKUUjz11FN7PP/8+fPJz89P/1RW7n4XTiGEEEL0D12qEbn22mu54IIL9nrM8OHDqa2tBTJrQnw+H8OHD2fTpk17fOy8efO45ppr0rdbW1slGRFCCCH6sS4lIiUlJZSUlOzzuIkTJ+Lz+Vi9ejXTpk0DIJlMsmHDBqqqqvb4OJ/Ph8/n60pIQgghhOjDemTWTCgU4tJLL+WWW26hsrKSqqoqFixYAMDs2bM7fR6tNYDUigghhBB9SMfndsfn+N702DoiCxYswOPxcO655xKNRpk8eTKvv/46hYWFnT5HOBwGkOEZIYQQog8Kh8Pk5+fv9RilO5OuZInruqxevZoxY8awefNmQqFQtkPqszrqbaQdvztpw+4h7dg9pB27h7Tj/ttdG2qtCYfDlJeXYxh7nxfTq/eaMQyDiooKIDXcI38k+0/acf9JG3YPacfuIe3YPaQd99/ObbivnpAOPbrXjBBCC
CHE3kgiIoQQQois6fWJiM/n45ZbbpFpvftJ2nH/SRt2D2nH7iHt2D2kHfff/rZhry5WFUIIIUT/1ut7RIQQQgjRf0kiIoQQQoiskURECCGEEFkjiYgQQgghsqbPJSIvvvgikydPJhAIUFhYyKxZs7IdUp8Vj8c54ogjUEqxfPnybIfTp2zYsIGLLrqIYcOGEQgEGDFiBLfccguJRCLbofV6999/P0OHDsXv9zN58mQ+/PDDbIfUp8yfP5+jjjqKvLw8Bg4cyKxZs1i9enW2w+rTfv3rX6OU4uqrr852KH1OdXU155xzDsXFxQQCAcaOHcvHH3/cpXP0qURk0aJFnHvuucyZM4cVK1awdOlSzjrrrGyH1Wf94he/oLy8PNth9Elffvklruvy4IMP8vnnn/O73/2OP/7xj/z7v/97tkPr1Z544gmuueYabrnlFj799FPGjx/PzJkz2bJlS7ZD6zPefPNNrrjiCt5//32WLFlCMpnkuOOOo729Pduh9UkfffQRDz74IOPGjct2KH1Oc3MzU6dOxbIsFi9ezKpVq7j77ru7tKccALqPSCaTuqKiQj/00EPZDqVfeOmll/To0aP1559/rgG9bNmybIfU5/3mN7/Rw4YNy3YYvdrRRx+tr7jiivRtx3F0eXm5nj9/fhaj6tu2bNmiAf3mm29mO5Q+JxwO61GjRuklS5bo73//+/qqq67Kdkh9yvXXX6+nTZu23+fpMz0in376KdXV1RiGwYQJEygrK+OEE05g5cqV2Q6tz6mvr2fu3Ln8z//8D8FgMNvh9BstLS0UFRVlO4xeK5FI8MknnzBjxoz0fYZhMGPGDN57770sRta3tbS0AMjf3ndwxRVX8MMf/jDjb1J03nPPPcekSZOYPXs2AwcOZMKECfz5z3/u8nn6TCKybt06AH75y19y00038cILL1BYWMgxxxxDU1NTlqPrO7TWXHDBBVx66aVMmjQp2+H0G2vXruW+++7jX//1X7MdSq+1detWHMehtLQ04/7S0lLq6uqyFFXf5rouV199NVOnTuXwww/Pdjh9yuOPP86nn37K/Pnzsx1Kn7Vu3ToeeOABRo0axSuvvMJll13GlVdeySOPPNKl82Q9EbnhhhtQSu31p2M8HuDGG2/ktNNOY+LEiSxcuBClFE899VSWf4vs62w73nfffYTDYebNm5ftkHulzrbjjqqrqzn++OOZPXs2c+fOzVLk4mB0xRVXsHLlSh5//PFsh9KnbN68mauuuoq//vWv+P3+bIfTZ7muy5FHHskdd9zBhAkTuOSSS5g7dy5//OMfu3QeTw/F12nXXnstF1xwwV6PGT58OLW1tQCMGTMmfb/P52P48OFs2rSpJ0PsEzrbjq+//jrvvffeLnsCTJo0ibPPPrvLmWx/09l27FBTU8MPfvADpkyZwp/+9Kcejq5vGzBgAKZpUl9fn3F/fX09gwYNylJUfddPf/pTXnjhBd566y0GDx6c7XD6lE8++YQtW7Zw5JFHpu9zHIe33nqLP/zhD8TjcUzTzGKEfUNZWVnGZzLAYYcdxqJFi7p0nqwnIiUlJZSUlOzzuIkTJ+Lz+Vi9ejXTpk0DIJlMsmHDBqqqqno6zF6vs+147733ctttt6Vv19TUMHPmTJ544gkmT57ckyH2CZ1tR0j1hPzgBz9I984ZRtY7GHs1r9fLxIkTee2119LT7l3X5bXXXuOnP/1pdoPrQ7TW/Nu//RtPP/00f//73xk2bFi2Q+pzpk+fzmeffZZx35w5cxg9ejTXX3+9JCGdNHXq1F2mjq9Zs6bLn8lZT0Q6KxQKcemll3LLLbdQWVlJVVUVCxYsAGD27NlZjq7vGDJkSMbt3NxcAEaMGCHfqrqgurqaY445hqqqKu666y4aGhrS/ybf7vfsmmuu4fzzz2fSpEkcffTR3HPPPbS3tzNnzpxsh9ZnXHHFFfztb3/j2WefJS8vL11fk5+fTyAQyHJ0fUNeXt4uNTU5OTkUFxdLr
U0X/OxnP2PKlCnccccdnH766Xz44Yf86U9/6nLvcJ9JRAAWLFiAx+Ph3HPPJRqNMnnyZF5//fWuz1kWYj8tWbKEtWvXsnbt2l0SOC0bWu/RT37yExoaGrj55pupq6vjiCOO4OWXX96lgFXs2QMPPADAMccck3H/woUL9zmsKER3Ouqoo3j66aeZN28ev/rVrxg2bBj33HMPZ599dpfOo7RcNYUQQgiRJTKoLYQQQoiskURECCGEEFkjiYgQQgghsqZPFauKzguHw9TW1qYXghNCCNF1hmFQVlZGXl5etkPptyQR6Wdc12X+/Pk8/fTT2Q5FCCH6jVNPPZV58+bJekE9QBKRfmb+/Pk888wzXHnllUyYMAHLsrIdkhBC9FnJZJJly5Zx3333AaltRkT3kum7/UhrayvHHnssV155Jeedd162wxFCiH7jL3/5C/feey9vvPGGDNN0M+lj6kc6VlicMGFCliMRQoj+peO62rHvmeg+koj0Ix2FqTIcI4QQ3avjuioTALqfJCJCCCGEyBpJRIQQQgiRNZKICCHEQSgWi+3zpyvDEI2NjQwcOJANGzb0XNA94IwzzuDuu+/OdhgHNUlEhBDd4phjjuHqq6/OdhhpvS2e3ubII48kEAjs8ScYDLJp06ZOn+/222/nlFNOYejQoen7fvazn/GjH/2oB6L/7naO6aabbuL222+npaUli1Ed3CQREQcdx3Gk4KyXSiQS2Q7hoHHxxRcTCoVYs2YN69evz/iZPn06M2bMyEgq9iYSifBf//VfXHTRRRn3f/jhh0yaNGm/Y7Vte7/P0WHnmA4//HBGjBjBo48+2m3PIbpIi37jiy++0BMnTtRffPHFfp/LdV0djsR1cziqw5G4dl23GyLcs6eeekoffvjh2u/366KiIj19+nTd1tamHcfRt956q66oqNBer1ePHz9eL168OP24N954QwO6ubk5fd+yZcs0oNevX6+11nrhwoU6Pz9fP/vss/qwww7Tpmnq9evX61gspn/xi1/owYMHa6/Xq0eMGKEfeuih9Hk+++wzffzxx+ucnBw9cOBAfc455+iGhoYebYfu4jiO3rS5Tq/6cr3etLlOO47To893/vnnayDjZ+3atfrCCy/UQ4cO1X6/Xx9yyCH6nnvu2eVxp5xyir7tttt0WVmZHjp0qNZa66VLl+rx48drn8+nJ06cqJ9++mkN6GXLlqUfu7fXZ3fxdPw99Fau42infou2N2zSTv0W7fbwa9bQ0KC9Xq9etGjRLvdblqWfeOKJ9H3vv/++njp1qvb7/Xr8+PH6zTff1ID+7LPPtNap929JSUn6+Hg8rj0eT0b7T548WWut9c0336wPP/xwHQwG9cCBA/Wll16qE4lE+rHr16/XgH7iiSf0tGnTMmLcVxxaa71x40Z95pln6oKCAl1YWKjPOuss3dTUtNeYbr31Vj1t2rS9tld3Xl9FJllZVeyipT1GdUOYlkgM19UYhiI/6KeiJI/8HH+3P19tbS1nnnkmv/nNbzj11FMJh8O8/fbbaK35/e9/z913382DDz7IhAkTePjhhzn55JP5/PPPGTVqVKefIxKJcOedd/LQQw9RXFzMwIEDOe+883jvvfe49957GT9+POvXr2fr1q0AbNu2jWOPPZaLL76Y3/3ud0SjUa6//npOP/10Xn/99W5vg+701drNLF7yHmvXbSYeT+DzeRk5vJIT/uV7jBpZ2SPP+fvf/541a9Zw+OGH86tf/QqAwsJCBg8ezFNPPUVxcTHvvvsul1xyCWVlZZx++unpx7722muEQiGWLFkCpBbmO+mkkzjxxBP529/+xsaNG3cZYtnX67O7eEpKSnrkd+8OzjfV2B9+jFtdg04kUV4Lo6Icz9GTMAdX9MhzDhgwgFmzZvHwww9nDFU8+uij5OfnM2vWLABWrlzJ9OnTufrqq3nooYf4/PPPmT17Nj6fj9GjRwPw9ttvM3HixPQ5PB4PS5cuZfLkySxfvpzS0
lL8fj9aa7TWPPjgg1RUVLBq1SrOP/98xo0bx2WXXQbAihUrAFiwYAF33HEHw4YNo6SkpFNxrF27lu9973tcdtllvP/++7S1tXH55Zdz3XXX8ac//Wm3MQEcffTR3H777cTjcXw+X4+0t9gzSUREhpb2GGu+aSSecAj6PZimgeO4NIWjtMcTHDK4uNuTkdraWmzb5kc/+hFVVVUAjB07FoC77rqL66+/njPOOAOAO++8kzfeeIN77rmH+++/v9PPkUwm+c///E/Gjx8PwJo1a3jyySdZsmQJM2bMAGD48OHp4//whz8wYcIE7rjjjvR9Dz/8MJWVlaxZs4ZDDjlk/37pHvLV2s3811+eo6m5lfKyAQQDPiLROCtXfU1NbQMXnXdyjyQj+fn5eL1egsEggwYNSt9/6623pv9/2LBhvPfeezz55JMZiUhOTg4PPfQQXq8XgD/+8Y8opfjzn/+M3+9nzJgxVFdXM3fu3PRjOvP67C6e3sj5pprkS6/ghtswiotQfh/E4jjrN6K3NsKJM3ssGZk7dy7HH388NTU1lJeXA7Bw4ULOO++89Otx5ZVXcvLJJ3PbbbcBMHr0aB555BG++eYbPJ7UR8jGjRvTj4fURnE1NTUUFxen33MdOhJDgKqqKmbMmMHq1avT9y1fvpycnByeeuqpjKGhU089dZ9xXH755Vx++eUZf3e/+MUvuO666/YaU3l5OYlEgrq6uvQ1SBw4koiINK011Q1h4gmHUI4XpRQAhscklGPQ2p6guiFMKOhL/1t3GD9+PNOnT2fs2LHMnDmT4447jh//+MeYpklNTQ1Tp07NOH7q1Knpb02d5fV6GTduXPr28uXLMU2T73//+7s9fsWKFbzxxhvk5ubu8m9ff/11r0xEXNdl8ZL3aGpu5ZCRlenXKC83yCEjK1mzdjMv/997jBheccA27rr//vt5+OGH2bRpE9FolEQiwRFHHJFxzNixY9MfegCrV69m3Lhx6W+rkPrGuqO++PrsjnbdVE9IuA1jcPm376tgACNQjvtNDfaHn2CUl6F64DWbPn06VVVVPPLII8ybN49PPvmEf/zjHzz++ONAKsF44403WLlyZcbjfD5fxod5NBrNeL0Ali1btssH/saNG/nNb37Dm2++SXV1Nclkklgsxq9//ev0MStWrODkk0/OSEI6E8fGjRtZsmQJ77zzTsYsGMdxqKys3GNMAIFAAEj1nIoDTxIRkdYeS9ISiRH0e3ZJNJRSBP0eWiIx2mNJcgPePZyl60zTZMmSJbz77ru8+uqr3Hfffdx4443prvq96fhA1TtsmZRMJnc5LhAIZPxOHReePWlra+Okk07izjvv3OXfysrK9hlXNlTXNLB23WbKywbs9vUrH1TMV19vprqmgcrBpT0ez+OPP87Pf/5z7r77br73ve+Rl5fHggUL+OCDDzKOy8nJ6fK5++Lrszt6ayNudU2qJ2Q3r5lRXIhbXY3e2oga2P1DS0opLrzwQhYuXMi8efN4+OGHmTJlCocddhiQSti9Xi//9E//lPG4L774gosvvjh9e8CAATQ3N2ccs3z58owP/YaGBo466iiOPfZYfvvb31JRUYHjOEyaNCnjuOXLl3PDDTfscq59xbFixQqKiop2+fuCb9/vO8fUoampCejdw3f9mSQiIs12XFxXY5q7/+ZlmgZu3MZ2un/GiVKKqVOnMnXqVG6++Waqqqp47bXXKC8vZ+nSpRk9F0uXLk1/Q+64cNTW1lJYWAikLjb7MnbsWFzX5c0330wPzezoyCOPZNGiRQwdOjTd7dvbtbVHiccTBAO7H+MOBP3EtzTR1h7tkef3er04jpO+vXTpUqZMmcLll1+evu/rr7/e53kOPfRQHn300Yzx+o8++ijjmM68PjvH0xvpaCxVE+LfQ12Cz4duakZHYz0Ww5w5c7jlllv4v//7Px577DF++9vfpv/NNE1s2yYWi6V7PF577TU+//zzjA/0CRMm7DLr5LPPPuO00
05L337++edxHIfHHnssnXT94Q9/IJlMpnvJWltb2bBhwy77ZXUmDsuyCIfDlJeXEwwGd/u77hxTh5UrVzJ48GAGDBjQqTYT3Uum74o0j2lgGApnD4mG47gYhsKzh0Tlu/rggw+44447+Pjjj9m0aRP/+7//S0NDA4cddhjXXXcdd955J0888QSrV6/mhhtuYPny5Vx11VUAjBw5ksrKSn75y1/y1Vdf8eKLL3ZqcaKhQ4dy/vnnc+GFF/LMM8+wfv16/v73v/Pkk08CcMUVV9DU1MSZZ57JRx99xNdff80rr7zCnDlzeu2HW25OAJ/PSyQa3+2/RyMxfF6L3Jy99wZ9V0OHDuWDDz5gw4YNbN26lVGjRvHxxx/zyiuvsGbNGv7jP/5jl4Rid8466yxc1+WSSy7hiy++4JVXXuGuu+4CSH+Adeb12Tme3jhlWwX8KK8Fsd2/ZsTjKMtCBbq/SLxDeXk5J554IhdeeCGO42TU70ycOBHLsrjuuutYt24dzz//PJdccglARiIyc+ZMPv/884xeEdd1Wb16NTU1NbS0tFBcXExrayvPPfccX331Fb/97W+59dZbqaioSH+hWLFiBaZppmvEuhLH5MmTCYVCnHfeeaxYsYK1a9fy8ssvZxQ67xxTh7fffpvjjjuum1pUdJUkIiItx2+RH/QTidkZQx2QGvqIxGzyg35y/N27qV4oFOKtt97ixBNP5JBDDuGmm27i7rvv5oQTTuDKK6/kmmuu4dprr2Xs2LG8/PLLPPfcc+kZM5Zl8dhjj/Hll18ybtw47rzzznQx27488MAD/PjHP+byyy9n9OjRzJ07l/b2doB0T4zjOBx33HGMHTuWq6++moKCggNWX9FVFeUljBxeSU3t1t2+fjV1jYwaUUlFec90P//85z/HNE3GjBlDSUkJM2fO5Ec/+hE/+clPmDx5Mo2NjRm9I3sSCoV4/vnnWb58OUcccQQ33ngjN998M0D623BnXp+d4+nK4lwHihpQjFFRjtvYtNvXzG1sxqioQA0o7tE4LrnkEmpqajj77LMzehPKysp4+OGHefbZZxk3bhwLFy7k/PPPZ+TIkRQVFaWPGzt2LEceeWQ6kQe47bbb+O///m8qKiq47bbbOOmkk7jooos499xzmTZtGtXV1Zx++ukZNUMrVqzg0EMP3aXepDNxFBUV8dJLL9HY2Mg///M/c+SRR3LjjTdmFKHvHBOkVph95plnMoqhxYGl9M5//aLP+vLLLznnnHN49NFH09PZumpPs2YiMRuf1+yRWTOi+2TMmhlUTCDoJxqJUVPXSFFRiIvO7ZlZMz3tr3/9K3PmzKGlpWWf9T19TeasmULw+SAeTyUheblYPThrpqtc1+WYY45h2rRpGTOWAF588UWuu+46Vq5c2ePJ+t7i6KoHHniAp59+mldffXWvx3XH9VXsXt8Y/BYHTH6On0MGF3+7jkjcxjAURXmBHltHRHSfUSMruei8k79dR2RLEz6vxdh/GsHxM3puHZHu9pe//IXhw4dTUVHBihUr0muE9LckBEglGSfO/HYdkaZmlGVhDhuK5+iJWU1C3nrrLRoaGpgwYQJbt25lwYIFbNy4kWeeeWaXY3/4wx/y1VdfUV1dnZ6lko04usqyLO677779D1J8Z5KIiF3k5/gJBX20x5LYjovHNMjxW906ZVf0nFEjKxkxvILqmgba2qPk5gSoKC/ptUNKu1NXV8fNN99MXV0dZWVlzJ49m9tvvz3bYfUYc3AFRnkZemsjOhpL1Y4MKO6RKbtdUV9fzw033EB1dTWlpaXMmDGDDz/8MGNYZkc9tbdPV+Poih1n/4jskKGZfkS6DoUQomfI9bXn9J2vSEIIIYTodyQREUIIIUTWSCIihBBCiKyRREQIIYQQWSOJSD/SMStid3utCCGE+O46rqt9afZZXyEt2o90bHe+bNmyLEcihBD9S8d1tS9tqthXyDoi/UgoFOLUU09NL84zYcIELKt7l2MXQ
oiDSTKZZNmyZdx3332ceuqp5OXlZTukfkfWEelnXNdl/vz5PP3009kORQgh+o1TTz2VefPmydBMD5BEpJ8Kh8PU1tb2yh1HhRCirzAMg7KyMukJ6UGSiAghhBAia6SPSQghhBBZI4mIEEIIIbJGEhEhhBBCZI0kIkIIIYTIGklEhBBCCJE1kogIIYQQImskERFCCCFE1vz/5KcuvOM51+IAAAAASUVORK5CYII=", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAiIAAAG/CAYAAABlpLwqAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy88F64QAAAACXBIWXMAAA9hAAAPYQGoP6dpAACeJUlEQVR4nOz9eZRk2V3Y+373PlOcGHOorKy5pG61htbQMg0CWjYGgwQyWIh3jbHBvqBn63nZwgazvGxYXutiL3Mt7jPPi7V8bcA8IxsbMdi+ICHTgK4ekoCWQLRAU0vdXSp1DVmVc2bMcYa9f++PE5ldU4/qrMys+n1YCXRmZMTJqIgTv7P3bzAiIiillFJK7QO73weglFJKqbuXBiJKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jd7HogsLS3xN//m32R+fp40TXn961/Pn/zJn+z1wyqllFLqEAj38s63trZ485vfzDd90zfx8MMPs7CwwJNPPsns7Ozz+n3vPVeuXKHVamGM2ctDVUoppdRLRETo9/ucOHECa599zcPs5dC7H/3RH+UP//AP+f3f//0X9fuXL1/m9OnTL/FRKaWUUup2uHTpEqdOnXrW2+xpIHL//ffzrd/6rVy+fJmPfvSjnDx5kr//9/8+73rXu255+yzLyLJs97+73S5nzpzh0qVLtNvtvTpMpZRSSr2Eer0ep0+fZnt7m06n86y33dNApFarAfAjP/IjfPd3fzef/OQn+aEf+iF+9md/lu///u+/6fb//J//c/7Fv/gXN32/2+1qIKKUUkodEr1ej06n87w+v/c0EInjmK/+6q/mkUce2f3eP/yH/5BPfvKTfPzjH7/p9jeuiOxEVBqIKKWUUofHCwlE9rRq5vjx49x///3Xfe81r3kNFy9evOXtkySh3W5f96WUUkqpO9eeBiJvfvObefzxx6/73hNPPMHZs2f38mGVUkopdUjsaSDyj/7RP+ITn/gE/+pf/SvOnTvH+973Pv7Df/gPvPvd797Lh1VKKaXUIbGngcjXfM3X8Ou//uv88i//Mq973ev4l//yX/LTP/3TfN/3fd9ePqxSSimlDok9TVb9Sr2QZBellFJKHQwHJllVKaWUUurZaCCilFJKqX2jgYhSSiml9s2eDr1TSt0dXFmy/NiTjLa61Gc7HLv/PoIwfN4/V0rdvfRMoJT6ipx/5FEuvP9h7MoKQVniwpAnFhc5+51v456HHnzOnyul7m4aiCilXrTzjzzKxfe+j2g8omi38XEMeU60tMTF976PjS+eY/jxP37GnwOcfdMDulqi1F1My3eVUi+KK0s+9s9+kmhpiWJhAWOfTjkT74lWVwnKAhfFt/752hpFqw3NBnZtbXe1xD/Haolu8yh18L2Qz2999yqlXpTlx57ErqxQtNsYa/EEgGDxGGtxScLCeMByo4mxloIEA4Rk1c+DkGMrS2xtpWRzc7dcLbkxGNFtHqXuPFo1o5R6UUZbXYKyhDgGqhCkMCniPTIaYbOMWMBjALA4DK66rQjpaEAskDWbmDTFBAEmTSkWFgjHIy5+4GFcWe4+3u420NISvl6nOHIEX6/vBi7nH3n09j8JSqmvmAYiSqkXpT7bwYUh5DkAAQVxb5n60hLzq8scHQ1IjTDT3YT+gICSgGlgMR5TdwUTA0QRjuoLqFZPWi3M8grLjz
0JVNsxF97/MOF4VG3zPI/ARSl1OGggopR6UY7dfx9+cZGo16tWQfp9ZjbWaZY5hTWUBnpiaHvPzMYq9AdP/3JRkAqMgwDyAhmPEH9NulqSYJ1jtNUFbt4GcsSUzxK4KKUODw1ElFIvShCGnP3Ot1GmdaLVVVpbG8TimVhDzXkKG7Dc6tCzAS3vaW6t48sSGY1odreJ8DRLx8LWOkdXLtFa+jLS71d3nmX4IKA+2wFu3gYylAjB0wdzQ+CilDo8NFlVKfWi7SSIXvil/87C1UsgkHphO2oz7MwQtWK2azXc1gYtV5KvrCDWEohniAUDoyAgAJplTryxzrYXosmY8tRJjt1/H3DDNlCaYvFYJk8fyA2Bi1Lq8NAVEaXUV+Sehx6k/obX4gBnIBRouoyZratIt4dpteidOMVGnGK/4c+Tz84xSlKWjxwlMwEN5zAIo8DS8I5jG6s4azn97W/ZLcu9cRvoWuI9Ub+PHFvcDVyUUoeHrogopb4i5x95lOyRT7AgIEBghNRPCIDxxipL2QSiCIwhW18n2dygSFNotdgwMUmvz2zepYkQVgU2dCdjLn7wQ4Ch1mkx2urSeeC19DY3p/1HWpAkkGVE/T5lvc6Zt79N+4kodQjpu1Yp9aLtVLOkkzEC1I1nhKUArEDDeF4x3MZNgxT/p5+iZqCbjdgeDBjNzDLszNNZ65ELDIAIcDagfv5LbP/0/8kkSRFrq2ZnaUrRamHHY+xggA8CylMnOfN27SOi1GGlgYhS6nm7saupdw57dZnQOzyQEWJtiHE5AUJoDBHgjUw7iEBoIBShUU7YXl/BY2jjsQY6QGCgkY8AMAir2YitU2ehKIh7Pcpajda3fjOtE8e0s6pSdwB99yqlnpcbu5r6IESMYa6/TccIGLB4+vFRZvMVauTTVmZgMMjOqohUJ57YwBHxBAZyX23plEHCWAw1MiyQC8x7z2BtjaLRpGg0iAYDep/+HA9893doAKLUHUDfxUqp53TTcLu8oNndZKEsaFvBYMgFwNPOVwglx5gq8DBAZusUBNR9v/om0y0YU62QlKbKD8mcYxIk1GTndw2hDTk1GTGYjPDGkFvL5OJFlh97kpNveI3OnlHqkNN3q1LqWe3kgUQ7XU2HI2a2NqiJIzZVoGENJAaMGDLJcVSBhpl+BVLggJX4JHU3pO22q5K9aiGF2Bi6tsnQRIQ+oyro89QM9MImpuiRWQPGkjpHsxxx+Y8+RTYY6ewZpQ45DUSUUs/q2q6mGEPYGxGYkFRKEgMOg8HgTITFkeDYaZK6szWTBSm1ckir2KLmR3jAGHBUWzVOhNINqWEIggYRHjv9/Wa5zQSwhJRYJtbTdsL443/MxT/6E6LJpFqleR5D85RSB4/2EVFKPavrupqOx9S8UHcFNWMZxvNYG2AQxkEdg2CAPGzQD2eBaoulUfYIcNT8NAnVVAGIRZiIUAIdhBnjafk+Ik8HMQCBjciDFoJQ80I3DJntbhEPBjp7RqlDTgMRpdSzurarqSlLOmWftnUEeBrFNohQEtAqtzFUzcZqbkiz3N69Dws4kzCIFwgBI1CKVAGHQADUTJXAurPdA9WKiQBWCtrlmIYrKaxlVGtQF0+R1DDWUhLhmLZ/19kzSh0qGogopZ7VtV1NvbEkCBYoBAIcYgKGYYdrZ9YBILKTl4oAkWTMFOu7QYYXKKnKdQ1Pb9N4QEyAE4ufJq2WAm2ZMA4CtucWEFvdi0+S6b1ZzG6BMDp7RqlDRAMRpdSzuna4XdrrYnY6qAKDoI0XS7vYpBstUJhkN/jYiTi8SJUHAghCIZCLIAYygS2x9LEI04IagdJEDMMWpQjLYlgzAT0M/c4c0qgTZRkjY6ssWSAkw14biOjsGaUODU1WVUo9p93hdr/4KxSrY2riCY3BuwwrGQLMlOuYaRgi7AQTHRr5OgPbQLC0fb/quoohMjAWQ24N1lv6YZ0Aoeb6iJsQlhOcgUggM1AYg/GOcG2NvNViEs4S9/
sUaYqxT19T7cyeuXZonlLq4NJARCn1vNzz0INEacJTP/V/EuU5i2VGy2YE08RTEcEaEIGRF+q2oJ5vYYCWH7GzRFJS3b5lqhbwgRjKICbH0nFdSql6ixgDQ28YBZa290wwWOd3W7oDVW8TnT2j1KGm71Kl1PN24vWv5typU5ilJb7cbrO4uc4xW62CeMCZiO1whpl8DQARNy3TFcBjAC+mCliAxBpCga7Ppv1DhGTafMQCTQtj51lJasTf/Je492v/3E0Ny3b6iOjsGaUOJw1ElFLP206+yMX3vo94PGJtZo6wu0ldhNRCKAVHynUw1cqHna5siFTbMWICXFQ1KJvgqWMIgAbCwEQ4PBGOYOfxgLbxuCxj7coycPO8mzf/i3/M2hNf1s6qSh1S+m5VSr0gu/ki738Ye3UZJxAaYewhw9KxMk1mFbyp4Y0josQAgiOVCRnCUCyJEQKq37dhg6Ds7wYuhQgOQyaGI8bR/vSfcOlzn+GSDXBBuDuRd6eT6n1/8ev282lRSr1IGogopV6wex56kLNveoArn/0i53/6Z4l722TTLqupCIGp2rZPghpp2a/mxkyraBKfsW4SRmFMq+xXfUMEZt32btVMzyQUQJuMGBiJITVwvMwpsQxNzsbcAsSRdlJV6pDT8l2l1IsShCE2CMAYljqzDMIETEBpE4xUk3N90aXEk0nVdyTzVS5JTQoKG+EExgJXsRS++v97vsotadhguloCkam2aWJgO6i2c+q9LtRq2klVqUNOAxGl1Iu20/7dz8wwOnWKtYVFrgZ1PNAXw4iAbdsil6AafmeEXCAxnvlsi54J6GFJpn1GEKgZoUmBN7AVzuAFkul0XsFgsYyDiFACZDzRTqpKHXIaiCilXrTr2r8bQ9BIKWKZTt8VYnEgJX3jKUWmVTPVnvAosGy22mzVmziqJmc1WwUcuRgiN6JWbOOBbrRAacJptY3gAW8jTFlUB6KdVJU6tDRHRCn1oh27/z6eWFwkWlqicB7jHT4M6WGIBVLjMW7MxFqWTETqHBGABwkMR/rbNKb9RwqxCH46yxdcVWdTJbLmm1jjp5NsDCGOsOwzCBvVgWgnVaUOLQ1ElFIvWhCGNF91H8G5c7SGfUoDxbQbaoiwQUi30aJsNaBWo9cfcGxjlYYRZktHOU1wzafTdovp/40NFDaisCkU23j87m2MeGIPgzCGNNVOqkodchqIKKVetPOPPMrw439MEoZ4D4l3RNOqGQSywFZBSJLAZEI0HjEKAgbGMmh3aPW2SV3JKIgIxFH3nkxM1XlVMho+IwcmYsgNtBE63jMwllG7A+OxdlJV6pDTd61S6kVxZcmF9z9MNB4xOXGCMWAmEygdWEO6tVV1SB2Odrue5rOzmE0hn5mpmoUQMwxjjJTkNmVbPLOMWLaWgVjq4hkZizWehodtY+hHMT6MCLMJvtROqkoddhqIKKVelOXHnsSurFC021XlikkxjQaRjAEYW0swGjP7jr9MOjdLfbbDcH2T1V/4rxDHmOGw6qsqGQBWSvpBjU45ou6EYWDJBbqNFlGe040iGm97Cw/+P96mnVSVuoPou1cp9aLslu7GMQCRVLNidiUJZjAgnZvd7Xq69Jkv7FbZSBgS+SHG2t3E1MgP6GEYhSGpK4mBwDnyl529btXj5Btec5v/WqXUXtFARCn1olxbukuaYqY1LbtuUclyXZXNkSOMg4hmmTMKDALUvKcfJgyPHSNfXSXrzPCKv/u/cuINr9FVD6XuUNpHRCn1ohy7/z784iJRr4f464OQnUoWObZ4XSXLztC8Mq0Tra8zSevkxtB0Ba2yoDCWSVon3twkb7c5+/1/ndNf9XoNQtSecGXJ0me+wJMf/US1WqedefeFvruVUi/KtZN4o7U1ilarqo7JsmetZLluaN7KCpMwxpY5GBhHUTW5VxNQ1R47/8iju6/BoCyvG6Cor7vbSwMRpdSLdmNQsVMd81yBxM7QvOXHnmS01aXWaoAx1H
sDTUBVe+78I49WAfR4RNFuV3lOea4DFPeJvtOVUl+RG4OK5xtIBGGoSafqtru27LxYWMDYaYZCmlIkCdHaGhc/8DBn3/SABsO3iT7Lapcryxf8YaIUaFCh9t5LdX66sey8JCG3Dep+c3eAop0OUNTX9O2hnzIK0P1SpdTB9VKen24sOxcMmWmTsomBaoDiYKADFG8jDUSU7pcqpQ6sl/r8dGPZecSEWffU0zfQAYq3nZbv3uV29kvDnf3SNIUgwKQpxcIC4XjExQ88rGVtSqnb7lbnJ/MVnp9eTNm52lu3LRD5yZ/8SYwx/PAP//Dtekj1PNy4X+qxZKaFYHf3S810v1QppW6na89P2IChPYpMP7Ze7Pnpul42a2vIaIQ4h4xGRGtrOkBxH9yWZ/qTn/wkP/dzP8cb3vCG2/Fw6gW4cb/U4olk/HSXzCQh6PdZ/vTnNYlVKXVbXXt+MgiFqVGYlFiG1Q1eZD7Hiy07V3tjzz9NBoMB3/d938fP//zP8xM/8RN7/XDqBbpxvxQg4OllTru1TWs8pPfb/zdjYzSJVSl129x4fppxF6+/wVeQz/Fiy87VS2/Pn/F3v/vdfPu3fzvf8i3fooHIAXTd7I8kebqmHpBej2O9LTzggwBfS8GaF5wkpmXBSqnncuN5YuGVL8eXJT6pkWxukh07hgmC3dvv5HOUp06+6HwOLTs/GPb00+BXfuVX+NSnPsUnP/nJ53X7LMvIsmz3v3u93l4dmpp6pjbdMp5wan2VhvH0xXJk0MMN+4yDiFG7QzRNEnuupj9aFqyUei43nieM91xyJWUQUHOemWJCefEpNlsz+NmZ5xwjoA6XPUtWvXTpEj/0Qz/EL/3SL1Gr1Z7X77znPe+h0+nsfp0+fXqvDk9d456HHuTMO7+X4uRJ7HhM7epVTqxdZdZ6AqBuBGsszgQ0y5yZrQ1cED5nkthu2d3SEr5epzhyBF+v766onH/k0dv3RyqlDqSbzhO1lNZkxEKe0Z5MmLRarLZnAVjob1Nbvoodj6t8jh/4Xr2guQMYEZG9uOPf+I3f4Lu+67sIrllKc85hjMFaS5Zl1/0Mbr0icvr0abrdLu12ey8OU13DlSWf/m8fZPjB32Fx1KdlPEMsBiiDJkZKhuTEXhgEEXmccOz/9f3c9xe/7pb39bF/9pPVls+1bZSZLqmurVGeOslf+Ikf1asZpe5SN54nMIb65cs0y5xhENBwjkEYMzp5EkRIlpeZzMxy79/5Pqy1TPpD3e49oHq9Hp1O53l9fu/Zv9w3f/M389nPfva6773zne/k1a9+Nf/0n/7Tm4IQgCRJSJJkrw7pjrMXuRfdP/sciSspDBRVn0HGNsWIo+XHgKFvA5ouZ5P4GZPEbiwLlul9GUTbKCulgJvPE+NJzKz3ZEGAwbIdd3BBE8Zj4jjCpynpxjpf+Jn/TGMyJCody2HIEydOcPa7vl1XRw6pPQtEWq0Wr3vd6677XqPRYH5+/qbvqxduL3Ivdk8KSQ3JJ/RsncgYAimJfIEHaghDPDWBstli4ZUvZ+kzX7gpGLqxLDiTlGAyICyGEAZIHGsbZaXucjeeJ0qJGYdHCN369BYlqS9Iu1uYsiR0JTMI2eYyQ29w1mAK8F96kqWf+QXg6QR6V5Zc+cwXWP384yCGo69/FSde/2pdOTmA9F/kENqrluw7J4Wi3sAPDCITUrG0KDDVggYGCLxnZAzhy8/yhz/+U7cMhq4tu5OyZGb7EnVXYEXwxpBbyyR65hUVpdSd78by3IaskZbreCM4DO2yz6x0GQA9MSyIEFoQwBlY82BCS12Eo4MuT/2XX+XUV72Wz/3332L4P3+bY5MR8wgew8b7DU8ePc7L/tb36MrJAXNbA5GPfOQjt/Ph7kh7OcJ696RgDeMgYr7IaBpHNH0IQ3UCCEUoxCCf+zwRVMFQFGH7A9KnnuLyz76X43/7+/CLi9SeeopaWRCLZxIEOCBAmC1LugKTrlZGKXW3uql9QF
ojs3aaIyJ0xOMFNgVqgaEp4IA+lqx2lEa2yaAsySNL6ISF5Sv80Tv/IfdOBtQtmMDgBQqE2HuaV6/ctHKi9p/OmjlkbtxTdUQ4IuDFtzzesTuDYTAgi2NaxhMZIfeQC5QCXoSJQCywMOyT1xtQFNSvXmW2u0mzyDg+2Gb93/9/qd/7chJX0PaOiTV4DCFCzXl61pKHIZf+54dwZYkrS5Y+8wWe/OgnWPrMF3S2jVIH3Ev1nu088FrEGJKrV2E0YtTu4IGjpSOkCkJiG9AQw5XG/Qx8lV+4UTuFrR3huPXMe0fDCMet55XTIEQwFAJiIDKQWqhbz5FBlwu/8Vt6jjlAdGvmkLlxT5VpM/ad1F+JY6KtLS49UvVueSEJrDs9RS78x//Cic11rAGPITZQBDXEO0qfEwH1wFOKgbVlsClePJPA4oDSe47kGasf+RiYgM3QkHiPJ6A0EYPQMZ6ZRQKLXV7h07/2m3Q//XntNaLUIfFS5Khdex9xnlMvcxprK4zCmF6tTlkWzEuBdUKMMMYQlX0KGxCJp10OSCTDCRRAy1QfaOE0CMGAmISrtdPU/Jgj2RIgNMUTXr60myivDRf3nz7bh8yNe6oB+W4QQn9Ae2udlivZ+Mjvc+4Tn7zlyeG53njGezpGiGyVGOIERsTErkdiqzc4gBeoI3hr8a4glwBvAgpryMRTKzIi51ifO8IoLwDIGvNEKRhjwDnirU2GD3+ISOQlzXdRSu2NlyJH7cb7mMzMMJlMSLa3cVFE4y+/ldf8+a9m42f+I9tfvozBIF44O7mEBQJjaIyfQATGQIghMQLGYIDc1lmqv5JWsclWcpLTw88j09+rW2hkE0ZbXW24eEBoIHLIPGNL9v6AmY1VWt6zGUZki4tQFDedHJ7tjQdw8b3vIxkOKKkCEDD04yO0ym2ioMoR2VGbJo15P0CAXGK2gpCaH+GNJbMBi64g2ljDGYM3hvFkzGhmBlotmEyoFzlDDNnx4y9pvotS6qX3UuSo3Xgf3sZYn4MxZM0myWBA908/i/26NyK9Pq/IxxipGgBYU+UT7DQEEAMmjFhpvJqZ4Reo1mQBYyiCGpSGZrnNTFFV4Xiq3285z5WPfQJz7hyRc3oRtM/07H7I3Kolu8RxtRLiPT0bMJidgyCAPKeo1Uj6PS78xm8hruTSL/7a9VcykwnJhQss/fv/iItCgixDBGqm2u4xRmiUPa5NJzLAxNYpCElcDy8QGKFjSibXJKamZY6lepH1g4AAaJY58cY6215IBn0AstlZjLWURIAhJNdeI0odQDfmqJUkVO/ZyfN+z954H71shvmNc8zkm1iRqtfQl57g/P/+b6htbtLAY0y13RJSBSPTRVk8EOFolFtMbwRA4ka8uvsJANKwz0p6D8fG56uzmIGG8Sx86k+IgM0wZuwTosDqRdA+0Wf4ELpxhHW0tUXLlWyGURWEYEkvL1F3OVYEENyXnuDSe9cJ83z3Skb6ferb26Qup5WNSYwwFkPdeIyNcUBEgTcBgVRbKzsngMSPsIREVCcGMETknBRhBYsRqImwPf2NjisZG8sosDRLx5HNNbbqTUYisNvELsCSP/2HvsgR30qpvXFjjpozMRPbpuOWqhs8j/fsaKuLLQp86QhWVzmRbRD7MR7BT08wR0Wwm+t4qsaKRdCm7Xo3BSFVfpxwJFva/T7AE62vYSZfYTG7QOKHjKWF8HTlX2IhdUI3sDRcQX9Uo2BC1LB6EbQPNBA5pK4dYX3pkU+y8ZHfJ1tcxIwn1LbHJBJS2hKHIRDHjPPUtza40qlWH1x/THNrSMPlZEHAWEraBhLjEanerJMwJSqLaqsFSz+YATwtV5XcliZAbEDks90tm8BA6iHwJYFUtf7jsEnHT1iQgtLBhGqbJnjwz5F/+rO7+S4hk+v/yK9gxLdS6qV3Y45aLAMCd83Fw/N4z/aXrtKejAjHA5oiJAZKEZwJyW1C6kfEBsw0tHAEfKn95zg6eYpjkwu75x
oDbNROk7ox7WIdj2ElvYej46eouy4ybX4U+4yjkwvAdCuZ6oNvJhC8F3qBZXayRJY5svpilb+mF0G3lZbvHmI7I6xPP/Q1FLUa5Dnp9hbNsktJRoll2lSdyTSjvDYcIM7R2F4nwjIKQjyGRKo3tscgBqwUu1UupUnoRfOElIRSPn0iMAYrHmD6SGCNYcZ6juKp4zEChbVsWsO6CZhgyA2MwpiFN76uKhfu9RDvr/vbdkZ8y7HFFz3iWyn10tot8Z++Zw1CSDUf7Pm8Z88/8ii93/t9rAiRsHuGqlmQIGUzPUO8U/0yLbuNjWM+u4wjxrBzngrYSE6TuiFWqryQ6kwnFDbh9Ohxjk2eAqrA42p6L5lJqdaHYTNawGNpGWHGeyLXp1mOYDyuDlQvgm4rDUTuADsnh2Rri9Tl5EEAGCbhDLlNqHnPMAgZGWi4AtvtUXcFwggz/R8f1CgxWKAUgzFQKweEUhBKxkyxRs0NSP1o92RQ8xMCKRkGLUpT9TKxQGgMkTFEGMaBxfoJYMhtSC+ICIF6mZN2Wpz9zrdRpnWitTVkNEKcQ0ajaiiejvhW6kDZyVF7Me/ZnSTVcDxmfX6BfNrbwxpwYqj7ISfyZSzspJwC1TllPl+hH88zCGem3/OMwhaJG1MvtxmGVcBwbHyemh/t/q5QVfdZybnQvJ+xqSMC3XCGQTRHYCBCqImv8lPKUi+C9oEGIneAnZODiyIaXqgGKnvSYpuZYkBuLIPOHMMgJhUweZU7svNmt1ISmJCBVFcP42iWkW0QSL67r7rzdS1Dlb0eSUkoBULV9KycLplERmg6Ibc1DBCKIxDHNUsq3PPQg5x55/dSnDyJHY8JNzZ0xLdSB9iLfc9el6TabtNN51lN7yUyhpqFrfo9RDhEBCdPn28EwzhocWx8nsLWgGr148zwMRI/5mr9FTzVeB1b8eJNj2motouPTi6xMLmIR7hSO8vMZIm43MYDRoR0miSLc3oRtA/0Wb5D3PPQg2w/dZHB//h1at4hGLzx1QjtmRlMq8W4KMj6BUmeVUuqImAcqfdE9OlhsAKtcpsQwZobQ4/r7cQTiR/jAWcSHGAlQ6YJ7DPGUThP6rMq4x0hw9K3lklvsHvsO/ku2lRIqYPvxbxnr010rRU5M8WIIErx06udtOwBhtgYShMwDlJSN8AgrCcnKW3MPf0/u+l+W8UmBSGX66+m5kbUXf+6nwtVK4IkX8NSbT8TREgxZgJkWNpGqrJg56uA6u3aR+R20jP9HeSBv/ZX+Niffpbk0iWyZhOiCGq1qkLGe0JXsrx4HBpNal8+R8c5htYwsgGRc8yaKrSoskbAS5Uudm08UhLgbEjk86q231SlvBObIOKwCI2yKgHOBFJjmPd9cqrl1olYBGg6R3/p6u797uS7KKUOhxf6nt1NdM0yOvmIRjmm7i4CBjEhqRsQ+zEYKGzKZnKKk6PHMQiL4/Ncqd+HNyGjoE3dPT2jqlVs0Co22Cq3dhNUdwhMe5CY3eX/k5Onqm6sIkywFMCmGNa+5k3c+x1v0YugfaBbM3eQIAw5+13fTt5qE02mFSgi1+3fnv3ev8p93/tddB/8alZqdTIbEntPaaotldjINKHLYE3VMGhsUyZE5AJOPBNTA6q1U0dV2lsvtgnEU3cDDFACkTEIsC6GdRuyaUO2wpDCVEFJ988+p/MelLpL7OSy1Xtd4uEIqIZnFkHC5fqrWK+dBqbltX7ITH6FnXXXTrnJa3p/hDcBV9N7mdg6AKUJudS4H8Eyly/TixYYBi0AxraOx4IxBDCtxIHMg5+evwoDzhiuHDvBm//J3+fkG16jQcg+0Gf8DnNjjxE7GOCDgPLUSRqvvI+LH/zd3a6qYqBZFiTAyBjGBiIRBEMy7Q3iBEpCIikYxAt0ik06rru7LSPAJGjSlC2sH0y7scLYQ8NWOSeFNQzDFpEUNIsRhQ3oNtrY1VWt01fqLhGEIWe+461s/7v/QKvICHb6jwmEUtDMV1
lKX8Hc5BI1yagXvWpF45pFjsLGBJITSlUybMWRuj6FjcmDOv1wln44y+LkApmtEficmWIFj2DFTIfoCV0Tkic1rAhZs8nL/tb3aACyj/SZvwPdav920u3d1FXV9nqE4xFjYLPRZhK2WewtAyVHxBOYiEnUol1sYIG4WMdOQ5CdxkBWShrFFrLzDargo8TgxNMTi7XQdBOMzxmEcTXwrp4Sbmxonb5Sd4nzjzzKxff9d85mE8JptYynyik7OXoCAxRBA2yEcRkeEAR7TZr8bL5CJ1/DstM2QFiYXKIwCZvxCe4d/ClOLFcbr2Qmu0roJiAwEcNWKRwJq3yRdYFGnFAeX+Ss5oPsOw1E7lDX7t+6suRj/+wnb5oPIUFANm0clEzGFM0akRTkxjDEMC85SbFRJaBSvel3VkIyW9XkJ64q50Wq7HQjMBKYAKVYrrZn8PV6VRYXhpCmVcOg0Ujr9JW6S1RD7n6JIxtrZAY2raHj/HT71yDVvDpmJlWH1p1qPaY/45qOqgZhO1pgpljbvV0kGcfHTxKII8BxfPQkVgoQWElOkUyuMMGzjmHcbPCZeIa3/s2388Bf/FpdCTkA9F/gLnDjbIfMNLDiCcNqOJ0Vz1GX0+yu0ECYESHbPRNUL5KdYVGeYPrm90Q+p9zpfihCjKGkuvq4ungcYwxxv4+v1cA+fV2zU6dfnjqpdfpK3eF2+ock/f405ywkxyBhQt0XNGSCneaoVT1FqvJdjyFGWA6PEItnxm1ijcEDvfgoiNBwXcBT2qrCZkckOSJVLtowaJGZOrEdMApiwmajqsg5dVKDkANCk1XvAjtlc0znQ0QyJmIMaUppLDMINQSHYdME5AJ1U9XV575aBSmCOv1whn7QYiQB4jJA2OmHmouh5+HxqM5KkmLbbc5+7199xsZHLghI7305y489qQmrSt3Bdi+EkgQrsJWcYmQS1sImV5KT142H2IyP0yWhELBGyAW8sfSiGQa2UW0HI5wYPY6zIZvJMXKbspacmtb6VQzgqC6o5gdfIHIDShtQdDrkAhLHNBvpfjwd6hY0HLwL3DgfwvJ0O3XZGacNeGMobUAPCMQRI9XSKWDdiMhNiPCMPMQWCqmSUl2Q0I0a5IwoTpyA8Ri7ukqt0+LMO7/3usRZ6xyBK8EHFB/9A8794Sd4YnGRs9+p+7RK3Yl2LoSKegM/6GGNpQzqJMUWmC5DgcALpcAgjnBhh3qxhhcYiqGRr+Fjz2pygmR0DoDIlNTyTYZEBDKiKVcYepkm2VeTwydiKAQGYuhHEdnMHDMBfDETFl51LydPLOzfk6Kuo4HIXeDY/ffxxOIi0dISRZLs5ogwHhOJZygGb8DZhLBq/8OqDcGX1Az0/HRbxngaBpq2+u+xVG94fFZdfTSamH4fMZagLBltdbnvL37dbuLs0h/9KZPffwQxhqLTqVZo8pxoaYmL730fgAYjSt1hdi+EAss4iDg6/DKjIAQMgS+qKbrTvLS8uMqgPYPdtiyIm86VAV/2aLkJYiBEqvYAklGTgoEIXdenERgueajVUuKyoI9hE0vYalOr12hOxiyPHJdOnOEdb3kIa3VD4KDQf4m7wDPNh7CjEQ0vZMayWm/TazZZnznKxsIipQkYYsikmj1TZbdXKyA7Tc5CBDGwjcXgmRl0ObKxxsL6Mq3xkP6V5d3HP3b/fQzPncd6R3H0KCZNMUGASVOKhQXC8YiLH3hYt2mUusPsDsrr9xl1OhTGUnclAQ5nBCPVuWRiDYPZeczMDFuzC0zE0jBVr5GJgUwmFF7wUiXFe6nySYYYktAwEYOPEpajGo8/8FWcfMs3cmxxgYYrKHs9njIRqw+8kXf8nb/Ofa84vd9Pi7qGrojcJW7VXyTKs2nGuWdu1MebAeMgYpLWSX3JxFi8ePpxijF1juYbhEYY+moqZl8sq0lKs8ipeU8WBDigWZYIwtaHP8r5s6e556EHb0qYdUSAEFBirKVotbDLK9pXRKk7zM6F0M
X3vo9oPKLbbJOOBtRdQSowxjDBMgkTCALEOYhCNsOQsCwIDDRdSWkMV2wIpSeiqsyrW6hbw9jDKpYrUcrKiTO8469/FyfvOcmx1TVWLl5l6IVXnDjGyVOLuhJyAGkgche5tr/I0h/9KZOP/QGDIiMSGAcBAdAscxr9nHCavNoPE0YnjiFZwTBfwHqHN4Z6v0ceJcxMhoQIgyDEilD3nokN2J6dJ5qMufiBhzn7pgeumzMBUJqYSMZPH1ySYAcD7Sui1B3oxguhPKmRS8Jqq83MN3w9ndMn2Zg2W9xpwji89xWsvOJehn/6GeLNDYwIRZww6Mxwrj3PhdVNtpdXsVmOj2Nmjy/ysq96Le94y0O7Kx7hsUVOHrt5GJ46WDQQucvsbJM88Uv/g0iEjbmjzGxtUHeOibWMgoBOWdLAs2HCamBeEGDqAdRrVYb7aMTENwne8Hr8H/4BXoTUOQob041Csk4T02pRROHuKseNCbOJDK8/sCzTviJK3cGea1Dey77+q275M1d+903fN9aydGWNXn/IYDim2UhptxqcPLGgKx6HkAYid6HrtknSlI0goba9TbscYr2nMJCJZRwE0Ghc97vX9gA5+sbXsvLopygbTRChjOpILSU2Vfvla1c57nnzV986YRbtK6LU3eLZBuU908+e6funT+lKx51CA5G70I3bJLZZZ9xoUYy3oXRgDLXtLcokIVpbo2i1IEkgy6qAoV7nzNvfRtKs46MIwirpNAIgf/qBrlnluG6f+FnuUxsMKaXU3UXP+nehm/uKOKxxUK8mWspoRF6v0/rWb6b7Z5+7aXjemelsBleWL2iV49kG8p3ReQ9KKXVX0kDkLvSMfUW4PoB44Lu/A777O55xT/fFrHI81z6xUkqpu4ue/e9CLzSAeLZy2hezyvFs+8RKKaXuLhqI3KVeym0SXeVQSin1YhkRkee+2f7o9Xp0Oh263S7tdnu/D+eO5MpSAwillFIvqRfy+a2fOHc53SZRSim1n7Tzi1JKKaX2jQYiSimllNo3GogopZRSat9oIKKUUkqpfaOBiFJKKaX2jQYiSimllNo3GogopZRSat9oIKKUUkqpfaOBiFJKKaX2jQYiSimllNo3GogopZRSat9oIKKUUkqpfaOBiFJKKaX2jQYiSimllNo3GogopZRSat9oIKKUUkqpfaOBiFJKKaX2jQYiSimllNo3exqIvOc97+FrvuZraLVaHD16lHe84x08/vjje/mQSimllDpE9jQQ+ehHP8q73/1uPvGJT/ChD32Ioih461vfynA43MuHVUoppdQhYUREbteDra2tcfToUT760Y/yDd/wDc95+16vR6fTodvt0m63b8MRKqWUUuor9UI+v8PbdEwAdLtdAObm5m758yzLyLJs9797vd5tOS6llFJK7Y/blqzqveeHf/iHefOb38zrXve6W97mPe95D51OZ/fr9OnTt+vwlFJKKbUPbtvWzN/7e3+Phx9+mD/4gz/g1KlTt7zNrVZETp8+rVszSiml1CFy4LZmfvAHf5APfvCDfOxjH3vGIAQgSRKSJLkdh6SUUkqpA2BPAxER4R/8g3/Ar//6r/ORj3yEl7/85Xv5cEoppZQ6ZPY0EHn3u9/N+973Pt7//vfTarVYXl4GoNPpkKbpXj60UkoppQ6BPc0RMcbc8vvvfe97+YEf+IHn/H0t31VKKaUOnwOTI3IbW5QopZRS6hDSWTNKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jcaiC
illFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jfhfh/AncKVJcuPPcloq0t9tsOx++8jCPXpVUpEGE4KSucJA0ujFgEwGOf0xzkA7TSmkcYYY/bzUJVS+0A/KV8C5x95lAvvfxi7skJQlrgw5InFRc5+59u456EH9/vwlNo33eGEpbU+3dEE7wVrDUkUkOWO7eGEonQIkIQBR2cb3Htijk6jtt+HrZS6jTQQ+Qqdf+RRLr73l0j6fYokoag3ILBES0tcfO/7ALjnoQd1xUTddbqDCZ+/sEaWl6S1iGYaMcoKLqx0d1dHkigAhKx0LK31GWclr3v50d1g5FarKbpqotSdRT8JvwKuLLnwX36VIxtriAGbT/CDHuMgYtTpEI1HXPzAw4hzXPzg7+qKibqj7QQNRenYGow5t7TBKCsJrWVSlPSsYZwVZIUDwHlHXjisBWssIGz1x1xe69GuJ/RG2U2rKZ16jRNHmoRBoMGJUncIDUS+Ap/+bx/k2MoVAMY2ZGJTQnE0yzHJ5gbdZpvo4iWWfv4XibyjaLfxcQx5ftOKiVKHWXcw4fzyFpu9McNJziQv8QKhBRsZ8J7hpMQ5wQAy/T0BnAePxxgY5yUrWwPm2imX13pkuaNeCwkCi3Oela0Bl9a7pHFIYO1ucHJyoaVbOkodUhqIvEiuLOl+5A/pCGyEIQaLmAh8zigIqbuSdDQgdSXDKGF84gTGTouU0pQiSYjW1rj4gYc5+6YHdJtGHVpL6z0+/9Qao0lO6Tylr8IMA3ggLx0iXBeA3EgAmf6v3jDjylqfLHe0G08nsBZeyEvHJC+xxnB0pobzns3+mGGW88pT8xqMqENHt+01EHnRlh97knDQZ2IgAByG0I0xCGDIAkvbFQTAVhghgyFFVMcmEaH1GGspWi3s8grLjz3JyTe8Zp//IqVeGBHh6mafz3xphUlRVq99UwUcMA06BATBebDPc/dkUjg2+kOOdBq7QYiI0BtllN5Ti6ttmdJ54iig3bD0hjlLa33a9US3adShcctCh6NH6bzxdbROHr9rApM7+6/bQ6OtLgCDIKbpCjaSDqEvML4kFI8RT1M8HpiZ9LHjAeOwjWPCeGYG02pBkmAHg937UuoguzZxdJwXbGyPuLDaZTjJscbgpVrvkOt+B9z0G/6ZlkNuoTfKqScxtViIo4Ci9GSFIwosxkCelfTHGXEREIcBaRLQHU0YTgqaafzS/dFK7ZGq0OF9ROPR7ra93d7myLknCM49wXqUkKfpc+YT3gkrKofraA+Q+mwHH0VMooi432M+28QBNRFqRkiAyBpyLxgPozAgkCF150g21tkGCAJ8EFCf7ezr36LUc7m2DHeSlwwnOYihdA5rDd7LCwo0nkvpqtWWMAho1CIatRgv1XtpnBcUpWdSOIyBwFrqSUQSVSslSh10riy58P6HqyBkYaHatu8P6PR7WAQjYL3H12rPmk94yxWVhQU6X/UGWieOHZrA5GAf3QF27P77eGJxkWhpie3ZedrdTY66gtAIxXSDZiTVls2sEQrxOBMyDAwNV1Lf2qKIY8rTpzh2/337/eco9Yy6wwlPXN6oEkeTkNGk2M3nyJ3Hv5QRyDWcB4OnN8oYZyUgjJ3gpsFGaKt9IO89/XFOVgSM84IZNE9EHRzXrljUWg0whtXPPk50+TJFu42xlhEdZreXiMQzCiKseFJfMjKGYmHhlvmE5x95lIu/8EvUhwMwBhHB5jmtLz1B+KUnWI9qz2tF5SDQQOQFuHEJ7Mx3vJVL//mXSfo9AucYi2FiDDUBY4QtE+AsdCSgjaUrBd4EFMYw53KuhnXOvP1tBz5aVXcvEWHpmsTRovTkpZs2JSv2LAjZUfqqyqZ05U0/K1xVaSPTZJQCuLrepxaFRGGgZb1q3127YhGPx9TLqpOwt5ZFV7KZZ4xn58lqR7G2ThYUgGEQzRETE/b7lEDRbF6XT+jKkpX/9huc6a5R90J8zYZoidAXi/UOn6aHok
JTPwGfp1stgbk0JShKWsWEowjeQCSGraBGhpCZ6sptICEdyWh5T2483hgmGGp//qED+8JQd49rcz8Ca6oPfi+EgQURuqMJ9VqImeaBiFTJp+Pc3Z7je5bvi7AbjJTOc2F1m/44r5JY04S5TkoaR9pvRN121+aAuDCkVubE4kHAiuCBtitINtYw9QlpPiALDKE4ZsscGwYEwx7FeMDEhrjA7uYTrn7045y4fIGmd4Dhcu0e5vOr1P2Y2Bo64inLghE844rKQXLwjug2eKHJPc+UVHRy+QoCrNdSomxMbiAGrHi8CWi4jDZCYnICIDHQF8N6GFGkDe792j93u/5kpW7p2tyPLC8Z5yUGSOKQWhwShwGTvKQ+nQ+zk5SaZeVLmhPyFbmmNNh5oRaHeBEurHb58soWjVpMLQ6134i6ba7LATlyhPqVK8QiDIIIARqu2t6sLl4djckAjCf20PKeutliXHbZtOARWmWOOEN/6SriPe5PHqXhHYIhFxBxOBvj/BgLjKJ5YleQD0Yk9fqBr9C86wKR8488yoXf+C2ipSXCsmQlDHni5EnOvuMv33J14lZJReI9teEQTzW+uJFnZMZQ2oAMQ93l1D207HRZefqCmwg0jadRTniShuaGqD3xfNuiX5v7EQSGSeFw3ldbHXlJGof0RznDSb4blDjvKUtPcQCSQq1hmiNSBSIWQCAvHKOsQESqk70XkjDQfiPqtll+7EnsygpFuw1ZRigB3WSeqOyRB21gQNNN8FhC8dRcSWYti95Rs4IXQ2SEeYFCpHpdA9t/+jnKb/w6/MXLu58tpYkYRvMENqJedhGBPEhwQQM/uIoszB/4Cs27KhA5/8ijLP3ML3Bk2JumkwIFyLknWPqZZeDmPbRrX1DGWiamRbi9zFGXExjBApFJGJiIRtmjF0Zk1nJEHBYoqfJFCjE4DAWQINRHA1xZHshlMnV43WrI3K1WAq7N/WjVIzZ6Y5z31KIQDGS5Y5SVzLdq9EcZy5sDwsCQFwcjCAF2m6TtMtU2zTDLcV5IogAvVRM0AdqNWPuNqNtitNUlKEt8HGOGQ7yJ6SbHOVL2yIM6WdigPrrIdr1Fko2ZdTkIpNOXZC4wDJskCHN+iIiw6YTZLz3B4x/4XWbEVx2rDFgpCf2Ey41XszA+T4DnSHYVL/BUaciynMS7A12haff7AG6XnbkwC4MuFiEPQsZBRB6EWISFQZcL/+XXcOX1SXE7LyjiqjdB3FtmsbtMjSq4mGBI/Ii0HNLA0y4LYu+JgLGvTvgTU2diUgIjTDCsUTU7++LDv7cPz4S6U+2scGz2xyRRQLMek0TVSsATlzfoDie7tx1Oit3cj9LJNT06DAZDFFqyomSUlRhb5V9Mcof3ByMIgWor5trtoSCwRGGIc7L7t1hbVRN4EYwx1Gvhbr8RpfZKfbaDC0PIcyQMSVyPheETADSKNZrZKt4YylaD4fw8y7UGttOhlOpzJbWQhCmj2nEcUGCILHS8Y/CxR3AilBgQiI1wavgFXr39+0R4wmlTQWPglPW0V66Qbm0ixxYP7Cr8XROIXPnMF5hbXUYMjIKI3NRwJsFhGQURYmBu9SpXPvMFXFmy9Jkv8ORHP8F4cwtng+oF5T2N7U1CETLAINQRYgOpKYmMoWM8M/iqvbWBLpbLtsFy1GDThnSDiEkQEAKjlbV9flbUneLG6pYoDLDGEIUB7UZMllfTbWXadKyclt0GgcVPP6jtNSsEVW+QqnRWfLW94b3sNic7aASQaYKtiFA6T5aXZLmryhqnf1sQWLwX7Tei9tSx++/DLy4S9XqI85TG0nAZgmCkpOkmjIMIkoRoMMA0G6SDPh5DJlBisOI5OrkAQY3EGmasIbWG2WJMNs4op3uRXiAxwkzZJbims/HYCyMPR52jmec0Xnlw+4kczKPaA6uff5y2eHrWUrVgr2GBYFpOlRlD23vO/cpvcO6Xfx27toYtqo4gjTzDjYcMZ+dIXcEosMTOE0QtkB
znJgRAbMDuNnmvlo6HgSWSPjhwJgAg9EIJ1BcX9ufJUHeca1c4btxyuHEloJnGhEE1MM45jzVmNwk1mP6u94LzwijLcAcmK/XZlV4YT7dlpvEWAkSBJctL4iio/l5rqoogpfZIEIY0X3UfwblzLA77ANSM0CxzxhhGNmCS1onW1ylqNVIRQnE4gZYFsQHLtRPUh3224uPU3IBOsYYVISo9aa8HAYQGLFKtDmKoPt2q170FUmvYCGO8tWRPPHlg0wEO3hHtFdk5OVf/NxRH6MaE4mh4R4pQs3DmiccZWcN2UiMtc1qlo4YQGWG4uoI10LMQItT8iEBKxFQviB07ra5bFsQ7Nq2hnAYhgqflPWtxwgNv+QssfeYLh7o1rzoYrl3huJUgsPis3F0JaNQiOvUam/0xrXrVlXQ8HSa3kyOSF+7QBCE7ihuWbCzV+3F5q9pnFwxzrZTGtApIqb1w/pFHGX78j0nCEO8h8Q7x0DBCTTzbtsrFKk+dpPPA6yg+8EECL9WKfdgm9ROOj84RScFMsYaR6n1rgLYVAoSxq3IQY1NVs+0EIKXAhqkzCBoUZpPRyZOQZVo1cxAcff2r2Hy/pe4c/dASuyGhOGa8I0CwIjigNIaaCK/MhngMpQEHGIGO9YTG0BIILBS+rH4WJJR4rBQgMBRTlesiNI3gnGMjFGrO0xJhjCF77Wv5xL/86etb8x6CDnjqYLpuhSMMbvr5jSsBxhhOLrQYZjn9UUGaROSlZ1KUiIfClbtTdA+zILDVe7X0rHZHnDzS4uRCSxNV1Z65ttJycuIEE6A/mUDpEGOodbeZzM5x39/9Xznx+lfzpd//YybZBKG6oO0FDbKww0J2iUE4Q80NsVLSC+dplxsYIBeDp6qosVSrIt3kGGExwJUDXBRRxh1mxuvk29tkQUh9OOTL/9f/ZOmxJ1h801dx+mUnsfZgrAzelqP4d//u3/Gyl72MWq3G137t1/LHf/zHt+Nhr3Pi9a9m4+ixqtzWldjpSkhYjQfFGMjEMDFViS02pjAxk2lCUGirJyvzsrunHhlILBQ2Ae8Y2habts3IRDhgw1sKgaYRTjnHwrRSZxSEtD79GdILT+HrdYojR/D1+m4HvPOPPHrbnx91uO2scIwm5W4eyA4RYTQp6dRr160EdBo1XnlqnrlWCkAtCgisJbAGma4gTitkDxUDRIEhmG43eQFrq+2mhZmGlu6qPXVjpWUZNCkaRzHtFrbVJJubI5iMsUFAEIYs/c7vcdwIdSvUDETiKU21RtALZ9iMj+FMwEZygtJURRNOwBAwSRaQ6YpIZhu4qEUnNCz4Li+fnGPOCKd7W7x+a5V7fcbZP/sUs7/ya1z4x/+MX/yn/wdPnru0n0/Vrj0PRH71V3+VH/mRH+HHf/zH+dSnPsUDDzzAt37rt7K6urrXD32dIAx52d/6HlabHTzQ8I4mVZ8PD6zbJptBm4ZUUenQ1AhxINUJTabzzR3VzwPz9FTRhusR4qn5yTTJqOqaNwwtXbGMPKyK5Ym0zfmTZxADHV9SK3JM6TBBgElTioUFwvGIix94+KbqHaWezc4KRxIH9IY5RenwIhSlozfMSeLglisBnUaNV5+Z597jc9x7Yo6vuu84D9y7SFqLsAbCoArSD5OdxmaBrY49DgPqtYggMIQH5ApQ3blurLQsiBnbmadvkCRY5xhtdfnkL/wqpx//ApF5Oq8wkQmpGwBQdwPGQYNASo6PvkQoVbXXOJ7HBym92kmsjaYXDIKVssqLMtWWTUh1IZwYIQBqAdSt8ApKvvbJz/M//98/eyCCkT1/V/6bf/NveNe73sU73/lO7r//fn72Z3+Wer3OL/zCL+z1Q9/knoce5OTf+3+yfu99dMOYXKCHYSVM2GzEBDKmZqp95JrrE+CQICaLZ/FSJQMFNiQ3ARMxBNMT9M7enJGclutSk5J8mrRaM4I3hs2ji7hjixjniL1nOwyIREi3t3avYI21FK0WZrqXp9QLce
0KR1Y4BqOcrHDMtdJnbOLVHU744sUNvnR1k4urXc5f3WK1O6rKX6nKYw/jDo0XyJ3gpqXJk7y6qEiiu2Y3Wu2Ta0t3AeqyTdtdfvoGWYYPAuJ6Dfc7v1tt3wu7K/ONsstMUVVUNsoec3nV46omI+zOqnrYYhg2OTP8PJFUF62dYo3EjQh4eiUzMBBRXRhnQnVBLYZCYM563rRygYd/5w/2vSx/T9+VeZ7z6KOP8mM/9mO737PW8i3f8i18/OMf38uHfkb3PPQgZ9/0AJ//zQ+x9Gu/TpnW8Z02GEMx7BPK0xnHpYnIgiZHi3WMqZKATBAzJKBwQ+ZFdsulgmnFTAgMBAbWUneewAgb1iKlI5uERCXTfBRLFlgwKcXEEKfTAzzgHfDUwdZp1GjXkxfcWbVeCwkCi3Oe4bhaUZFDGoRca6fpU1F6kiggDnVFRO2tayezF0mCsXZ3e1O8J+r3KU+dZOviEkeKjNIYQjyb8QmMEcRYZvI1QimIJCcssioX0ZjpfCVhcfwUXiyRfTqA2EqOMze5fnWjMDG9eJ5OdpVudIRJ2GJx8mXGrvrwPx0I/79PfZqlK3+J06cWb9dTdJM9fVeur6/jnGNx8fo/cHFxkeXl5Ztun2UZvV7vuq+9EIQhr/0rbyE/c4Ygz4DqH3nYaFPNPqyWtvAFjXydgOlQLYHEjwnLAXWEQbxAQbQ7lMtQtZ0OEJrOk1IludZEOLq1zrGVLzOzvYqlWiZzWOplj6i45u+cRssHtQOeOviMMTTTmJlmjWYa3zIIeba+I616TOE8d0qnjZ1tmnot4srG4KYcGqVeSkEYcvY730aZ1onW1pDRCF+WsLVFeuUKLgg4/e1vYbK2QcjOeAJDL57jcv0+1pIzXGq8evf+zLS8XqT6HBKgGy/Qjausw512Ed3oKCvpy3dX6AXwJqAfzWMw9JNjXK3fR2FTDFTNOA20+j0Gw/Ftf56udaAuD97znvfQ6XR2v06fPr1nj3WrF0s502HDVG2hBQio8kEmslMmVS2hBUDdQKtYAymYeJj4p3NGEltVzAyADEtuLKMgwBtH6koSgUZZJcwKwLRkdydaPsgd8NSd4dn6jpROcM4fuiTVZxIGluPzLebbqXZVVbfFPQ89yJl3fi/FyZPE3S4Lly5wfHuDepERFDkXP/ghytGEErDTUQWnB5/nVd1PcHbwmaotxPS+PNVny9gJ+bTzqpGS0AjORLuBxyt7f8zi+MI0CDFsx0dJ/Jizw89hjdDyA84OPkfkx7ur/iIgQUizkd7y77hd9jQQOXLkCEEQsLKyct33V1ZWOHbs2E23/7Ef+zG63e7u16VLe5tEc+2LxY7HRJubDJIaV0zMJUJWTEAfw1hMVeILlAgtKyTTZCAJangbUQBjEXoeer5q/d6LYtbCmEgEg8FjGQbVbSNgxnlya5E4RkYjorU1ynqdM29/m/YTUXvq2fqOZEVZDb/bh+N6qVlTVRS10kS7qqrb6p6HHuTMd7wVF0YMo4TlmSNsnD5LMTNbbds89gW2gxCL300Ij6VAjOXay4CdoCEzlnFQZYm0iy0a+RqX6q8mM3FV+isFqR9hgK14kc34OLlNdvNFjk2e4mixPM0hMaQGBl4wr7yPkyf2t7nmnn7axXHMgw8+yIc//GHe8Y53AOC958Mf/jA/+IM/eNPtkyQhSZK9PKSb7OSMLD/25G5jsUm3x8UPfohgeYUwmxC4krwssQgJsjuYqMojiTHG4eMWzWKD8XRwUcMIE1fQbbQJRgHeNgj9Fh7LxFpi7+kbSxYlRJub+CCgPHWSM2/XPiJq7z1b35Gi9NwJn9XWQBIFOF8NvjMG7aqqbhtXllz84O8Secf4+HHIMsxohIQhxZEjROvrDOster0tFkR2P4xTN+Ds8DGgGn5XTBudZUnCnMvJgNxXKQDBZAURP50AWX0wOYFhNMf85DKhz3aPxwBGqhBnJqh6kHzWJHzb3/iOfe8nsueX3T/yIz/C93//9/
PVX/3VvOlNb+Knf/qnGQ6HvPOd79zrh37egjC8qdvcy77+QZYfe5LhxhZL/+M3Kba3yefnOXrpAjN4Aqols4brVdUx5QhjDNF0GweBWIT2cECv0SSajIi8YCmr0l5jGL/1rZz52jey+tnHwQhHX/sqThzArnfqznNtZ9V2wz69PSMwyautCztd8z2sMYk1hloc4rzgvCcvvHZVVbfNTj8RF4akV67ggzma2QYYYWxDsjgmzDMuvfLVRE98kVk7rZ4EetEcUbZJOc1nWvWWWhxjxwWhVIPtxMZsJIuY4fpuTsjOHURFl6TYRgzX5TAyvZ2IsO4si9/5bTR7XZ74xJ+SnjjGyVOL+xKU7Hkg8j3f8z2sra3xv/1v/xvLy8u88Y1v5Ld/+7dvSmA9aHaCk6XPfAE7GFC028g4g2iOvNwmkpJR0KjyRfyQaacREguBCLmAw5B6Rzke0Js5Qp7FGARvA6wIzXaDc7/yG7vdVc9/+KOc0+6q6ja4trNqb5jvVs2MJwWTwhGFdjrk7vBu0HiR3bkzo6ykUYu0q6q6bUZbXeLxmFqZE4twub6Al5xWscmxMsO6DCeGKxcvcqk9Q9DbrAIMDFfDRWbKnKQcMDKGPAyp9weU4bSQwoCTklaxRjYtx01tFWxMvLBmO8TBAMougRGG4SyToEkyvszEGDITkMSW5OMf58JHP0bmYaNWZ/iqV/PQO97Kfa/Yu/zMWzFygFPIe70enU6HbrdLu93e08dyZXnd9szO3JcnP/oJVn7+P1PUasxtbXACqfb0MPTjeeplj0Ty3agzswmZF2KfERgofNUOfiyGMYbcGIzARqsDcUw4HlO021Xzmzwn6vUo0zpn3vm9GoyoPdcdTlha69MdTaaD7jzDcUGaBKx3x4e6zbsBAltVEJ0+2uHUQlu7qqrb5tKnPsvm//6vicUzCCIESyCOWVcSIlgDBXCxM080GjGfTZiEhtAL3qQE5KTiqONJjLmusWDhhbEYCgxiquKIuqm6CI+kutComadXP7aCDr2gxTgeEScRc6MB0XjMubgOR46QGkjHIzZyx2eOn+Ed7/rrX3Ew8kI+vzUjkmpA0YX3P3zLuS/12Q64kqMba9QBu7t8JjTKbWK5oQOqgPdQUCW4JrZ6MYRUZbx2utQ91+8yihPGx09gdpbC0pQiSYjW1rj4gYc5+6YHNGlV7akb+47khePclQ16w3y/D+0rZi1EUcCrzhzhZYszuhKibq9rtjs9MEiOcXJ0mRAhM4Z0Wnjrk4ReWqdx5TKhMaweOUpaFhztZ7SdJ5gGFG7a9XunvcQwbCNSErgRAwEwWIQlZ5kNIRKhFBhiwfURk+PnTzLT38LkGT2BaGYGk8Q4YBBFLI4GnF69ym9/6BHuvee7b9s2zV2ftXX+kUe5+N73ES0t3WLuyy9x7jd/l8XJmKO2msAbGHbzQ4wvd/fm3LT5U+QzAqpRz6XsdF2tGtHEBryBlWabFCFwJew0qZkej3ZXVbfbtX1HFmbqpHHEYJKBgSQ6vKeIehLRqSfMNlMNQtRtN+kNGIUxmbU0XYm3TQhSHIYEYTU5xdAmGOdIagmXowRTemaLCXOuqGaiTbdqchMxMjUc088KYyjCBhvRUbrG4m3AphiGHnwSsdye44oJGE0Ht05swHhmlsQ74smEsvSMwwhfb7AdnJ2OMDEMkxovi2Dt8S+xdGXttj1Xd/Xl9rVTEouFhetXJoqCY+urNNeXqVlg2vxlp1NjIDAJUnITkLoBxkA3OkK96FP4rKqoCWoIQuZzNmyMBQJxYCwl1Wjo/njMpHGSmvQxTFdXtLuq2ifGGFr1ZNpuuvrwtubwdVi1BhY6KU60Skbtj/pshzxNyUmpjUacHHyeDkJmqmZivaBG3deRsOrlY+bmWV6+SjYuea0bEU3ffwWGL9VfR2kjFspVFidPYYEj+VXq3rAFbGLphglNY/ALC9jhgCyKqeUZHug22kg9xQ4HhFnGFjCemcMag5UCT0
BASRmE1K3B5PltbXJ2VwciN05JdER4LFF/g9nNdRoIsYFCDJYqEdXZGqOgQafYIHFjnJ1m4As08k2Caq2EAEFsjHEDeiZgFLYJJKddDjBI1dZXBFuWpLJ1ffMo7a6q9lE1IM5izDVVgYcsEKnFIVkhzLe1Skbtj2tbvY9OnGDS7UJ3k9wYchuyMDrPIIwhnQOqZFPfbLJ6+uUce/LzJOIwGDIMg3iWyE+I3RCYVrIJZCKsNjtkrTadXpfy1Ene/M//MWtPfJnRVpf+0lW6f/Y57Ooq4cYGYqBI62w5g6/VSPC0/ZXdYw5dSeYFqcW3tcnZXR2I7ExJ9NMpiZYSR410e4tEPN5UlS+xgYlNyQQ6fkzDTXBmWt7oiyri9GDxNO20AytQc30G3pCFwe4QPY/BpXWy8ZBOWSI2uC4IuXYWgXZXVfuhncakcUhWlMRxQFRaJkVxIAbghRacf/a4KLRMm7UZrZJR+2ane/fF976PaH2dotlkEMS0ypxICorpdokx5unz/umT/PV/8H1c+Il/g1+5SmAgFM+rNz+C2IhEdkaSVMFIhsFGEc1ed7cZZlyrXdeOwv21v/J0IcZMm9knvsjo9z7OE70h8XznmtJ9oZFN+GIBC6+697Y2OburA5HrpiSmKQYhGm2QuoICQ5OqI6oDQj+hNDVKqQbcTTzT1ZJqquHIGKxUjWag6iVSSJUVXaUkCYnz0wi4RmFD+gFEwwGFNZAkkGXVi1G7q6p91EhjOs0aV9b75GVRTeGlWh2xVCfBMLBk5e3vMFL6aWtqqvyrNK76ROauxLlpE7M4IrSWM0c7WiWj9tVO5eNOMYQLLOLMddsljEbXnffj48c58Y1fz8av/gZ18dhpMIKvtlmcgVCq8vTcGCjdszbDvLFPlpvvcPSpy4y/cI619S3iVoO6NdTGI5Zzx6UTZ3jHWx66rf1E7upPultOSSxLrAi5hdBDYWv0bY35cosgMPSlQccPq1puqhP01ahGs8wxBraDkKYrKDFsJCnNfEKrLMDAxFgmaZ1ofZ2s0yH8ujdRPF5tD9nBQLurqgOhN8rwXggDO22HXlV7OUBMVYkSBxYvUOxDC1ZjIAostSjEWoOIEIUJYWipJxGBrRLAZ5r7Oz9DKbi5e/eN2yW3Ou+nb/56so/+EcnyEpGANYITM53yDpkXvmxj7Le9hVd87Vfttpt4PoJTJ1n4G/8L/uEPE3/6Mcb9PpkIy0md0QOv5x3fqX1ErnM7+ojsVM2E4xFFqwXOs7C+TCBQR0iM0MMSUs2HCWxIIgVCVRo1FsPVWoPs9Gk6b3wdrZPH6V9Zpvupz2DX1ognY+pFVQo5CmPyNEWOLe6+6J6pf4lS+0FE+MKFdTb7Y5LI0h/njLOyGgQ5bXAmIlhTJYFmRUnpZM9TSAJrqEUhjTTEOaFZj5lkjmY9Asx0cnB1Bdcb5sy1Ul5z9ohuy6gD6fmc993lJR77//x72k9doGmrWWce6HvhXL3DPe/+O1/RBat4j1tdY+XiVYZeXvLOqi/k8/uuD0Tg5j4irfEQK8J2XONl+ahKCpqW2dapTsRbJmC7lmIFWn/tu3jtX3nLdS+ka19otXYTRJj0hxpsqANtMM757JdXSKKAKAxAIC8dfhp85EXJ8taQmUZCHIUMJzlZXpKXjsK99KcSayCOApIwxIknigKyrCRNIrKixBrLbKtGvRbhnGc0KUnigFeemtdtGXXoifdMLl3m07/+2/QvLOFnOrzqbd/E6a96/YH/DNFA5EW4NnDoX1mm9+GPEo7HNCcjZn01MCuiyv1YDSJ6nTmiyZjy1En+wk/86IF/USj1fGwPJnz+qVWa9Rh7i9WE7mDC8uaAJH56W6R0Vadh5xylk5dkNk0SWZKoajsfWENZeoZZQRhYQmtZnG1QOs9mf4IXoVmLSOKQTr3GyYWWBiFK7TPtrPoi3JjQc/7saS68/2EGly5SG4+IBDatoVtv4RsNosFAk0rVHefZpvJmecn2ME
MEAmOIw6DashFwXgjDADEenCe0lsL53aTSMDCU0xWT57ryadYijDHEUYChCnYmRYlINc8pTSKSKCSJq6Zlm4MJrTTmlafmaaaxbscodcjoJ+gzuDbB6PIffYrRpz9H0B9gvYPJRJNK1R3pmabyigjdYYZzjiQKpt0dITCWNDZkhSOKLE1jaNYTFmcanF/eZm276nuAMVhbBSzPxlBVA4TWkOXT4XviKUqPnQYn7Xq8O0rUWEO7HpMVrmoKpUGIUoeOBiLPYmeV5OQbXqNJpequ8ExTeSdZwWCcE4chnWZCf5TvBgrWGqw1jCclCzN1Th5pc2Wjj8GQxhFOPN5XuVVQxRDWGpyvVjisMQSBIQ4CstLhvBCFhiS0TApHXjgAmmnMfDslia9/3wWBxWfltMJHKXXY6Cfp83Tj1o1Sd6pOo8YrT80/PZV3+iEfhQFHOim1JCIOA3qjfFo1UwUAcRhweqFNd5CR5Y7ZVoIXzzgrCWODF2E4rhqj7QYh1pBEAXEY4ERoxTEgTHJPPQlpxyFRYBllBTONGlEU3HS8znms1VbuSh1WGogopW5yq6m8X7q6STD9sE/ikIUo3K2ocdN8kCSKuDzqU6+FWGtp1xOK0lN6TxRY6rWQ0aQaFhnaasUkCKv8kTCwzLVSwtDSHWTcc3yW2VZKPQn54sWNarsotNdtv4gIo0nJXEtbuSt1WGkgopS6pZ2pvFB94K93R9fnjkxLa0WEXu6Ya6VVToeX6wKW+XZKb5SRFQ4Rgw0MVgxhEIAB7yGNQ9r1hCQOKUpHHAXMttLdx7/VdtG15brayl2pw0sDEaXUc3qm3JEbg4HA3lx1k8QhR6KAovTkZdUArRFH9MbT+7GWaLrS8UwrHLfaLrLWMNdKtVxXqUNOAxGl1PPyfIIBEbll1Y2Zdj4dZzDfqnPiSJMnlzbJcke9ZhGgLN2zrnDcuF0UBpbGtNRXKXV4aSCilHrenisYeL4rJy92hePa7SKl1J1BAxGl1AvyXMHA8w0ydIVDKQUaiCil9sDzDTJ0hUMppYGIUmpPaJChlHo+tAOQUkoppfaNBiJKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jcaiCillFJq32ggopRSSql9o4GIUkoppfaNBiJKKaWU2jcaiCillFJq32ggopRSSql9s2eByFNPPcXf/tt/m5e//OWkacq9997Lj//4j5Pn+V49pFJKKaUOmXCv7viLX/wi3nt+7ud+jle84hV87nOf413vehfD4ZCf+qmf2quHVUoppdQhYkREbteD/et//a/5mZ/5Gc6fP/+8bt/r9eh0OnS7Xdrt9h4fnVJKKaVeCi/k83vPVkRupdvtMjc394w/z7KMLMt2/7vX692Ow1JKKaXUPrltyarnzp3j3/7bf8vf/bt/9xlv8573vIdOp7P7dfr06dt1eEoppZTaBy84EPnRH/1RjDHP+vXFL37xut9ZWlri277t2/ju7/5u3vWudz3jff/Yj/0Y3W539+vSpUsv/C9SSiml1KHxgnNE1tbW2NjYeNbb3HPPPcRxDMCVK1f4xm/8Rr7u676O//Sf/hPWPv/YR3NElFJKqcNnT3NEFhYWWFhYeF63XVpa4pu+6Zt48MEHee973/uCghCllFJK3fn2LFl1aWmJb/zGb+Ts2bP81E/9FGtra7s/O3bs2F49rFJKKaUOkT0LRD70oQ9x7tw5zp07x6lTp6772W2sGFZKKaXUAbZneyU/8AM/gIjc8ksppZRSCnTWjFJKKaX2kQYiSimllNo3GogopZRSat9oIKKUUkqpfaOBiF
JKKaX2jQYiSimllNo3GogopZRSat9oIKKUUkqpfaOBiFJKKaX2jQYiSimllNo3GogopZRSat9oIKKUUkqpfaOBiFJKKaX2jQYiSimllNo3GogopZRSat9oIKKUUkqpfaOBiFJKKaX2jQYiSimllNo34X4fgNp7rixZfuxJRltd6rMdjt1/H0Go//RKKaX2n34a3eHOP/IoF97/MHZlhaAscWHIE4uLnP3Ot3HPQw/u9+EppZS6y2kgcgc7/8ijXHzv+4jGI4p2Gx/HkOdES0tcfO/7ADQYUUodarrie/jpv9YdypUlF97/cBWELCxg7DQdKE0pkoRobY2LH3iYs296QN+0Sql983wCCfEeWd9AxhNMWsMcmcdYqyu+dwj9BLpDLT/2JHZlhaLdxlhLSY3ShNRkgLGWotXCLq+w/NiTnHzDa/b7cJVSd6HnE0i4y0uUf/wnuMtXGPf6FMZiTxxnNH+Ei7/5u7riewfQQOQONdrqEpRl9eYEQiYEck2RVJJgBwNGW93nvK/dK5bNbep4JEkYmYD0xDFOnlrEWi2+Ukq9MNdtHbdaeOex2YTkwgUuvveXADh75hjFb/0OvaurfGl7yGpvRFAWzD/+ZerZmATL5MQJXfE95PRf6A5Vn+3gwhDyHNIUAIN/+gZZhg8C6rOdZ72fnSuW+tUrtCdjRq4kw7AVJay0Zhi+6tU89I63ct8rTu/ln6OUuoPsbh2PhrggoL22Qs07BPDGYDYmXPjFX+XEt3w9vaurfPLqFuNJTqtVJwobDHp9jvS7TIxlPMmo1ZvkpklNurriewhpIHKHOnb/fTyxuEi0tESRJE9fMVDtt0b9PuWpkxy7/75nvI+dK5bmsE8HT+lK+l6IjdApJjSH22z9yR/xwS99ma/79r/EkVZdk8WUUs9p+bEniS5dpJlNmBdHFs5iZUDpS8YGIoSTq1dYf+STXJKA8SRnbm6Gwjax0sdaQ2kgxTPe6hLW5xjZOWpuusL7AlZ81f7TT4s7VBCGnP3Ot1VLn2trFK0WJAlkWRWE1OucefvbnjFguPaKpZkm+H6fbYEwjBERjvgcPxkzB5xdPs/Wzz/Feq1GnqaaLKaUelaX/+hTHBmPqCNgYC2aJwqaLEwukXgYWGh4KK4ss9aYodWq40zC2M4Ruz6EIYUxJEA4GWM2lphJt5A0wRjzvFd81cGggcgdbCcQ2EkGs4MBPggoT53kzNufPVDYSXY1zQbxsM8VGkgcErshZXISO/4SEYIAsTHkeCZFTp6mmiymlHpGriwZffpzLAAYyDHMj8+zmd5LEc8yU25RB8QaxOV0hn1cu4llQsOtYgBJU8QYWt7xMu+w3U2kv0VuA0ZJSliWZGfOPOuKrzo4NBC5w93z0IOcfdMDL7jOfifZNWjUMeKZBE1SGTJTTpCgS2yEGPBAKTAUSyKe2mjE6MQJovV1TRZTSt1k+bEnCfoD+tZyVByb4SzYiMVihVhyACIDpfcE3vBqP+ZLK8tkx44TmTEApj+gJYITITBgDDS8UJMCM8rpiuVyv8+FP/60XgwdAvoJcRcIwvAFJ2ztJLuaskSMZSa/Suw9bSNQrCIYevEcYTEkIkMaZ8nyZVJXMMoyTRZTSt3SaKuLdSXjtI6M+qSmuqBpuh4BEJjqdoExGAOJCK/KBly++GW2ZxcI0hpz3Q1KgXPe8nILc0Yw0/txYigsxP2+rsweElp3qW7p2P334RcXkcGQPEloWUNDPNYIpVRnin4wBwi5h5of0vQ5VjyUrkoWK0qW/+wxnvzoJ1j6zBdwZbm/f5RSat81yoK5IqOTjfDAfLnJYrFCSBWEjIMGbnqOKQVKwGI47h0nN1ZIN9bJSseXHRwJLOPacYY+YMtD30OBoQMEtYRwPOLiBx7Wc88Bpysid6ivtO3xtcmug0GPWaBlq5NDZKAfzpC6PpHPGSKk+RqxgT4GwgC7tU17MqT/Ox9ibIx2PFRKUVy4SP7h3yMoCkSEVWtYFKFhwB
oQYDs+RhRkNPI1jM+w062XAAidp+z16WFpJyE179gOZ4nsNqGMyWzKSrTA2ewCrdGQtfaMrsweAhqI3IFeqrbH9zz0IBtfPIf7nd8ldI5o52QhEEmJcY7AQB1DCQQIYwIoCo71tsiNoWi3q2od7Xio1F3t/B/+Cdl/+q/Emxt472lboe3BGjtNewcDzGVXear+GkpjidyYmowI3BCAMZYjoTAnIOIYeeHU8PP0gibWBQgGay2FQFhkRGGAOMdwY4ulz3xB59EcUPovcYd5voPunmvFRLznqd/5PfKPfAwjhi8ndY5lIxKBjvE03IChFwpjCA1EVFczo9xzdGONANhstiFNq3I67Xio1F3r/COPsvwf/ysn+lv0vZAag2DIa0eZzVeqCITqHJL4EakfUncDJkGD3AtxOcQA3sAIQw1oiqcXGLzAKDpCZIc0i3Va4y8zxuC8EPX7BEXO2q/9XyyPMqzTeTQHkX4S3EGe76A7cY6LH/zdZ1wxcZeXuPo/P8Twwx/lZFEwMtAvDH2BtoVSDBYhNmCMITQyPY8YXhdXmex9scwO+4wmGd25MyQNrx0PlboL7ZyXWpMRIBwxnsgYChOxmhyjXawR4BEMk6BBzQ04NfoiFuiUm3iBjdoJWtk6HZOTi3BFLPcCbQ8lhvn8KgWWFMEJYKCJ0B51sR6K9TUGSY1+Z4ZC0NXZA0YDkTvIjYPuxqZDgCO+ZtBdfPESSz//i0Te3XLFJNjYIPniF1n9k08zV+RYoAXM2gAJYhp+tLuIWhKyWjvNwvjLCFXmczDdyw1EEITA1El7Y7wvoNXUjodK3WV2z0s2oO4dziashnPM5cuc7X9m90JmbFPWk1OcGn2R4JrfNwYyW8famNDnhMaSGug6D0FIiTDjM9qmWjEJgKYRPMK4NPTEMPaOuhvRyEsmnRmKhQVdnT1AtGrmDrLT+4PpoDuwWCl2fy5xTHsyIs4m1YpJmmKCAJOmFAsLhKMho9/8LVYee4Iwy0hMNfchMjBOjuCDWtVMaHp/IY7UTxABJ0/3FBEMNSOkIojvMZOtkG5vISLXdTx0ZcnSZ76gVTVK3cF2zktxVm2v5DZhErYoTMxW7dju7RI/ouZ6DG3zut83wMnxOdpuQAh0gWZoycTweAlFWmcohnx6HkpM9Tulh8gICcKcEWzQoIxPc2x9hWJzm6LVwkxXZ9X+0jDwDnLjoLtUtq77ue33iRE2G02MtZQkCJaIMcZaTKNObXuDUempm6pszkUzJOUWUbEB+N2Vj2orRpjLrzKI2xhfMMESuyFr0TFmXJeaGxOKUFpH6oTRcEQ0HFCeOsmk2+Nj/+wnv+KEWqXUwVaf7YAIjdKTG0jKATNmlX48h9wQdHgRetEc9Wxw3VWyCKwnJ2hMlmkilN5DFPJ/DyzfZ8csBBAZQ4hgmeaaWIPFUMQNimKI9xPCskfHCFl3k61oAeucrs4eALoicgfZ6f0R9XqI99f9TLynNhySYfCt6s0/tjN4ARmNkF6foPQE4qh5YRQYvIlZbr6GIqgTSUEgnivpfWzET1/FCNCNF+nHR8EmiMDYhBgpqzkQ3uOBUDzp9hZlvU7jlfdx6Rd/jWhpCV+vUxw5gq/Xd7eHzj/y6O170pRSe+rY/fdV28IIBTABRraBNYYTo2o1QgQ2omN08nVmJ5cRqepoBMhNRIZhGHbwNqKGYAV8s8m9Mw3aRsi9gPdcqb2CiU3wUlX45UFKv3YKY0MCKUnzVUoMMZ7m1iZidR7NQaCByB1ipwqm/oqX44OAaG2tCjCcQ0YjorU1slqNXi2FotquaXTPMXP5CeZXl1nYWGV2e53IQ0LVvr1BweLkIuG07fI4aOJsRCBPBzkWOD46z3y2RL2srixmJpdxvmACxAid0hEDoyNHOP23vofB408S7iTU3rg9pA2IlLqjBGFI5xseYmIMVoRAhCS7Sn18dXc71wOTqE1pEywej2Fo66zWznC1/gokqPHy0RdokwFVkGG85/
W+2kIOp4nz+AxPAKY6NyV+xGy+TJEsMooXdldLhh463lG2mjqP5gDQrZk7wI19Q6x3hM5hul18EOwOujv9l7+Fld94mObyMvlkQr27TSyeSRDggJYrcUDTCrEXAiPMZ5cJgEnQYBjNcmb4GFC9mQ0wDDs0yi5GqhOKNZAAiJBhMUCCp4uFNKV7+cp1CbXVFU+LRPpaVaPUHeqBv/ZX+Ojvf5zjy1doGqGJp8RQCBQCdSssDh6nEBgINAIDWIwIrWwN8SW5VB9YkYEaHkYD2tPzTWJgZFJmsmVG8SybyUlOjavVlka5jbGetApRGEl1oeWAxgOv00TVA0D/BQ65Z+obQreLD0Jqf+EhTn7tn+Nou4579E+JQ6EsM5LuEA9sBJaad9REmAh8ud7i3nGf5nQ8twiIgdKEGKmuVLKgwSho0iy71X7udCXEC9OmZ9WyakhVOVNicMZQv3yJ4coKcZ4zmZkBIDdNCpOSSL/6g7SqRqk7ThCGvOxvfQ8Xf+G/0utukzhHbDxNEVIDIw8DsbSsEFFVviR+QDMbAFCaAIvBINU5CcOocGAtqa36GBXxDIUBT0weNHYT671A3ffwQrU15IWBsUha5+Vf+1X79pyop2kgcojd2DdEbAwINg12+4aMvnSeo295Mxv/7f2sfvkSS5MSJOB+X5AaOO0dGOhF8xzJN4gGPdYxnA6mSakGnAnYio/hTcCRbAlnAlbTlzHf/QSz2TLwdOmuA6oYxpADE7HkQCqCzSdkriT2nkmWQb1OIgMimTz9R11TVaOUunPsJKFfeP/DTK4uUxsPCcoCi+AweAtjMdSNJzRmN5AwwCCco7QxR7IlPEIp0AkNIyfTGxlminWciYj8mFa5QWYTCpuSltsI1RyaoQhPSMBcWsOcOaPbMgeEBiKH2I19Q0a2Qyj5ddscwdUVLvzyr7Px5HkuOUur3SC2UGxk1KcTK0e2Ti/s0Cg2WAiq2hhjnn6cQBxHJxeJfAYIoS+w09WRxI0pqfZoB05Y9pYjAQynnRODICWhJPEZLWBQluRAsrVFVqthrMVS5YOI90T9PuWpk3qCUOoOdM9DD3L2TQ/w6V/7TYYPf4iNImQ5SaiNhjRcQY1q7gwiiNmpzatyPUIpdnsYbRERBQkt6RPbatU2lIJw2q4gdQO64RGGYYe02GYkwoXCMBsYjiUhRafNmbe/TbdlDgj9VzjEdurz/bRvSOxHBBRVxcxkAnlOOhnRe+Icq84yf6SDMYas6NC0a1XilkDiM+bHF3BMB0tNL0U8gp1emUQ+o7ARvegoC9kljk6ewiKIgUCq+yk8iIXcGJyxEC4wjOfpTC4COU6gaTwTsfSAaG2NotWqZtFkWRWE1Ot6glDqDtf99OeJRMiOH68uomSe0WhMvLFC0xeU03yQfjzHIJzjxPhc9XvREZrFOkXQZhQ2afkBQpWf5sIGYEhctZ3TKddpllsUIvS8YT4yBAayxWO87Hv/qrYJOED0bH+I3dg3JGKM9PvUt7dJXUEknhjIuhlpZx5jDCUJzkRYpNpvNYa12mlGpsap0ZNMN1fAgBFDaWLA0IvnKUzV2XA7WmASNCnNJqGUMF0NWXKeJAwpjKfpPTkl0eQC0XRgVWkgnHYgyet1ZH4eu7aGHQx2E2rPvF37iCh1J7t2JRdgPIlJ8k0S44kRPAYH5GK5WH8NVjzHx+cAw1ZynJobMltu0ncZFhh5oWYNq8kpIik4Ma4CEQ8gji86y2hunpnRkNHCAn/+p36cuFbbp79e3YoGIofYsfvv44nFRaKlJYokgeGQmY31aSWMJXDQNZaW9yz2tliPQsIWJPk2Oy3JrEBaDsiCkDJIiVx1heExTMImWdAk8RNiN2YcN2mUXQbRLLPZchWEAE6EL+aeSRBi4oSkyGgYh8038AZKE1EEdRpllwBYtxbrPC//G9+FDUOdiKnUXWR3JTfPSdfX8bV76YyHNPyAeDqH1xoY+4B2scnx4TnEgEU4Pfwi1hcYBO
uGEEA83cIpBMx4i1wEOw1mvAhBFNEej5i027zse/+qBiEHkJ71D7EgDDn7nW+rqmZWV6nlkyoIsYaa8xQ2YKM1w6i7xQnxjLa3GDUa2KDAU619WKBVrJPZGGdjvAMjMIznKY2lUW4SSYkVR+wnxH5M3fUIpFo58QLrpRCFIVtxnaDVpr69jneOyAiFGEbxHFY8dYSRGLZn5wnzjEl/yH1/8ev28ylUSu2xGyd911oN8I75zQ0CBDv5MiUlubUY7zBSbQnHpuDk8DHsNQlroRR4A7lAzYIghAZyMbQG53AIPQypgdSAGMMxXzIoPJNwdh+fBfVsNBA55HYz0X/pv7Nw9RIOiMSwnswz6cwR1x0b44xWNuCIy7k6HFCkdSbW0PGeXAwjgWi8Qmw8BRAYoZFvktuI0GcUQY087NAuNgGw0yDEiTD2EFnDvAi+zAiHPULncQKOajLvQr48rd83LLU6EEd4V2pljFJ3uBt7HLkwxB05QrsoaIhnIwwx4jAYMgJG1hP7Kj+tZgylGAIgDxpYPJEb03dCbKt27oWvqvVKEQbxHIGU1IotQiN4qapwLrVmkHpK3O/rxN0D6rZ0Vs2yjDe+8Y0YY/izP/uz2/GQd5V7HnqQk//Ld7ARp6zOLrBx9BjDky/HNmsYY4jmZnnSxPQdROMJnWzMVpySydM9P0ocgpCY6RhtHNZNKESY2AZ++lLZGXi3U59vqLoUPmliRouLFDOzjMOIYrr1M8IwwHA1iLh4ZBE3P0/U7yPHFrUyRqk72G6PoxtGOdQuX6ZTFmQGGs7Rr53CmQArJeKEiVgKXwUXZjrAbhy22I6OsO2EDTF8KRcuFp6nSrjiqm2Y0FgCE5BOk+37YuiaAJIY02ho5+YD7LasiPyTf/JPOHHiBJ/+9Kdvx8PdlRpH5rhaqyFJjElT6tcMvKunCXamw6XtbT5d61DYBFmY55XDLl+/sUTTVrX4gdktyUeme6wAjWIDy24aKx7IRbiaewZYLtbbzB2ZoZ5We6+TkydZW1qiXhaMw4hJs4lvtTB5TrS2ppUxSt3hbuxxZOz0mjdNyZpNbDZmbC2FjbBiaXhoTudjTYC6MZRAJlX31f9/e/ceHlV1L/7/vfaePbckkxshJCGEqyJHQATlKfCcWuGI2q+KtVjviorHS49arVWOHq19VGrR1mo91tYjxx5bbz+Od1E5ar3gXYGKKIhczY2QhGSSue691++PISPDNZGEScLn9Tx5dIY9ez5Zk9nzmbU+a63W6Ba2eEN4C0vA8hCLJcmNR1EKYj4/sUg7uYkteF2wDYhog6hh0BgYQdJKraQqKzf3Xj3+SbB48WJeffVVFi1axOLFi3v66Q5aOxeupt/4pNbnyE3ESI4awbmXzSEST5KbE6CivIT173zEpkceo6SlEeVo2pzUt5A8I7VschJYH3fZlHRIYpLrMQkZUOexyA2YmPkFVIRyQRkkVA5e3YYyDGJFRRgt20gEczCjUYjGUD6vzIwR4iCw8xpHCRXE1DYmCbAsYgp8rkvrgCKM6FaUGyOmIeIxQWuibmrGn6mhOidEPL+AgM9CKYV2XQoiDSSrhqC1xqqtJVJRQSQexxNuY0CklbDHJMdxQUdQ/gLYvkeNrNzcO/VoIlJfX8/cuXN55plnCAaD+zw+Ho8Tj8fTt1tbW3syvH4lo3B1D+tzVJ1yAlVDKzIeN3zaUfi+/ILatz+gLumivF62xZIEYu0M1zb5hsK2TNSAUgo8Bt62NmKBAJXfOxr7rXdI5gZT+8rEPdiug0UEv2mQE2kjmIjSqDVxZdBiWDSVVTHl/NMYPnpYllpJCHEg7LzGUULlYChN0N0Kfj/tppdiOwGJJP5oFAWEPRYKCLo2bR4fDaEQ5Y0NDIiE2ZqbA66J3vF6NutEgNQ1b+tWknl52MEgbqSVPMchbpioHAdLffuZIis39049lohorbngggu49NJLmTRpEhs2bNjnY+bPn8+tt97aUyH1ezsuoWzU13
dqfQ5nSwMN//iSOmWRW15I0sil3G0jkUhSHw4Tb9lGsQEqGSOGhR46hBEnn4AvN8jad9/H2LYNfyRCkZNEKy8+HSdgQNLWRIFwTh6+YICKaAR/9UaeW/j/MWvuGYwaWXmAW0cIcaDsvMZRjrsVvb3OTBkG0WAO8XCSYLiVXCdB3DDx4JI0igibDrGQF5WXS13SpqCtJVVX1t6+x+tZ+pqXtLGVgaE1zYXFqLzc9DGycnPv1eVE5IYbbuDOO+/c6zFffPEFr776KuFwmHnz5nX63PPmzeOaa65J325tbaWyUj6wuqJjCeUdp8vtbX2O+k21RMNhfKF8UCZRVUCeiuP3KfAV05qTA9ua8U0/hsGTxqfP5dg2qwMBKupqsJXavoOvndoN03HxoGk1Lcz8EI5StFkWpZE2KrfU8vKSdxkxfDaGcUBqpYUQB9juhoo7qs606+JxbOpKy1CuS1FDHdp1cJXBNl+ARF4O/mBqqXa3sIBW2yY4cwahqordXs92vuaFq2tpfuNtrGiUpMeT0TPsmCaBEcOoW/WVrFvUi3T5Vbj22mu54IIL9nrM8OHDef3113nvvffw+XwZ/zZp0iTOPvtsHnnkkV0e5/P5djledJ3p8XS6EKvd1cTd1Jx7B5ccd2t67xeAoGkQ9XgoGnPorufUqSXh0RrTdTENhVenNqRSgGt4CKuB5NGAUop2n5+hbpRVq7+muqaBysGl3fY7CyF6j84MFQ899ydYfi8b774fx+PBDQZRAQu/Sn57ongc1/Iw6Igxe72m7XzNWzd0SEbPsOE4mI4NrknyzXdYu/R91pSWUnWK1Kv1Bl1OREpKSigpKdnncffeey+33XZb+nZNTQ0zZ87kiSeeYPLkyV19WtFDAuWDaPQHGRqN0GZZeHYcT9UafzRCnS/IyPJBGY9b8dQLFDfU4ShSG1Vph3ajCI/dTAsQM0w8LqhEApUaJsY2PQQNhUokaGuPHrhfUghxwHVmqNixbdYOHpya4utPLTfQYX+GUnbsJan+YBmxt99FK0UyPx+8XkgksKqrZV2RXqLH+qWGDBmScTs3NzVWN2LECAYPHtxTTyu6qGJwKe2HjqbxH8spjbTR7vNjmx48jk1OPEZdwiEyfiwVO/RerHv3E9pfepVy12WbadCqTFwjB2XmkWc3E0WRUAZBN0Fu7Bvw5gHgcWzirkb7veTmBLL1KwshDpB9DRV3pufku071Nz2e1BDRXxdhuQ7JgQNxDD8eUnUrSZ8Pq6GBTc8tpuro8TJMk0XS8gc5wzCYMus4nmloonJLLUPdKEFDEXc1XyZhc/kQZp1yXLqeo2N9AF8ySbuhQBmAwnAjODqOAxQZUGs7uKYBHjP1RFqTE4/xZRJKDh1BRfm+e9WEEH3fvoaKv0uRfWftPI243Swl4Dbh1e2yrkgvcsASkaFDh6K13veB4oAbNbKSWXPPYPGr77JqzdeoRALt91Jy6Ahm/cuUjBkuHW/seEEBZlMTPteg2T+Q/HgtpnbYZpgUuQ6lBtRrSCqDYCKRGuJJOKnE5l+mSKGqECKtq0X2nbXzNOKA24ypE98eIOuK9ArSIyKAVDIyYvhsqmsaaGuPphc82zlhSL+x/X4iBQUYzWFynQSGdnCVIqEh6ULUsrBy88htbyOuNXW+IJHxY5l1ynEydVcIsYuuFNl31s7TiL26LfMAWVekV5BERKQZhrHPmSw7vrFVXh5tQHBbE5bWGK6LQtNiGDizTmbcsVOo31RLu6sZWT6IisGl0hMihDhg9rXitKwr0jtIIiK6ZJc3dl4ekZwcIrEYJJP42tqIDxnCP59+UuobziCZoiuEyI6eLIYV3UdaX3TJHt/YgBWLkQiFqJp1oryxBZD61ulsaUj3jAWkZ0wcYD1ZDCu6h3xaiC6TN7boDOebauoXv0b9ilVEw2HiLjT6g7QfOpops6RWSBw4PVUMK7qH0r14Kktrayv5+fm0tLQQCoWyHY7YiWPb8sYWu+V8U03DY4
vY+MVattjgC+USUBCIRmhMOPyjbMhu9xySvykh+oeufH7LO1x8Zz1R5S76Pu26JD/4iC3rN7PZMSgekI+jfDgk9rrn0Lp3P0n3spm2jePxsKa0lCH/7zj8+XmSnAjRT8m7WQjxneyp90JvbaR97XqqYzZ5oRy08hA2yyhwNqEUu91zaN27n6TqjqIRkqFQat2HRAL/hg20/P5+6rx+tGGkkxPZI0SI/kMSESHEXu2u4DSx+Rs2PffKLr0XVaecQFXFQOxojKirKfCYGDgE3G0oUqPAtukhoMDf3Mzatz+A0cPZ+MxLqSSkpCQ9xVLbNn47Sch1cJJx2soHQzIpe4QI0c9IIiKE2KPdFZy2avBFY/gtE7eoMN170ZEgmKf9P3IDfgKGIpm0yTXAiFeT0GBbFrnxKJ6WZo6Iu6jnXmTDC4qiWIRtefkow0BjENM5FG3bjFen9jMKOjaRbdtwg0GSAwZgbd0qe4QI0U/IO1gIkaFjyCXx9XrUJ8uor65ji6PwhfIJoBlZX4PpOnyW8JKDQdA0MzYRW//GUo6eNIahX28kXF9HHjaG42JraAeKNIS1ps0XJFA6ELd5G7muixtuocXrIxEaRCLhJeAkcRTkuS4BZRCNW3jb6oiaFrFAECV7hAjRL0giIoRI6ygYNevqKY2340kmSWhFbnEJyufFjkRwXBfLYzDIttnQHINACUFav91ErL6eVjWO/FiEAieGBlyt0AoGonGA5QmFpygX4nHQLjEFfu2S2NaMzs3BitYS0C5eDR6liaggUTOPQKKBkB3H25Yg5vHJHiFC9AOSiAghADIKRlUwB9OO0RJPUAgkmhqp8xQTsMMYGtpRFHpgnWMQtQ2CHVcSnw8z3Ebk3Q8ItEfwKFAoUOBqUAo0MMxStGxrI88y8dttBDWYuGg7jhtuJT8epUBpTJV6TFCHKYqF0QpsDbbWtCTj+PNysthiQojuIMsbCiFwbJuNzy7Gs71g1PRZKCCuDCIeE0NZEDXBULgqVXZqAv5EI/5E3bcniscJapvg5k0Y2qXdVbTgYYunGBvQpDKRwaaLz1tIu5lDxPTQDngUDMCldFsjnlg7GjBU6iLV7snHUSa2BlNBQGnytcOWVV9lobWEEN1JEhEhBHWrvsKorycZCqEMg6iZR7OvAo9KJQ8OCSrCqwCImhZBx8XRGkcZmOb2WS6ui9XaSsi18bouCkXQgJbgCFr8lTikLjiWobCUwhetJie+lQLHJldptFZYCvJcG0/SwQIUqR6Ubb5BRD35eBS4aNztCUnLW+/i2HaWWk0I0R0kERFCEGluwbRt8HoBcAzA0uQZBo7j4CqFqV1wNZFQKJUkJB2CBnhNAx2JYDU0YKHw2QkAXCCpTTAshsTWkWekejgslXpOr9LkA9qTh0OqhiSJwtWpBEajaDNCaCAvuZUGfxVJMxdXQ0QrNIpASzN10isiRJ8miYgQgmBhPo7HA4lUEuFRCWxfEsPvo8AA07Zx0XhjEQa2NGG7Go1mtJugZPNGvC0tJMvLcYN+NJAk1WPhMTwE3Qi5TisGqeEctf3fCg2IeYtpM/Kw0Gidqv9QaAwFbZ4C2q0CHA3BRDNeJ0Kqf0ShFCR16qYUrArRt0kiIoRg0JhRuKWlWK2taNcFIGZ5aSosxg7mEDIUFlDYFsZOOqy1/GwsraC+cADtlg/HYxEaNwbicZIeL+0uJM1cTAXl0XUAdGxq1fHfXFNRmKinOFlH3MzDNSx8CryGAhQ59jainjwc04eBS0VkNT6nHe/28ZqYUtiWRbAw/wC3lhCiO0kiIoTA9HioOuUE7EAQq6EBHYmgHYdo0qbVhS35RWzNL2Sb109t+WB8lUMJ5OWhCwuIlpdjuA4t73yAbTskgkFcQ7HNW0LCDACgMQh7itAaXK2xtUYBjtYo1yHsLSFiBOnYg9NSYCpNnt2MR9solRrW0aR6TeIoEijsigoGjRmVvYYTQuw3mb4rhAC+XS69Y+M5ozUMaJK5eV
ijR+EuW4Gbk4M/maDNCGD6FAFaAXC8XoJNjWg07drLINOkMLoJj0r1rrRaxWzOOYyq8GcY8SZMI3XxyTEULlAe34ilHYzUTN+0wlgNavsdtoaoq/Fsv2NbboiqWSfKyqpC9HFKd3wF6YW6so2wEKJ7OLbNiiefp+Wtd7HCYVAKbzzOQDtORKUSh6QRJKE0SY/Csm2CbpI8V2MY4AUsA6ztl5ZUKYdifXAseXYTubFvSLiaPFOlejlSZR+0eAYQcNrx6Wj6ca5OFb0apP7raE27NthQWsbQ886QvWaE6KW68vktXyWEEBk2friC1jfeSW1Cl58PiSSF7WECSmNqTQKDkNuOFzBdiLmadgw8BjjbV0I1XdA7dG24GAyLfEZC+TAVWOYOPR/baz7intxUEuKAg0mbVUgouZWYo4nr1HDNFsOifer3mHbFBXj9/gPfOEKIbic1IkKItJ0XNsPvJ9jaAgratEFQQa5ySWiFQyqH8CooVi6WdlMzXiA9nJIwAkTNXGqDIwGN143hUalvQNsnvQAQ8+QyIF5NwImggISZQ8RTAKSKV3NNhd8AA4372ecsveUu1r37yYFtHCFEj5AeESFE2s4Lm7mRGKhcYkYSv+MSM/1EPflYsS14FDiARymingLajQC+eC0JZWEYHvxulLBVjKFtgnZq+u72hVVBqfRwiwk0W6WYOLjKZEDsGwJOK4FomKQLGk1Sg1IK21S4gUB6p19AhmeE6OOkR0QIkbbzwmYJHSRh5m1f+0PTip+ksnA8QRwjNTSiAKUdLB0nAMStAiKe1JhwQaIeW3mwdDL9HB1DMknlY4t/KAABtw3Q+NwYprZRQMSThzY8KAWuUrRrhXf71OJkSQmeaIRNzy2WlVWF6OMkERFCpO28sJlPtZGXqMbjpoZcvHYrgVg11dZAWowgCTfVq+FzwhQmm4hZBQDk2KnZNB6dJC/ZhM9pw94+DhMzgjg6tSCZRzsAFCXqyUs2k5tswiR13zbvQGwzgCI1DNRo5RC1ilG2nd7pV9XVy8qqQvRxkogIIdJ2WdgsECBuGFhoXFLLsidQWMl6jOQ2vNuHZzo2qHMMX2rdD9dBAxEzRMSTj9eNYSpwlEmDfwhxPDT4BpOf2JJ+7nYrn5j57W66pdENBJ0wAE2GgWPkoDFRtpNab8Tnw3AcWVlViD5OEhEhRNrOC5sRjRIJ5ZMEvGgMrWlXCi9JNJoEgAZz+xLthcl6ADbmHU67pxDQWDqZGr4BDO2QH6/F1UlwI5g7DNmEEo2gXbZZA1K7+24fojGAIsdlaKKeskQdha3NBL/5BqN5G65pysqqQvRxUqwqhMiwy8JmjkPYn0M8mSDfThJyNVHt0GJabPVYVMXasLb3jCgNXjdCKNmIx43hc6PghFMzZDSAJtfeRruGgngt6QwFaLcKiBtBfE57uo6ko6cl34SYC9tcRcKEXDtOfmuc2kGysqoQfZ0kIkKIXQyfMpGqo8dTt+orIs0tBAvzKTlkGCv/d3HGQmeOx8PXfj9DmhvJMVLrfeQSZ0BsI2hwFbR5igjZTakFynZIMYLbl21P3YJQYgsahaFtYmYO/h0SEgBDKQqUZmvSwTXV9inAvXY9RiFEJ0kiIoTYLdPjoWLcYRn3HXnWqTinn7RLgvLOVf/B8C01+FRmatBGgG3WAHKTjTgo4q7Gbyg8ChKuxlGk1hVRCkvbOKQSl4T5bc+Iu/1cjf5KQslGBtDGRlfhFhRgRqPUrfpqlziFEH2HJCJCiC7ZXYIy9Pwz2PyfD1HRHsZQkHA0poIgUazYRmIaokrhVQoXTdQFFDgO5BjgMVIJhwu4ysS/fdaNi2KrdxDFiVocO0rSTWIog83KpDiYS05bqxSrCtHHSbGqEGK/DZ8ykYrLL2ZDaTlRwGekhlvaNTS4SdbkFbGxeBBJBXWuot4w2ObJZWtgMGENHSuBKGXQ6h2Iz42kNrlTPhp9g9EagokGHDeJR2lyXI2Ox6RYVYh+QHpEhD
iIObadMcwyaMyo77ybbbqu5I132PbcS7RtaaTesDALCwiaBrnNTTguoBS5QDMGcdPEaxjkahcTMLVDedsqkjo1xKOVSY7Tgg1otUNBidIEolF0VaUUqwrRx0kiIsRBat27n6Rnxpi2jePxsKa0lKpTTvjOy6abHg8V/3IMzmGjqF/8GmrFKqLhNuJas9Ww8BsKC2jS4CWBL1FP0rRotZMUKJck4EFjqVQviWu3UxJZl5pxo8GnIKE1IUB5LfwjhlG36qv9SqCEENmltNa9tuy8K9sICyE6b927n7Bp4d9Sm9uFQqkl3RMJrNZW7ECQIXPO2u89XLTr4mxpoH5TLe2uJmCZxO+5n0RbO2GPBwODdu9AgokGPDpJqZMaoGm0NQMshdYddSMKF4VPaQw0TQ40eb1gecEwcTwe3P1MoIQQ3asrn9/yFUKIg0zHDrtWxw67hgloVCBA0ufDamhg03OLqTp6/H71MijDwDOolIpBpann3biZ6gFFbGtrJ9dxiHoMolYReYk6fK6mWRlYyqARh4CrCBoajSJKqnek3VW0YmB7vRhei2R+/rcJlGyCJ0SfJYmIEAeZnXfYjagCFJqA3pbew8XYvofL7qbFfte6EhXwkzNoIM1btpFMxMhxbCrDn+GiaPN4sUMhTDRLVYDJhw3Ds24d/tZWjESShGHQNqAEXzKBNxwmWVKCNqxUX0nA7NYESghxYMm7VYiDTMcOu+72HXZNbAx22MHW58Noa9vttNj9qStRA4rJGTmM0o01rGn3UhzwoRwb7fGA309htJ0v45r8o49g+r/ORrtuRsLjOg7rfv/gtwmUUYipbfydTKCEEL2TJCJCHGQydtgNBPDpcOYB8fhup8V21JVY2+tK3C4OiyjDwJp8FAPXbSL6xVoaIjG8eTkEDYU/3EpdwmFz+RBm/csUDMMAw8hIKL568/2MBMqrIyjtfvsEe0mghBC9l6wjIsRBZpcddnegXRcrHEYPKs2YFttRV+LZXlfiBApRppmqKykpwRONsOm5xTi2vfPTZTAHV1By5mlUfm8SFaEguW1hki0tbFAWW8YfwayLz2DUyMrdPjYjgQIsHcFD7NsD9pBACSF6N+kREeIg07HD7qaFf8NqaCCZlwc+H8TjWOEwdjDIkJNPyKiz2LGuBMOk3RxAjrMFD4kuD4uYgysou+gcBu4wo2Zk+SAqBpemekL2YNCYUawpLcWqribp86F2OLYjgbIHyyZ4QvQ1kogIcRDaZYfdtjZc08QeXMGQk3et99ixrkShCbjbMEl+e0AXh0V2nlHTGd8lgRJC9H7yjhXiILW7HXb3NANm57oSr27LPOAADYt0NYESQvR+kogIcRDb3QZ2u9ObhkW6kkAJIXo/eecKIfaptw2LdDaBEkL0fpKICCE6RYZFhBA9QRIRIUSnybCIEKK7ydVDCNElMiwihOhOPbqg2YsvvsjkyZMJBAIUFhYya9asnnw6IYQQQvQxPdYjsmjRIubOncsdd9zBsccei23brFy5sqeeTgghhBB9UI8kIrZtc9VVV7FgwQIuuuii9P1jxozpiacTQgghRB/VI0Mzn376KdXV1RiGwYQJEygrK+OEE06QHhEhhBBCZOiRRGTdunUA/PKXv+Smm27ihRdeoLCwkGOOOYampqY9Pi4ej9Pa2prxI4QQQoj+q0uJyA033IBSaq8/X375Je72HT1vvPFGTjvtNCZOnMjChQtRSvHUU0/t8fzz588nPz8//VNZuftdOIUQQgjRP3SpRuTaa6/lggsu2Osxw4cPp7a2FsisCfH5fAwfPpxNmzbt8bHz5s3jmmuuSd9ubW2VZEQIIYTox7qUiJSUlFBSUrLP4yZOnIjP52P16tVMmzYNgGQyyYYNG6iqqtrj43w+Hz6fryshCSGEEKIP65FZM6FQiEsvvZRbbrmFyspKqqqqWLBgAQCzZ8/u9Hm01gBSKyKEEEL0IR2f2x2f43vTY+uILFiwAI/Hw7nnnks0Gm
Xy5Mm8/vrrFBYWdvoc4XAYQIZnhBBCiD4oHA6Tn5+/12OU7ky6kiWu67J69WrGjBnD5s2bCYVC2Q6pz+qot5F2/O6kDbuHtGP3kHbsHtKO+293bai1JhwOU15ejmHsfV5Mr95rxjAMKioqgNRwj/yR7D9px/0nbdg9pB27h7Rj95B23H87t+G+ekI69OheM0IIIYQQeyOJiBBCCCGyptcnIj6fj1tuuUWm9e4nacf9J23YPaQdu4e0Y/eQdtx/+9uGvbpYVQghhBD9W6/vERFCCCFE/yWJiBBCCCGyRhIRIYQQQmSNJCJCCCGEyJo+l4i8+OKLTJ48mUAgQGFhIbNmzcp2SH1WPB7niCOOQCnF8uXLsx1On7JhwwYuuugihg0bRiAQYMSIEdxyyy0kEolsh9br3X///QwdOhS/38/kyZP58MMPsx1SnzJ//nyOOuoo8vLyGDhwILNmzWL16tXZDqtP+/Wvf41SiquvvjrbofQ51dXVnHPOORQXFxMIBBg7diwff/xxl87RpxKRRYsWce655zJnzhxWrFjB0qVLOeuss7IdVp/1i1/8gvLy8myH0Sd9+eWXuK7Lgw8+yOeff87vfvc7/vjHP/Lv//7v2Q6tV3viiSe45ppruOWWW/j0008ZP348M2fOZMuWLdkOrc948803ueKKK3j//fdZsmQJyWSS4447jvb29myH1id99NFHPPjgg4wbNy7bofQ5zc3NTJ06FcuyWLx4MatWreLuu+/u0p5yAOg+IplM6oqKCv3QQw9lO5R+4aWXXtKjR4/Wn3/+uQb0smXLsh1Sn/eb3/xGDxs2LNth9GpHH320vuKKK9K3HcfR5eXlev78+VmMqm/bsmWLBvSbb76Z7VD6nHA4rEeNGqWXLFmiv//97+urrroq2yH1Kddff72eNm3afp+nz/SIfPrpp1RXV2MYBhMmTKCsrIwTTjiBlStXZju0Pqe+vp65c+fyP//zPwSDwWyH02+0tLRQVFSU7TB6rUQiwSeffMKMGTPS9xmGwYwZM3jvvfeyGFnf1tLSAiB/e9/BFVdcwQ9/+MOMv0nRec899xyTJk1i9uzZDBw4kAkTJvDnP/+5y+fpM4nIunXrAPjlL3/JTTfdxAsvvEBhYSHHHHMMTU1NWY6u79Bac8EFF3DppZcyadKkbIfTb6xdu5b77ruPf/3Xf812KL3W1q1bcRyH0tLSjPtLS0upq6vLUlR9m+u6XH311UydOpXDDz882+H0KY8//jiffvop8+fPz3Yofda6det44IEHGDVqFK+88gqXXXYZV155JY888kiXzpP1ROSGG25AKbXXn47xeIAbb7yR0047jYkTJ7Jw4UKUUjz11FNZ/i2yr7PteN999xEOh5k3b162Q+6VOtuOO6qurub4449n9uzZzJ07N0uRi4PRFVdcwcqVK3n88cezHUqfsnnzZq666ir++te/4vf7sx1On+W6LkceeSR33HEHEyZM4JJLLmHu3Ln88Y9/7NJ5PD0UX6dde+21XHDBBXs9Zvjw4dTW1gIwZsyY9P0+n4/hw4ezadOmngyxT+hsO77++uu89957u+wJMGnSJM4+++wuZ7L9TWfbsUNNTQ0/+MEPmDJlCn/60596OLq+bcCAAZimSX19fcb99fX1DBo0KEtR9V0//elPeeGFF3jrrbcYPHhwtsPpUz755BO2bNnCkUcemb7PcRzeeust/vCHPxCPxzFNM4sR9g1lZWUZn8kAhx12GIsWLerSebKeiJSUlFBSUrLP4yZOnIjP52P16tVMmzYNgGQyyYYNG6iqqurpMHu9zrbjvffey2233Za+XVNTw8yZM3niiSeYPHlyT4bYJ3S2HSHVE/KDH/wg3TtnGFnvYOzVvF4vEydO5LXXXktPu3ddl9dee42f/vSn2Q2uD9Fa82//9m88/fTT/P3vf2fYsGHZDqnPmT59Op999lnGfXPmzGH06NFcf/31koR00tSpU3eZOr5mzZoufyZnPR
HprFAoxKWXXsott9xCZWUlVVVVLFiwAIDZs2dnObq+Y8iQIRm3c3NzARgxYoR8q+qC6upqjjnmGKqqqrjrrrtoaGhI/5t8u9+za665hvPPP59JkyZx9NFHc88999De3s6cOXOyHVqfccUVV/C3v/2NZ599lry8vHR9TX5+PoFAIMvR9Q15eXm71NTk5ORQXFwstTZd8LOf/YwpU6Zwxx13cPrpp/Phhx/ypz/9qcu9w30mEQFYsGABHo+Hc889l2g0yuTJk3n99de7PmdZiP20ZMkS1q5dy9q1a3dJ4LRsaL1HP/nJT2hoaODmm2+mrq6OI444gpdffnmXAlaxZw888AAAxxxzTMb9Cxcu3OewohDd6aijjuLpp59m3rx5/OpXv2LYsGHcc889nH322V06j9Jy1RRCCCFElsigthBCCCGyRhIRIYQQQmSNJCJCCCGEyJo+VawqOi8cDlNbW5teCE4IIUTXGYZBWVkZeXl52Q6l35JEpJ9xXZf58+fz9NNPZzsUIYToN0499VTmzZsn6wX1AElE+pn58+fzzDPPcOWVVzJhwgQsy8p2SEII0Wclk0mWLVvGfffdB6S2GRHdS6bv9iOtra0ce+yxXHnllZx33nnZDkcIIfqNv/zlL9x777288cYbMkzTzaSPqR/pWGFxwoQJWY5ECCH6l47rase+Z6L7SCLSj3QUpspwjBBCdK+O66pMAOh+kogIIYQQImskERFCCCFE1kgiIoQQB6FYLLbPn64MQzQ2NjJw4EA2bNjQc0H3gDPOOIO7774722Ec1CQREUJ0i2OOOYarr74622Gk9bZ4epsjjzySQCCwx59gMMimTZs6fb7bb7+dU045haFDh6bv+9nPfsaPfvSjHoj+u9s5pptuuonbb7+dlpaWLEZ1cJNERBx0HMeRgrNeKpFIZDuEg8bFF19MKBRizZo1rF+/PuNn+vTpzJgxIyOp2JtIJMJ//dd/cdFFF2Xc/+GHHzJp0qT9jtW27f0+R4edYzr88MMZMWIEjz76aLc9h+giLfqNL774Qk+cOFF/8cUX+30u13V1OBLXzeGoDkfi2nXdbohwz5566il9+OGHa7/fr4uKivT06dN1W1ubdhxH33rrrbqiokJ7vV49fvx4vXjx4vTj3njjDQ3o5ubm9H3Lli3TgF6/fr3WWuuFCxfq/Px8/eyzz+rDDjtMm6ap169fr2OxmP7FL36hBw8erL1erx4xYoR+6KGH0uf57LPP9PHHH69zcnL0wIED9TnnnKMbGhp6tB26i+M4etPmOr3qy/V60+Y67ThOjz7f+eefr4GMn7Vr1+oLL7xQDx06VPv9fn3IIYfoe+65Z5fHnXLKKfq2227TZWVleujQoVprrZcuXarHjx+vfT6fnjhxon766ac1oJctW5Z+7N5en93F0/H30Fu5jqOd+i3a3rBJO/VbtNvDr1lDQ4P2er160aJFu9xvWZZ+4okn0ve9//77eurUqdrv9+vx48frN998UwP6s88+01qn3r8lJSXp4+PxuPZ4PBntP3nyZK211jfffLM+/PDDdTAY1AMHDtSXXnqpTiQS6ceuX79eA/qJJ57Q06ZNy4hxX3ForfXGjRv1mWeeqQsKCnRhYaE+66yzdFNT015juvXWW/W0adP22l7deX0VmWRlVbGLlvYY1Q1hWiIxXFdjGIr8oJ+Kkjzyc/zd/ny1tbWceeaZ/OY3v+HUU08lHA7z9ttvo7Xm97//PXfffTcPPvggEyZM4OGHH+bkk0/m888/Z9SoUZ1+jkgkwp133slDDz1EcXExAwcO5LzzzuO9997j3nvvZfz48axfv56tW7cCsG3bNo499lguvvhifve73xGNRrn++us5/fTTef3117u9DbrTV2s3s3jJe6xdt5l4PIHP52Xk8EpO+JfvMWpkZY885+9//3vWrFnD4Ycfzq9+9SsACgsLGTx4ME899RTFxcW8++67XHLJJZSVlXH66aenH/vaa68RCoVYsm
QJkFqY76STTuLEE0/kb3/7Gxs3btxliGVfr8/u4ikpKemR3707ON9UY3/4MW51DTqRRHktjIpyPEdPwhxc0SPPOWDAAGbNmsXDDz+cMVTx6KOPkp+fz6xZswBYuXIl06dP5+qrr+ahhx7i888/Z/bs2fh8PkaPHg3A22+/zcSJE9Pn8Hg8LF26lMmTJ7N8+XJKS0vx+/1ordFa8+CDD1JRUcGqVas4//zzGTduHJdddhkAK1asAGDBggXccccdDBs2jJKSkk7FsXbtWr73ve9x2WWX8f7779PW1sbll1/Oddddx5/+9KfdxgRw9NFHc/vttxOPx/H5fD3S3mLPJBERGVraY6z5ppF4wiHo92CaBo7j0hSO0h5PcMjg4m5PRmpra7Ftmx/96EdUVVUBMHbsWADuuusurr/+es444wwA7rzzTt544w3uuece7r///k4/RzKZ5D//8z8ZP348AGvWrOHJJ59kyZIlzJgxA4Dhw4enj//DH/7AhAkTuOOOO9L3Pfzww1RWVrJmzRoOOeSQ/fule8hXazfzX395jqbmVsrLBhAM+IhE46xc9TU1tQ1cdN7JPZKM5Ofn4/V6CQaDDBo0KH3/rbfemv7/YcOG8d577/Hkk09mJCI5OTk89NBDeL1eAP74xz+ilOLPf/4zfr+fMWPGUF1dzdy5c9OP6czrs7t4eiPnm2qSL72CG27DKC5C+X0Qi+Os34je2ggnzuyxZGTu3Lkcf/zx1NTUUF5eDsDChQs577zz0q/HlVdeycknn8xtt90GwOjRo3nkkUf45ptv8HhSHyEbN25MPx5SG8XV1NRQXFycfs916EgMAaqqqpgxYwarV69O37d8+XJycnJ46qmnMoaGTj311H3Gcfnll3P55Zdn/N394he/4LrrrttrTOXl5SQSCerq6tLXIHHgSCIi0rTWVDeEiSccQjlelFIAGB6TUI5Ba3uC6oYwoaAv/W/dYfz48UyfPp2xY8cyc+ZMjjvuOH784x9jmiY1NTVMnTo14/ipU6emvzV1ltfrZdy4cenby5cvxzRNvv/97+/2+BUrVvDGG2+Qm5u7y799/fXXvTIRcV2XxUveo6m5lUNGVqZfo7zcIIeMrGTN2s28/H/vMWJ4xQHbuOv+++/n4YcfZtOmTUSjURKJBEcccUTGMWPHjk1/6AGsXr2acePGpb+tQuob64764uuzO9p1Uz0h4TaMweXfvq+CAYxAOe43NdgffoJRXobqgdds+vTpVFVV8cgjjzBv3jw++eQT/vGPf/D4448DqQTjjTfeYOXKlRmP8/l8GR/m0Wg04/UCWLZs2S4f+Bs3buQ3v/kNb775JtXV1SSTSWKxGL/+9a/Tx6xYsYKTTz45IwnpTBwbN25kyZIlvPPOOxmzYBzHobKyco8xAQQCASDVcyoOPElERFp7LElLJEbQ79kl0VBKEfR7aInEaI8lyQ1493CWrjNNkyVLlvDuu+/y6quvct9993HjjTemu+r3puMDVe+wZVIymdzluEAgkPE7dVx49qStrY2TTjqJO++8c5d/Kysr22dc2VBd08DadZspLxuw29evfFAxX329meqaBioHl/Z4PI8//jg///nPufvuu/ne975HXl4eCxYs4IMPPsg4Licnp8vn7ouvz+7orY241TWpnpDdvGZGcSFudTV6ayNqYPcPLSmluPDCC1m4cCHz5s3j4YcfZsqUKRx22GFAKmH3er380z/9U8bjvvjiCy6++OL07QEDBtDc3JxxzPLlyzM+9BsaGjjqqKM49thj+e1vf0tFRQWO4zBp0qSM45YvX84NN9ywy7n2FceKFSsoKira5e8Lvn2/7xxTh6amJqB3D9/1Z5KIiDTbcXFdjWnu/puXaRq4cRvb6f4ZJ0oppk6dytSpU7n55pupqqritddeo7y8nKVLl2b0XCxdujT9DbnjwlFbW0thYSGQutjsy9ixY3FdlzfffDM9NLOjI488kkWLFjF06NB0t29v19YeJR
5PEAzsfow7EPQT39JEW3u0R57f6/XiOE769tKlS5kyZQqXX355+r6vv/56n+c59NBDefTRRzPG6z/66KOMYzrz+uwcT2+ko7FUTYh/D3UJPh+6qRkdjfVYDHPmzOGWW27h//7v/3jsscf47W9/m/430zSxbZtYLJbu8Xjttdf4/PPPMz7QJ0yYsMusk88++4zTTjstffv555/HcRwee+yxdNL1hz/8gWQyme4la21tZcOGDbvsl9WZOCzLIhwOU15eTjAY3O3vunNMHVauXMngwYMZMGBAp9pMdC+ZvivSPKaBYSicPSQajuNiGArPHhKV7+qDDz7gjjvu4OOPP2bTpk387//+Lw0NDRx22GFcd9113HnnnTzxxBOsXr2aG264geXLl3PVVVcBMHLkSCorK/nlL3/JV199xYsvvtipxYmGDh3K+eefz4UXXsgzzzzD+vXr+fvf/86TTz4JwBVXXEFTUxNnnnkmH330EV9//TWvvPIKc+bM6bUfbrk5AXw+L5FofLf/Ho3E8HktcnP23hv0XQ0dOpQPPviADRs2sHXrVkaNGsXHH3/MK6+8wpo1a/iP//iPXRKK3TnrrLNwXZdLLrmEL774gldeeYW77roLIP0B1pnXZ+d4euOUbRXwo7wWxHb/mhGPoywLFej+IvEO5eXlnHjiiVx44YU4jpNRvzNx4kQsy+K6665j3bp1PP/881xyySUAGYnIzJkz+fzzzzN6RVzXZfXq1dTU1NDS0kJxcTGtra0899xzfPXVV/z2t7/l1ltvpaKiIv2FYsWKFZimma4R60ockydPJhQKcd5557FixQrWrl3Lyy+/nFHovHNMHd5++22OO+64bmpR0VWSiIi0HL9FftBPJGZnDHVAaugjErPJD/rJ8XfvpnqhUIi33nqLE088kUMOOYSbbrqJu+++mxNOOIErr7ySa665hmuvvZaxY8fy8ssv89xzz6VnzFiWxWOPPcaXX37JuHHjuPPOO9PFbPvywAMP8OMf/5jLL7+c0aNHM3fuXNrb2wHSPTGO43DccccxduxYrr76agoKCg5YfUVXVZSXMHJ4JTW1W3f7+tXUNTJqRCUV5T3T/fzzn/8c0zQZM2YMJSUlzJw5kx/96Ef85Cc/YfLkyTQ2Nmb0juxJKBTi+eefZ/ny5RxxxBHceOON3HzzzQDpb8OdeX12jqcri3MdKGpAMUZFOW5j025fM7exGaOiAjWguEfjuOSSS6ipqeHss8/O6E0oKyvj4Ycf5tlnn2XcuHEsXLiQ888/n5EjR1JUVJQ+buzYsRx55JHpRB7gtttu47//+7+pqKjgtttu46STTuKiiy7i3HPPZdq0aVRXV3P66adn1AytWLGCQw89dJd6k87EUVRUxEsvvURjYyP//M//zJFHHsmNN96YUYS+c0yQWmH2mWeeySiGFgeW0jv/9Ys+68svv+Scc87h0UcfTU9n66o9zZqJxGx8XrNHZs2I7pMxa2ZQMYGgn2gkRk1dI0VFIS46t2dmzfS0v/71r8yZM4eWlpZ91vf0NZmzZgrB54N4PJWE5OVi9eCsma5yXZdjjjmGadOmZcxYAnjxxRe57rrrWLlyZY8n63uLo6seeOABnn76aV599dW9Htcd11exe31j8FscMPk5fg4ZXPztOiJxG8NQFOUFemwdEdF9Ro2s5KLzTv52HZEtTfi8FmP/aQTHz+i5dUS621/+8heGDx9ORUUFK1asSK8R0t+SECCVZJw489t1RJqaUZaFOWwonqMnZjUJeeutt2hoaGDChAls3bqVBQsWsHHjRp555pldjv3hD3/IV199RXV1dXqWSjbi6CrLsrjvvvv2P0jxnUkiInaRn+MnFPTRHktiOy4e0yDHb3XrlF3Rc0aNrGTE8Aqqaxpoa4+SmxOgoryk1w4p7U5dXR0333wzdXV1lJWVMXv2bG6//fZsh9VjzMEVGOVl6K2N6GgsVTsyoLhHpux2RX19PTfccAPV1dWUlpYyY8
YMPvzww4xhmR311N4+XY2jK3ac/SOyQ4Zm+hHpOhRCiJ4h19ee03e+IgkhhBCi35FERAghhBBZI4mIEEIIIbJGEhEhhBBCZI0kIv1Ix6yI3e21IoQQ4rvruK72pdlnfYW0aD/Ssd35smXLshyJEEL0Lx3X1b60qWJfIeuI9COhUIhTTz01vTjPhAkTsKzuXY5dCCEOJslkkmXLlnHfffdx6qmnkpeXl+2Q+h1ZR6SfcV2X+fPn8/TTT2c7FCGE6DdOPfVU5s2bJ0MzPUASkX4qHA5TW1vbK3ccFUKIvsIwDMrKyqQnpAdJIiKEEEKIrJE+JiGEEEJkjSQiQgghhMgaSUSEEEIIkTWSiAghhBAiayQREUIIIUTWSCIihBBCiKyRREQIIYQQWfP/A7ukLrsJqxjWAAAAAElFTkSuQmCC", "text/plain": [ "
" ] @@ -264,7 +264,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "To use the Gaussian initialization, the samples of source and target (`samples_source` and `samples_target`) need to be passed to the {class}`~ott.neural.models.ICNN` definition via the `gaussian_map_samples` argument. Note that ICNN $f$ maps source to target (`gaussian_map_samples=(samples_source, samples_target)`), and $g$ maps target to source cells (`gaussian_map_samples=(samples_target, samples_source)`)." + "To use the Gaussian initialization, the samples of source and target (`samples_source` and `samples_target`) need to be passed to the {class}`~ott.neural.networks.icnn.ICNN` definition via the `gaussian_map_samples` argument. Note that ICNN $f$ maps source to target (`gaussian_map_samples=(samples_source, samples_target)`), and $g$ maps target to source cells (`gaussian_map_samples=(samples_target, samples_source)`)." ] }, { @@ -274,12 +274,12 @@ "outputs": [], "source": [ "# initialize models using Gaussian initialization\n", - "neural_f = models.ICNN(\n", + "neural_f = icnn.ICNN(\n", " dim_hidden=[64, 64, 64, 64],\n", " dim_data=2,\n", " gaussian_map_samples=(samples_source, samples_target),\n", ")\n", - "neural_g = models.ICNN(\n", + "neural_g = icnn.ICNN(\n", " dim_hidden=[64, 64, 64, 64],\n", " dim_data=2,\n", " gaussian_map_samples=(samples_target, samples_source),\n", @@ -295,14 +295,14 @@ "name": "stderr", "output_type": "stream", "text": [ - "/home/michal/projects/nott/src/ott/neural/solvers/neuraldual.py:276: UserWarning: Setting of ICNN and the positive weights setting of the `W2NeuralDual` are not consistent. Proceeding with the `W2NeuralDual` setting, with positive weights being True.\n", + "/Users/michal/Projects/dott/src/ott/neural/methods/neuraldual.py:154: UserWarning: Setting of ICNN and the positive weights setting of the `W2NeuralDual` are not consistent. 
Proceeding with the `W2NeuralDual` setting, with positive weights being True.\n", " self.setup(\n" ] }, { "data": { "application/vnd.jupyter.widget-view+json": { - "model_id": "c4e2a1cdac674c588497d0803d003ec2", + "model_id": "fdf9e1aeda2b473c93d15d4815247286", "version_major": 2, "version_minor": 0 }, @@ -345,7 +345,7 @@ }, { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjgAAAG7CAYAAADOue8dAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOz9aYxlaXaehz7fHs88xjxkRORUU1dVd1V1VVexSYrm0GKThChd0DZhWDTl4YcHmGjLhilYtmUZatuSIdmgYdmApRYg69IWrsRr2XTrNkmTItXVXV1VPdSYY2TM45mHffb43R9f7DgRkZGZkVmZGVGV+wEOIuKM++yz43zvXutdawkppSQhISEhISEh4TOEdtobkJCQkJCQkJDwsEkETkJCQkJCQsJnjkTgJCQkJCQkJHzmSAROQkJCQkJCwmeOROAkJCQkJCQkfOZIBE5CQkJCQkLCZ45E4CQkJCQkJCR85kgETkJCQkJCQsJnjkTgJCQkJCQkJHzmME57A06DKIpYX18nn88jhDjtzUlISEhISEg4AVJKOp0OU1NTaNo9YjTyEfJHf/RH8hd/8Rfl5OSkBOQ//sf/+NDtURTJv/yX/7KcmJiQqVRK/vRP/7S8evXqPZ/3t37rt+Tc3Jy0bVu++uqr8rvf/e59bdfKyooEkktySS7JJbkkl+TyKbysrKzcc61/pBGcXq/Hiy++yF/4C3+BP/fn/txtt/83/81/w3//3//3/L2/9/dYWFjgL//lv8xXvvIVPvzwQ1Kp1LHP+b/9b/8bX/va1/jbf/tv89prr/G3/tbf4itf+QpXrlxhbGzsRNuVz+cBWFlZoVAoPPgbTEhISEhISHhstNttZmdn99fxuyGkfDzDNoUQ/ON//I/55V/+ZQCklExNTfEf/Af/AX/xL/5FAFqtFuPj43zjG9/gX/6X/+Vjn+e1117ji1/8Ir/1W78FqHTT7Ows/96/9+/xH//H//GJtqXdblMsFmm1WonASUhISEhI+JRwP+v3qZmMFxcX2dzc5Gd+5mf2rysWi7z22mu8+eabxz7G8zzeeeedQ4/RNI2f+ZmfueNjAFzXpd1uH7okJCQkJCQkfHY5NYGzubkJwPj4+KHrx8fH9287yu7uLmEY3tdjAL7+9a9TLBb3L7Ozs59w6xMSEhISEhLOMk9Emfhv/uZv0mq19i8rKyunvUkJCQkJCQkJj5BTEzgTExMAbG1tHbp+a2tr/7ajjIyMoOv6fT0GwLZtCoXCoUtCQkJCQkLCZ5dTEzgLCwtMTEzw+7//+/vXtdttvvvd7/L6668f+xjLsnj55ZcPPSaKIn7/93//jo9JSEhISEhIePJ4pGXi3W6X69ev7/+9uLjID37wAyqVCufOneM3fuM3+C//y/+SS5cu7ZeJT01N7VdaAfz0T/80f/bP/ln+3X/33wXga1/7Gr/2a7/GK6+8wquvvsrf+lt/i16vx6//+q8/yreSkJCQkJCQ8CnikQqct99+m5/6qZ/a//trX/saAL/2a7/GN77xDf6j/+g/otfr8W/9W/8WzWaTL3/5y3zzm9881APnxo0b7O7u7v/9L/1L/xI7Ozv8p//pf8rm5iaf//zn+eY3v3mb8TghISEhISHhyeWx9cE5SyR9cBISE
hISEj59fCr64CQkJCQkJCQkPCqeyGGbCQkJCZ8VZBQhd2tIZ4BIpxAjVcS9hhAmJDwBJAInISEh4VNKuLpG8NbbRGvrSM9HWCba9BTGq6+gz0yf9uYlJJwqicBJSEhI+BQSrq7h/+4/Jep00aoVRMqGgUu4uITcrcFXv5KInIQnmiSOmZCQkPApQ0aRitx0umgzU4hMGqFpiEwabWaKqNMleOsdZBSd9qYmJJwaicBJSEhI+JQhd2tEa+to1QoSjeVeFScwkL0est1GpFOEq6sqkpOQ8ISSpKgSEhISPmVIZ6A8Nymbfmgx6AZ0l9cx2ssQhSDUuWt4axltbPSUtzYh4XRIBE5CQkLCCTkrFUsinUJYJgxcHEcn2mhiDbYgZYFhgDOAbofgze+iTU0kXpyEJ5JE4CQkJCScgLNUsSRGqmjTUwQ3b9HvhkhfksoZCAFIifR9GB9H+j7BW++gTU0mpeMJTxzJEZ+QkJBwD+KKpXBxCZHPo01PIvJ5wsUldf3q2mPdHqFpGK++gmYaOE0XdB1dRng+hO0uwrbQZ6bRRipEa2uJFyfhiSQROAkJCQl34axWLOkz0xivvwaWrVJn3S5L4Tm2y8+gXbqIVsiDbSN9H+kMHuu2JSScBZIUVUJCQsJdOFixJITAj3QATC1ECIFWLe9HScRjNvTq83OIsW10TSfKCvRgFj0boWWa6g6uizBNRDp11+dJSPgskkRwEhISEu5CXLFEygbgVneMW72x4R1OMUoiRqpo1Qr4HoNsFWHbZAxPbbeURLUG2vQ0YqT62LctIeG0SQROQkJCwl04WLG0jzxwh1OMkghNQ7t8CTNj0ltvI12XtHCQ/T7R6jpaPofx6suJwTjhiSQ56hMSEhLuQlyxFNXqSClBDG877SiJ74NWrZL78ku443NIx8HYWkV2uugL85jJuIaEJ5jEg5OQkJBwF+KKJblbI1pdx8gU8fUUUa+PrDdONUoy2MuKZefG6KVHMFsd7Innk6niCQkkAichISHhnugz0/DVrxC89TappQael8ZNu6QX5jFeffnUoiSOo36m0wAaolhEnyueyrYkJJw1EoGTkJCQcAL0mWm0qUmKiw366xHRpIZ1vnyqUZJY4NjK/6wa/SUkJACJwElISEg4MULTyM5W0XwYZPZHPp0acYoqFjaWdXrbkpBw1kgStAkJCQn3QSwi4ujJWcDdK/DKZE53OxISzhKJwElISEh4AILgtLcApFTRm35f/Z0InISEIUmKKiEhIeEBkPLe9znR8zyECeWxwFFm44SEBEgETkJCQsJ9I8TDETgPY0K5oUd01ztErodWAfkElYc/DHGY8NklETgJCQkJ94muf/IUVTyhPOp01ZyrlA0Dl3BxSU3/vkeTPs+DqFbDuPUe/c0+0g9xf7B63wLp08rDEIcJn20SqZuQkJBwn8ReF99/sMc/jAnlvcUNwrffxVy/Bak02kgZkc8TLi7h/+4/JVxde7CN+xQQi8NwcQmRz6NNTz4x7z3h5CQCJyEh4TOPjCKi7R3CpRWi7Z27CoeTEHtdYu/LfW/PkQnlUkK0l/I6OqH82MdHEd13PkI6AzLTZYRtg7g/gfRp5ag4JH3/4jDhySBJUSUkJHymeRSpjDiC4zhQfIDGwfGEcrE3ofxmb4IoElwqbKg72Day3rjjhHK5W6O/2Ubki1hag0hK0n6HqNVCGAaiUtoXSGJs9EHe4pnlqDi81p5kPN2kYDq3icPP2ntPuD8SgZOQkPCZ5ZP6XO7EJ+2Fc2hCeSZNJI+0IL7HhHLpDHB9HZE3cVou65sRlX6TSe9j0HRELgumeUeB9GnmoDjcGih16UcHlrJ7iMPHQWJ+PhskAichIeEzyW0+FyHwIh0ro6Glp4hW1wneegdtavK+F5+4c7DnPdi2xRPKw8UltPTU4e3em1CuL8zfcUK5SKcQho5sdejubiMZBdMAOw9BgKzXQQhkqwXMPthGnlFicRg6Hu1QhdKqdmd4h
3uIw0dNYn4+OySSMiEh4TPJ0VRGw8uy1B3ba453b5/LiV7jAUvF4wnlWj5HtLoOrouMQmS/T7S6fs8J5WKkCpUKcnubbpACy6KodxFCIAwDqelgmITXbnzmvCixOLy5nUcimcrU92+LxaE2PX1HcfgoSczPZ4tE4CQkJJwKD9v4e9vz76Uy2PO5xGmgTrDnELZtpO8/cCrjkw621GemMb/6FfSFOaK+g2w0kZ0u+sK8uv4uZ/tC09AmJyAM6Mg8Ys+l7PkC2e6gpWy0hXmi9fVPJODOIkLT8F/8IqTSyFqdjNdAhicXh4+K4yrj5BNi/D6rJCmqhISEx87jCOMf9bmUrR51N8+uW6BgOp84laFpEIafbBvjCeXm9zrge9iXOZFfQ0oQ2SxWKUMrqBD5Ie3AQqPMVFmgTU8hshmijc3PpA9n1Z9Cf8VmbuPbyI0ust5AmCb6wjzGqy+fSiroaMRw3SnjhQbzuZ3E/HxKnLrAmZ+fZ2lp6bbr/+1/+9/mf/gf/ofbrv/GN77Br//6rx+6zrZtBoPP3j9xQsJnkUdl/D3KUZ+LthdxCSPtRD6Xe5FOQ7erGv4Zn+CbVGgaolhECNDGTvYYzwNhWaRzBj5jEFnYRkQqVUAvjanS837/VL0oj4raXkAqf65K5vVfODNm3qPm556fgoNRvjNgfn7SOHWB873vfY/wwGnQ+++/z8/+7M/yK7/yK3d8TKFQ4MqVK/t/i08aK05ISHgsHGf8dUMD+xMYf+9UsRL7XORuTaUuqmV0fAI3wm9uYhY+WSojk1ECx3Egn3+gp3hgBgMQhTyZySL+DR8/m0fLZshmBnt9dY4XcJ/26h4pYWdH/T4zA0JoZyYaEkcMGx2TtlDm54u5jeEdTtn8/CRy6gJndPTwwflf/Vf/FRcuXOAnf/In7/gYIQQTExOPetMSEhIeMkfD+G5osNwb3e9jcr9h/HuluvSZafjqV/bvU3FvsMUEnalnGf+Jpz5RpChu9ncaAsdxAKGRevlziJUt6PaRpksq4yD7A2W0PeJF+SxU98TB/vHxT+6BetiIkSr9sQV2Fn3EiORCfmt/Gx9GxDDh/jl1gXMQz/P4+3//7/O1r33trlGZbrfL3NwcURTx0ksv8df+2l/jueeeu+P9XdfFdd39v9vt9kPd7oSEhJNxtMGdpamBTltOSfli7iOMf9JUV+xzkbs1qv0B9a0C7WKeqZlPFrlI7Z2IP2g3409C3H9Hjk2hXcwhVneQziq4G8hjvCiPKy34KPE8FbkCKJdPd1uOw/U0tma+iNh8l3PNH6IZeaRtg+seKzgTHj1nSuD8zu/8Ds1mk3/tX/vX7nifp556ir/zd/4OL7zwAq1Wi7/xN/4Gb7zxBh988AEzMzPHPubrX/86f+Wv/JVHtNUJCQkn5ajxVwhIGR6DwKLrp8j69ROF8Y9LdQGQSR+b6hKaSmVogHhIFoj4JQ+cOz024v47/T5ohQLpp3OYhRHsiedvSz3d7746qywuqp8LC6e7HccRBHDrFmjVKud+8TnMH/ZVpOwMmJ+fZM6UwPlf/pf/hZ//+Z9namrqjvd5/fXXef311/f/fuONN3jmmWf4n/6n/4m/+lf/6rGP+c3f/E2+9rWv7f/dbreZnf1sNb9KSDjLxN6PqNdHlIpEWztos9MIIZhO17nRmWDdKXGhdeNEYfyjqS4/UguzqUX3rFjJZJQw8LxhR+JPK3EwOpfT0EpF9Lnb50Yc3VeD0MTWfIS4vR/QWfGzHKXdVv4b0wTbPu2tOUwUwfXr6vepKcgVppALv/Sp9jp9VjgzAmdpaYnf+73f4x/9o390X48zTZMvfOELXI+PsGOwbRv7rP1XJCQ8IRz1fhD4yGaLyHGUyLFtTL+H2xowKI5QPEEY/2iq61Z3HFMPmc9uqzvcJdU1Oqq8HLu7akH6JAjx4M3+PgmqWaEyOUup0mWmeYf7HtlXK70RMobLdNwg71NQ3bO+rn6eteiNlHD1qvp9d
BQKBfV7HDFMOF3OjKT8u3/37zI2NsYv/MIv3NfjwjDkvffeY3Jy8hFtWUJCwoNyXGdXbXwcsllkr0+0tUO0scm0fxNtfIytF0/mBTmU6gIQ4If6UGzcpWIlNgd3Orfd9KljMFDpEduGbPb4+xzcV+Fes8NQHvjqP+PVPbG4qVRU76GzRCxuCgWoJt7hM8eZiOBEUcTf/bt/l1/7tV/DONJQ4s//+T/P9PQ0X//61wH4L/6L/4IvfelLXLx4kWazyV//63+dpaUl/o1/4984jU1PSEi4A0e9HxKNSAqMjIZ+6QLhyhra2BjGT7yBlslgN6uEkYbjDEXInTja46Zqdai5eZp+lpLZvfcsp4cUeUmnVborih7f4htv955/Fd9Xr31HgXNgXzVGRwAoWb295zrb1T1hOEzDjZ2wR9Dj4uZN9VnY9iePBCY8Gs6EHv693/s9lpeX+Qt/4S/cdtvy8jIbG8NeAo1Gg3/z3/w3eeaZZ/jqV79Ku93m29/+Ns8+++zj3OSEhIR7cNT7UXPzLHbHAeX90EcqyFYTLZNBGxtl4bz6OlpevvdzH53lVAq2kVHIbts+Ubv+4p5Vpdf7ZO8xo9qdPPBU8QchNjXbtlpg4zZi8bYc5eC+amy4SNclJ7qnPtrgJMTOg7NmmVxf32u2KM5e2ixhyJmI4Pzcz/0c8g6nU3/4h3946O+/+Tf/Jn/zb/7Nx7BVCQkJn4Sj3o/4P3x7UGAs1b7N+2EYw8iK697bTHq0x410mkhNRywsYL720l1TXdUqNJvKh3OnyMdJiCNN/f4nex44eV+XWEzpuvoZ65K76ZN4X/GtFWStjnQ34YxX9/T7Q6/RJ923D5Pd3WFU6fLl092WhLtzJgROQkLCZ4+jJeFjqTYtL0vLyzJqt4/1fiwsqND/rVvw1FP3fo2DPW7O1Tw22jkaM3mmpu8ejYgNuZ808nKw2d8n5aQBlLgXTHxOeNIxEdr0NMaXJhHdDvbY7eXkZ404knfhwulux0HabSVwQImbs9ZsMOEwZ/PITkhI+NQTez+iWn0/QjuWbgKw0q+q5mfT04e8H5Y1jOL4/glfR9PQxkYpPTONKBZpd072tTbsMnvit3QbsTZ4GALnfiM4cS+cbPZkj+12AaFRXSiiz82ijY2eWXETi4hc7pPN+XqYOM7Q8Hzx4tkzPCfcTvIRJSQkPBKO+mRkv09B6yJdF2erQ5QrHuv9mJ9XP+PGbvdD3NfmJOJoRPltabXu/3UeBScVOPF7azSUOMtk7lwifpD6XlV4qfRAm/fYkHIocKbPSObM84ZjIubnz47oSrg7icBJSEh4ZOgz05hf/Qr6whyy01Ul4cEi2vgYK8//4rHeD9tWi30UqRLo+yGuZonPtO9G3O4/Ht74oDysiqz7jQicpET86P3h7C/Ot26pn5OTZyMFFIYqbQpqwGfqbFbTJxzDGT/UExISPu0c9MlIZ0AqnWK7XgU0BoPjF4xz59QZ861bKh1wUuLnOknKKBYUcRXSaXPSxTw23g4GKppjWScTOPHjzjKepwzmQgwr3U4TKeHaNfX7+LhKmSV8ekgiOAkJCY+c2CcTez8uXFBfPXHY/yjptFrkguD+BUi8CJ1kCGac0vokIicWVQ8axYkfd7/iw/eHEa47lYjHxKXlj3vq+f0SpyXjNOVpEzfyK5XO5oDPhLuTCJyEhITHTjxTSMphye1R4tm5J+mLc5CJCfVzbe3e9x3d66Yf+1MehLiSavCAkw5igXOSFFV831iYnfSx8fs7y4t0q6Xej2WdjXlT16+r7Umnh8dUwqeLROAkJCScCnNz6ued/DJx2sV1lR/npMQek5NEZeJozycROHH05CQRo+OI39tJIjixiIoFzUnMxTAUkffqEH1aSAlxP9ezEL1ZWVHRMSGGx2nCp49E4CQkJJwKmjas6LmT0XdqMkK2Wiy/vUW0vYM8odKpVNTPZvPu91MTtT+ZSfiT9sK5nxTV0R44cVn9SV/jrBKL3
JGR0y+/3t5WHa6FOFkvpoSzS2IyTkhIODUmJpQIqdXU4nZwsQ5X17Dfehv/pokfhAwyq+gzUxivvnLPzrujoyoqs7V177LobFb1iDlJ9+TjiDsKP2gE535SVLGIcl31uFzu3lGcOJJ1FtI+xxEEw8Gncen+adFsDqN5SZfiTz9JBCchIeFUGVfjqVhZGV53cAr5RGmAqJbZtOcJF5fU9at3N9gcjMzcK3oRL6px75UH5UGjJHdKUckoItreIVxa2Y9exQKn11PCIJ2+dwVVHMWKo1pnjRs31M9z5053O3o92NxUv1+6dPYrzhLuTRLBSUhIOFXKZRVp6fdVtEETh6eQF4Vku63R14uI6SmitXWCt95Bm5q8ayfeiQnl69jZufsk6rgKqtv9ZO/jYVZRhatrwxlbno+wTLTpKbyJLyMq1f0S8ZP0wGk01M9C4cG271HS6w3L1+9VCfYocd2hwD5/fhiVS/h0k0RwEhISTp347P3GjdunkEsJOVOFLra9Elq1TLS2htyt3fU54z4qJzEQf1Ifzic5248jOPt9eQ5Er0Q+jzY9icjnCReXCL73LlGthuuqCI6u31sYxKXkZzEiEYuK++l19LAJgmF5+rlzwwq1hE8/icBJSEg4dTKZYffiQctFej7sTSGve3m6fpoI6HgZNYXc9/enkN+N+Ez8Xh2RY5/Og0ZxjpZt3w8HIzgyOhy9Epk0QtMQmTTazBTSGRBdu04URfvv6W7enQftsfM42N5WPwuF04uYRJEqBwfVOfk0o0gJD59E4CQkJOxznO/jcXH+vPq53MgPp5ADaV39HETKTbvTSd02hfxOxL10Ym/Fnajuzfu8mw/nbvsmrqSKG+rdDwdNxsdFryKJGlbad8DQ8de3ibr9u45ciLe19fEastWiXHp8n+PB17/TcRRFw8haPF7jcSPlsJFftXo2OicnPFwSD05CQgJwZ9/HSaqWHgamqSIhXj5Pf2yB9OpVtPQUGUONzc5oPqGERl0wdvnwFPI7EQuPe0VmYrFwp2Z999o3mYwy8zrO/c8qOmgyls5APf9e9Gq5X2VQ63Nh9zuEzoDQfAb8gNC+gTFzHiFub018cFu3nRF8LUvm4zb+M5fQ588hRqqPdIr4SY6jOCV0WuIGhuImlxs2fHwYyCjaH0si0qlHvr8T7kwicBISEvZ9H1Gnq6IHKRsGLuHikvK6fPUrj0XkzM/D1asaWzNf5HxrnWh1Ha1aZsreYa1doN+OyGVStJ96ntETLhqZjDIw32nuVYymKbFxdGbTSfZNelztm37//rsFH0wjiXRqP3oVBQGDxQFho0nUWWdgFSHrE+kGstsjtXINcW6GaFvsL6ZyMMD/5rf2t3UgS0Q7u3DjTdy3vot2bgb90sVHJlpPsq+C0Wl8X73f0zI+Ly2p/W4Ywyjfw+C0TxISDpMInISEJ5zbfB/x6p5Jo6WniFZPVrX0MNA0tei1qdJ64xcoXf0O0do6Kb+BjC6SG68iLl+kplc56Un35KQyL6+twYULd77f6Kiq5mo2hyLlpPvG+uVJQHugZn8HU1RipIo2PUX4wUeE7Q6hMw0SyGYYiAK4LoNUhiCdI9fbxPzWDxh8z1UlVaaBbLURgHb5ImG7R7C5i+4PlMnIGSA7XYKbtx6JaD3Rvvru29yYzoAfcP48yOjxRzc2N1WkTYiHa24+KycJCUMSgZOQ8IRz1PfRDyxs3UcXEiHEoaolMfYQY/l3YHJSjRZoGOOM/plfgpoK9y+QZq1fodPVyKOEyL2a+MGwEZ7v3/1+pZISODs7BwTOgX0DgrafJq17mFp4276B0Qca2nkwRSU0DeOVlwi+946q77amEaYOg4i6MUIzPYam6wTtAZZfJ929iRiZQ4yNIOsNwvUNZUxud9heD9mKprmQXkIICylUpEc/XyJqtB66aD24rySC5d4IU+nG/r7Cstj97hWCMlhagPzBNt5jjm40GsO+QA+zkd9ZOklIGJLs6YSEJ5zY9
xFXLa31q9zsTAwrgu6jaulhIMSwb836xnAKeX5uBKFp5HJKFNzLOHyQWAjFHXPv9LpweO7VwX0TSI0tp0TDO9B45si++aRVVAAilUKUiohSCaKIKJQ0KbOZPc8gN0qQzuG5EZbXJ2X6SN1Qi6ZpIGwbGYREt5ZZdsYRpklFNKhTQeo6RCGE4YlL7e/rfRzYV6v9EbzQxI9UeVTU7hCurLA9KIJpMncu2i99P0njxodBt6sELDz8Rn5HTxJafppIDl9ApFOEV68RXrn2WI37TzqJwElIeMI56PsAKNvKkXu9O6nu4Lonrlp6WMRdd7vdw0MzZ2bUwhQPj7zTJPKj7AumOwz2jInHGez3jjmwb0xNLUwt/4DAObBvHnTBPDqqQToDMEy0py9DLouTH6M2+jRhuoDQdXxMQqmBLjB0jZYoc6s7CroBuo6wLGSnjSNtEIIIjTpVWmEBNB1hGI9EtMb7KnR83FCFzTKGh5SSaG2ddW8ckU4zUnDR9GHpe9TpErz1ziNd+AcDWF1Vv1+48PDL0g+Ku+udCbadEv3QUsLu46sE128SLt7C+//+E7zf+SePRdAlJAInIeGJJ/Z9RLU6UkpG7A4ZwwUJ1zvjRLUG2vTJqpYeJrOz6mdccQOqa29sTo2iewuWGE072eiGuJomLmE+um8A5YlBlW4f3Dd3K9u+G0dHNcRCQWoGWj6LjAToOppQd5RRhJARhCGikKemjeJHBiKbQRTy4HmEEUgEuvRpUEVKSdbZVrdnMo9EtMb76tZ2FolkOrMXHeo7eK0+jp5DZFKQHkY3bk/zPXyCAG7dUr/PzZ18Avv9INIppGlxvT6ClAIhJJn+LtG168hGE6HriFwWUSg81qjVk04icBISnnCEpmG8+gpaPke0uo7s95mydxDegHCnwYa9gPHqy4/dOxCLmSA43F9mZkYJllZL/X3S5nyxeKndZR3N5dTPeLzB0X2D5yKjENnvqwqvfG5/38RN4jzv3tsipaTreDS7A3qOD8ihwNkTCl69iyhXkIYBzgCiiFCC9ENMPISpKnRgKBb06UmEodOSBTAMCs4WTmCA42BZOvq0iso9CtEqNA350heJUhlkrU7aayLDkKjTZSmYRVgW5TGDulug5uWGD3yEKdCDjfympoZtAx42QbHKYv7zRO0uaWPAhdyGqqRyPcjnIPARhQKiWnlsUauEROAkJCQA+sw05le/gr4wh+x0iTY2OR9eQZsYw/3CG9Tt06n+WFhQP+MzcBjOXioW1QK2dsIT4dg4fDeBA7ePbTi4b3LuLrLRxGl56Avz6vo9g2y8eB5XSXVQ0GzUO3y4tMN7i1t8cGuba2s1VrbbdPYW+FhUBdkS9B3Mch4yaeWS7jkgwMqaWDkbclmkO0A6PWSvh8zloFxmc+wLiFSKafcGstdHS6XQzs+DYdwmzB4mS+4U+isvMX9e2z+OegM1T0KrVmibEwBUrQNmqEeUAj3YyG909NGVpDsO3FzU0C5fYqTgMll7H1mrE7XaKlzU7bJrn6M7fhEhxGOJWiUokiqqhIRPEY+yiZg+M402NXno+Z+tVrl6TaNWU034Hne3V8tSvUqCQEVq4gjL7KyaY1SrqS60/f692+wfnDAehnf2YeRyyox8sG9OvG9GVuv0lySDEZ3SU6VD+z4WOP3+4f3U6g1Y2+nQ6g9wvYDuwEcTgko+RS5j4XZ06p0BHy61uBSVmSjn0Gemib78pwi/exNZ6zEw8wgzh5FJE41OUiyZZD++hffWDwj1i6SiPkF4CzQdfWaazoWfxBAGmVIa44MWxcE6stcDTwkz49WXH3rl0mCg9q0+UiX/+s/vH0e3NnLoH13B3F7Fp8iI3UXbi1bFaT59Yf6hp0BjcVMoDDtVP2xaLTXQFWD2hSqZudcJ3nqb8Op12BOci6VXEJUKvm1SYq9Vtm0j643HZtx/UkkETkLCp4TH0URMaNptpeAXL6ow/8aGEhyPIsx/N+G2sADXrimT6NNPq
/vH6au9QiNWVuCpp+79OjMz6r5bW3fuojsyogTO7u7hJnBC08idG0HrQ0eDySO6Mp5HdTCC0+oNuLpaw/VCUpZGOwwJo4gIaPYGRFLS7hu4nqTe7vOD6y7nRouU8ikWXZvauYtQ7uP2BOhZsuUCUWiRSTfIaH16IqeGkcqWalAIICCKBFohS/vc5zFHIyaKNXT/0XbWXVpSP8+fHx5HW1uglSDz/Hk6/7yD3K1TnmgiQxtcV6XKHkE06eZNJbYs69F1S97aGqYyFxb2DOp5JYTDK9fo/s43WbafQhTyWHrIuezO8MGnYNx/EkkETkLCp4DTbCJmGOoLfHFRLWIXLjxco+a9hJuuQz6vREccsYGhWNnZUSmIe3UqhmF6q92+88IXV1L1end+nuOsE7GHJu63I6VkbaeD64XYlkajM6Db91B+G4EfRPQHPq6bRQgDy9KJooiNWoel7SZuJ49tWohqkW6k4wcSrzfAjATGzjVS0qH1uVcx+hpFQ8e0ZpHpFP2VOnJ9g8xEjsFAA6FhTT3a/kW93rAzcHxsRNFQALj5MfRXXmJm/TvIzS6y3kCY5iOJJq2vKx+UEMP5Zg+bW7eGYz0uXTocDRSaRm/yEqsTDdjaoWJ1GEkND6ZHGbVKOEwicBISzjhHm4hJBH5kYGe0x9ZEzLaVoFhdVV2BL1+++xTrk3JS4TY1BVeuKDFTqajFK47ilMtqMV1aOlkUx1bBAzxvGHU5ylEfzklvg+FtvYFPqz9A1wX19gB3r/Zc1zQQ4PohUoIXRERBiOP6+2k0P5AEviBjqp0cBTqWKfGCgEHbQW/vYo6UGAQpOoGBMCSg/B1buQtEzQ7T2SZ9Ko9lkvjKivoZe6ZARVFAfU69HmSmD6euHkU0qVYbtg54mI38YmJfTzzO4/Ll2/vprK9Du608OdPuDeydGrJa3j/wHlXUKuF2kr2bkHDGOdpEbBBaLPdG2XRKj9WwmMsN+8nEX/KfhNu6v2bSKrVxTH8UIYZVUAdLw2dmVNRgd1dtz0kqmKanb3+eo8SG5OMaA8Y+oONe6+BiF4QRYaiiNEEUYWoaIAmjCD+I9vefjEATQlWMhRH9QYChafhBhOepUFEYCQRg6hqR5xN6PpHn4a1t0NrqsnO1RvD+h4QfX2XLUTX0GaFyZXFPoQfhJNPlY0GRTg8jGYPBsJdQHAmbm1PRjbhxozY2+lAX+HZbCWA4Xnh8UsJQiWwplVZ56qnDryGluj3eH099qUrhF//UIeO+7HRvM6cnPDqSCE5Cwhnn6IRpW1c5kI6fphOkuZBeRfqPx7BYqajoR6ulRM5JIiZ34nbhZpLae2/HjYioVtUC1ukMTcJHoziLi/fepjhqc6fJ4aDSYPW6Ek75IwO7i0W1Da3W7VOodX24sBu6hgQGXoCUkp4XEES3q8IwAhnuiR4gkpLuwCMIQ4JAYFmgaXIvaqCBISAMia5cxdFfQdg6qayOCCxko4nrdhDpLO0gC+aDC5yTer5ioRj3LYJh1Zuuq89qYuLhC46DOM5wOy5efDjRxYN43jAiVSqp93MQ31eRTVCC+8KFvfd7jHE/mS7++Ej2ckLCGedop2FdSC7mN9A0qZrxNUYJDfuxGRYnJ5VIkPJwE76TnO0f5GD310gKVnojbDgHRnEf0x8lNv0eLBufmVG+jziKc6+ZUzAsGb6TzyaORBzsvxMT+3jiPjwHiQ3YQQAZ28DUNPquR9/1jxU3MZL9/oH7f7t+RLPj4QUhQkhkBFJGiLSOEbp4PQ8nVcbWfLKiB6ZJmC8QuT5mNMC184fey/0Qpw7DxSVEPo82PXnsaIW45L5QGIqK2HcTi5vYDP6o8P2hwXl+ngduuHgnut2huJmYuF3cdLtDcVOpKIF1UMw9yqhVwt1JIjgJCWec/QnTi0to6am9XhpwIbfJplOgtTtgafzzLGSqZO/9dA+F8+dVON511Uyo0eCYs/2pSfRLFxHF4rFnrgeFm5ZRyqDrp3AtA1sPj
q00yeX2PCr+0EMTC464ourWLWX8vBvj4yqVsLp654hPvEBH0eGIQLx4xZGag6TTKrqztTtgq1Nnu9klvFcvtwisfo98bZvAtHDyxf0X6buSWtshDHWITCQReT0gZ/v081MEniQyBF2RphA22XXLaLZJ1ahDp4so3X9d//0MjtzZUTtmcm+qh5TDeU/xiI2DvpyHTRgOxcXMzL1N5vdLvQ7b2+r3c+dub0WwuTkc3jk7OzwWj/Io2zsk3JlE4CQknHHixm9yt6aatB0wLI7W1sgUx9i5fJGVVY3R0UfX8+Moly8rkVO/UYMffpuCs7FvFI52a/j//Dv4f/THiLExtGLhtvTGUeE2l9tmqTvGcm+Ui/n1O1aazM+ryNHBdFTcF2d7W/mEguDuZ/K6fnh0w3Hpk9HR4QJ2NM1zJ6NxJgN91+PWjQa+1sG/x3jxXH2X/PUV7G6X2eBjIt2gW66ytXCZDnlkKPD8CNf1kYFE0wTVdB9bG9A59yzUbQI3YMcvUYi2WS88DZkKRblJ3/Me6Fg4mjqsezkVJTLcQ6nDzStNEJV90zcMzcamqURoNntnI/cnRUrVPgCUYM3l7n7/+2VtbejBOlo5GL92HKS8ePHOx9vjaO+QcDynLiH/8//8P9/v7hhfno6bXdyBf/gP/yFPP/00qVSK559/nt/93d99TFubkHA6HNdpODYsVn7px7n4RbWS7ewMw/WPGiHg4oWI6Oo1ttspnIkFRCaN7PaQK6vIMFS5liCAXO629MbRMQjmoENO7yJdl7Wl4I6VJratBIqUwxTT0e7Gy8v33v548Y/PwI8SN+uLjasHic/kj0ZxLEuy2+rTbIWEYYR/F32Tq+8y/97bpNstAtOiW6zgpdIUdraYf+9tUt02AkEUir38lcDUTSJTEvh9etKCchVztIw1N4nxuWdwKucQus7AyKvRCOU7v/6dOJg6lBJqgzzr/QMKz7aJPJ/GrnpzoyMqNTm4sUpvo0UURvtpwoN9hB42cSO/UokHep934/r1PXEjIy6WdtDWh2nXIFDCPo7sPfXU3cXNSVJ9CY+GMxHBee655/i93/u9/b+Nu5x6ffvb3+ZXf/VX+frXv84v/uIv8g/+wT/gl3/5l3n33Xf53Oc+9zg2NyHhVDiu03Ac6tZREZVr15Th8sqVR1NJchStUWO28x7L+YtsODYzbOOutci6HlqxQBBE0HGwkYiZ20va9Zlp+OpX9s9wx/wG7egizugC0U/Oos8c36zm/Hn1XldWhs3/jkZx7tatGJTA2d1V9z9ugYz33XGRmmJRiatW63DErO/6dB0P3wctuIu6kZLxxauYjsOgkEciQNMILJtu2SLdaFLaXKczXSXyTYTYS5NFGpmRHIPdCt2dPn7RIJ/yKOYEWFlkF3AcgqlxjEL+gfw3B1OHm0Lt//F0c3gH12VTm0ZYFiPRJt7vfJdobZ1r/RnQdWR1BPPyBWaerz7U4+9gmufGTh6ZLZDOaLd5Yj4JUaSOKylB1mssbPwJ/vow8jIYn2dj+lW0avVYs/HR7T2Y6gOBGxmkHmN7hyedMyFwDMNg4oRH6X/33/13/Ok//af5D//D/xCAv/pX/yrf+ta3+K3f+i3+9t/+28c+xnVd3ANuwXZcx5eQ8CnjuE7DMfHZZNyE7MoVJQQeVYoA1Nm+6TvMVNusOaNcq4+g97uUsxZjokZNH6et2VzyQ4xjKqPgduF2SU+z1Kmw7GncKZYbV1D1esrUWi4Pozj5/LC78fz8nbf94OiGoz6bmFRqWPJ88LwrToccFThxabjvh5h3KaNPd1rkGjUGuTwIgTzo0xGCbrZCutfGdPsMtKLaNiGRgKZruC8+T/f/aTPoeKT0DnmtS7sDst2lnHHRLj+Hpj/YonkwddgpLiCAgqlKzqWUBLtNnOplNN8j9ye/S9jp0i9OI1JlAi9C39oi6rTIzj0LhYeTgjmY5llzqgy0HNpIhZmfmQUezmsEwXAwZ6q3w8T7/+RQf
6bddor6zQCx8S5TP/85ShOTd32+g6k+J7RZ61fRtZDzue1jqwQTHj5nQjZeu3aNqakpzp8/z7/yr/wrLN8lvvzmm2/yMz/zM4eu+8pXvsKbb755x8d8/etfp1gs7l9mD9YzJiR8xpifV+MGQFV/HNfL5WERn+2n/Q5j6SZWOGBLTNDSy7QoYocOaDqL/px6wB0mRx+sNEnPjFAoqq+mu/WqidMfW1vDKMvsrDL67uwoYXKvYc2xOTY2kh4lLgPf3T18fSyGjlZsGbqG0OBevmLD99DCgMAwCaRJU47hyuEMDM/IIKIQU3oEvg4CDEMiUKJsUJ2iMXURkc1guh30rTXWmjm0UpGRN55Gq1Yf2IsVpw7rqWnkbp2S3EGGwwnq6/YFtEvnGVt7dz86sckkQmjotoEYqXDOu/rQpmUfTPPU7Bmc8hRaJs3C7lsPLc3jOENxU61ETC59+1B/psX+BE2tqt6bf430+987cZXgmpxkra8+jIrVHd7hEU5RT1CcusB57bXX+MY3vsE3v/lN/sf/8X9kcXGRH//xH6dzh2/lzc1NxsfHD103Pj7O5ubmHV/jN3/zN2m1WvuXldgJl5DwGWVkRFV9gDJLxpUtD5v4bD+q1SkYfSrpPuNyg005wbYcxRq0yGRA2jZLvdETz+CJxyi028eXaoNa6ONFPP73PxrFudek8bhc/E4+nLuVhB9nNM6mTDKWyb0yM4FpEekGRuATofJIIcN8kgwEUtOJDA0ZamqIpaFhmkpc9TuCIJPDvDSL+eM/hv3/+rM0n/0x9KcuIauqG+Mn8aXoM9O0P/+n0MZHqQ7W9z1fzC0QvPQammWR3b6JVq2w7ZaIpMCJdDQgbw6wRgoPpfnkwTRPd/wCNTGCEBoXq3X02cPNIB+UdnvoW5uehirDyEuExtu186z2qiDgUn4TeyR/ovfmGymuy4v0+6pz9UJui5LVH94hmUf1yDn1FNXP//zP7//+wgsv8NprrzE3N8f//r//7/zr//q//lBew7Zt7HjATELCE0ImMxyU2WioSM7Fiw/3NY5WeI1USvRTEdXOMlvmBCJ1kfNTEUtC4IY6Ow0YuzB9ohk88fyrxcWhz+Yoo6OqF0urpSppNG04UmJrS/19pyqpmLjix/ePn7F1p4qpVEqd+R/y+kjJVMqj2+9gmy2cfOHYF3fyRbrlKoWdLeReI0ERd8KREs3x6RfGCLIpcCVhIAg1iaZHWKmQRhNswyCXMdDLRfS5ItFeBCI2Pn8SW0erBVq1SuFny9jWhX3P1/VmFS3SmBZrSM8nslO0exk2ByUA0ukGk2k1TPNhTMuO0zyD0iRLvTEaXo6XKjf2duknT/Nsb6tScFCRz1QKwrqKvDhGjnd2LiOlIK17XMxtqGPhBJPAd3dht11Fq1bI7NxiasQYltuTzKN6XJx6BOcopVKJy5cvcz2OFx5hYmKCrSOno1tbWyf28CQkPEkYxrClfBDAxx8P+5M8LA5WeNHtMWNuYQmfIg0apfPckgssiJvI3TpNawL3+S+eyFRp28MuwndLVcWjF+Lmf7E/JptVUZyNjbu/Tvz4O90v3oaDU8JhWGUVW/rC1TW83/knjLz1/zCzeIX5t9/lwrtvkqsfyW8BCMHWwmX8dBqr30eLArQoxPBcco0ajp2nMTqL0ASIiCgU+D6EYkDf66Ohk7YtLEtQLA6jXHEfmAcxF8OwWePaj3aQrRZTk+ynDoPSKFGkqTlgZQthmdxqVnBCEy/SEQKmMntq4SFFJ6QzwHVhMZih4eUYtVuk9QN5wU+Q5llaGoqbixeH+06kU+xqY3xv+zxSCkpWjy9Wrw916l3eWxQp79vuLiA05n/iHFMlldqT/f6hVN/jnkd1v404PwucegTnKN1ulxs3bvCv/qv/6rG3v/766/z+7/8+v/Ebv7F/3be+9S1ef/31x7SFCQmfLoRQIifu63Ht2vBs9WFx1Cj8TLPJ1Xd7hDsurVrETavC+YsRy5MvsR5WS
d8hWnKU6WklytptlXY7zjCdVz5dPG8YhTkaxZmcvHMUJ94P/f7xt4+MqNff3T08jiCfV6mxVgsKveHQ0GxpgrQ9guz6VGrXSXfb3Hr+FbqVkUPP262McOv5Vxhc0dCdgKxTx9I9WqMTbFafxrEL6KguxqFvggyxcz6mHZIlR71nkk4roRULwFh0PYj/JjbydlfqBM4IthXhr7HfryXuWj0/D8Ks4k3MEd7osZWewdJCxlNNsob7UKMTkZXiplxgt5dlJNfhfH4bQzuwMD+AkLrXwMyPd6qsWp9HNttcKO8ym20ceOyd31unM0yJmqYy+AsxRWgPqwQf5RT1u/Gk9uI5dYHzF//iX+SXfumXmJubY319nf/sP/vP0HWdX/3VXwXgz//5P8/09DRf//rXAfj3//1/n5/8yZ/kv/1v/1t+4Rd+gd/+7d/m7bff5n/+n//n03wbCQlnnulp5TXZ3FTRjvHxh9s/5GCFlz43yzPPRVx9p8PuToSX11mbyHFuWtufSH50WOGdiFNVN2/eOVU1N6fe082b6nnjKE4mo86qt7fV+70TcUWW4wzHLcTEouroWIfhYMmI4MNhOXAKgdkx0Uqj6BkHa2WVscVrdMvV295wtzJC41wZ2Q65VYkgreHkiwzqBSJHQzNCNE0SSkDqlAsG6ZSB09bxfbWtqdRQ4JimWrjvdzTCwanuG+nnERmTGWOZcHEXuVtj8FM/R9TMg+9hVoCRKmtTr9Fcvo7WaZHJSy5kVpH9wUObli0lXKtX2c3MU2yvMDc+2J9Vpm6/fyEVhsPmgKapGvjFRBG8887eJPDpKb4gvk+2voMUd58ELqU69uIo2uTkUGjC3ds7PA4OfrZxRRgDl3BxSfmIPsODP09d4KyurvKrv/qr1Go1RkdH+fKXv8x3vvMdRvfKF5aXl9EOHAhvvPEG/+Af/AP+k//kP+Ev/aW/xKVLl/id3/mdpAdOQsIJKJXUori4qKIbvd6ja8amGxqXXi4irqsF2HSVwKpWlW/mpMM641RVp6PSSJPHVOemUipSE0UqEpPJ3B7FuZvAmZxUXqW1teN9SnfqeiwERM3Ooc6/AgilAC1NPptiI18k39gl3WnhFErHvLogsFN0qyPohopOqNJ1jSgQICQIiUDDsgSuI3BdiWUdnt4dPw7uz39z0MgbTM1Cz0YTEiNrIzNTRFevc+P//R3IpJnjFu73NZojlwinv0Bn9jnMnVVmvR8iNreQDzE6ceUKbGxqFC6OM3/jR2Q2N5HVu4uNu3FwYGaxePg4chz4znfU8WPb8NpPFNC2fuKekRfXHc5jE0IdO8elB+/W3uFRcrQXjxuZpLTg2LEbn8VePKcucH77t3/7rrf/4R/+4W3X/cqv/Aq/8iu/8oi2KCHhs41tq1lN166pQYGPsimgYQx70aytqS//iQkVFfE81avmJF0b4lRV3HfmuFTV+fNKpCwvq0hPHMWxbbVw7e4Oy+eP2044fr4UqFENtZoSWXHlFajtcDyP0A0wRlUhQ8PLsjUoMZpq4QQ+2DZmt4Phe8c+dyxKju5/GQnCuMmfLvcqsySeayBQU8aFGJbCx+vT/Q6bPNivZbk/jh/pXMgrQ5LsdOm2fMKggV1IY0+MIR2XnSWH9dVVzAvzjD4/wVTeQ/A0YnLitoGSDzKH6coVdbzYNpx/vkz1uR/7RGmeXm84RuJo5HJ3F374Q/V7qQQvvXSySeAH51BVKqq55Fkj/my7xRm2O8qneiG/iSbkE9GL59QFTkJCwuNH15UIuHFD+VauXLl93s7DIpWCqcmIsNlh/QqIDsw+m2d9Q6PXU0bPo7OejmN+fpiGOi5VZRh7s6D6auEplQ73ytG0OwscUNtQrysRdTDFEN9Wq6nF8KDAKRZhsG3RM4oUB646MxZKcfQ9DZeQrIiIbBNSKQSHp4YDSLmnbMTRWwSBb6AJia6HaFJHyghDpgk1HcNQIi7u0RMLuvv138T9WiI7hezCjltgPN3C0
tqqsR7TCFNwLrON0AqsMMugnGawGZB+7z1mC1fw+nWELtCmpjD+1I9jnFOq9UG8H0tLKvKmaSr1qITDYbFByqafK+BIMByPbMo8VKV0kIMDM48OxLx6dSh8Lly4vTHkcZGXIFD/N7EwXVhQQuws0tr1WenNIFJlhIC81R9W64EyaT+EarezSiJwEhKeYC5cGJbK3rhx+wLwMKYgh6tr2G+9TWm5h+uUWL4yRvhRwMJPnGPVn2J7W6XNjnpfjpJKqUW821Vnz8cVTs7OKrG2uakETrzoW5aKdNxNTI2Oqts3N28XOHHawTsShCkUYLuQp1tdIL/9fbT0FBld3cndm/6d6rZxpqYwx0cwnIAwiogOapm9tJc4Kn2kJAwMhBGg6WAIyOYEOSPNVk0ykumS67S5upkHmcOy1Odyv/6buFnjSqtIPcpRtTsYwkfu1mk1QjA1UmJAiwk0N4Ubmmy1s6TcTcbbV0n11pBhgPR8wqUVwg8/Rv7KL6ONj9+392Nzc9iTZnb2cHQvFhut3oC1nQ6t2g5RpAaQFjMppkfzFLOHzcbr68Mqt4NdvYd+G/X3yy+fbL/FHjZQx+vc3P3t68dFbHiWnSwYOpmoy3Spd3uU9jPeiycROAkJTzhjYyrysbqqzmarVbXYP2jlhZSS3sAnCCP0zU2MP/gDZKdLtVrBK1lEHYe1dRv+zw+Y/wVYC6ZYWlJps3uVN8/MqFRVPOH7aKpKiGEkJhZBMzPqbHt7W0UF7iRwDo5uOK53jmGos/eDYx0MAxAa3oVn0ZxrRKvrGNUypvBwB2B1dglyWbwXXmSiWiDveDS7Lr2BRyQlkQRdE+ho2JZOFEUEoRI6QpPIUCPSdFKmQSGr88xCgeUf9hnc7GAGH2P88CPqwRuIfA4zPQr2/c9/EiNVxNQUg6sDBjmDctgmu/FDwlqdLfccDDqMGausDi6w6c5gax6i3yI9aLDQ+SHINBQLkM0ifJ+o3sD7h7+DdvkiMu4GHG/UXbwfjYaKzvm+Skke54Vq9QZcXa3heiGZlIGua4RhRL3j0HM9Ls9U90VOHJ0ElYKNPzPXhTffVIZj04Qvfene40ykVOnPuMXCzMzDn17+MOh21f9xTGY8z9iCj7y1BCU1DyvmSejF89lzFSUkJNw3udywoqRWg+tv7TzQFORWb8BHS7u8t7jFB4ubbP7eP6O9uUswNobIpJlMt8hloVTV2O5lWfrjFSbGVUonHnJ4L+I0QmwYPUrshWg2lRjJ5ZRYMQz193FdiWNiI/LR0QwwHNvQaBy+XggQleqhae9mpw5BiDt9kc2Xv4Q3NoYQglzGZnokTyFrYxo6KVNH0zQMQyObMjENDYGOoQlMU2KZGpVMlkoux7mpDFWnRve9m9DrYqcN9JlJsCyiZpvw7XfRmsds+D0QmsbO+ddpWyMU64tUlt4lqjeom2MgNPJ0WLYu0d9sUwp3qPXTaAOHeecDNBnSLJwDU6WIhGUhKmWiep3wvfcRlTKh1LnRGcePtL39ddj7AWphVoNiJeWqz/j0gK7jIfcOCBlFhFvbbP/oCtH2LoW0gWnoaEJgGjqFrIXrhaztdIgiyZUrStzELRJicVOvw5/8iRIqxSL8+I/fW9z0+yoqGIbDsvKzJm56PSX8Y3GTSqntnJvXsF57BS2fOxO9eB43SQQnISEBUGezTz0FV65EDD64wdX6KBfniwhNVQXp96i8OHp2bfc7ZGq7dDM5+p0BZTRW3RnG0i0CqeNnTRo7A4xrHYqzRVottchdvnz37TxJqmpqSqUnlpeVIIqjODs7arE7moKKKZXUc9ZqQ0ETUyioKq7d3cM+F8NQi6k2PY31y8onknlPMOilMV58DgZ12j1vGHGIIkxdxzYjtUijEQQRkZQYuoatG0jbJKObhIEGbgTSJ5M26bz1PpFbxs6ZaGmbfpRGmCaGJZDdLUq3vo989aePXbDulm7spccYnLcp/OhbFHtrCNumboxDxifvbdLN5Wh6R
aKWQTrTo+KtU+mtUM/P0zQn6eMwxhYmAcIwkJqO7PYgiljsKtUYSh0zntK15/2Ien0GK7t8/LFgt22ilUIco8uHSwdST36H1Hvv4S2vkm50mLEswvFx+p97Hn9PkQohyKQMGp0BP3o/IGWZZDLDcSWgIjpxM8iFBZWyuherq+o4A3U8POh8r0dFv6+irvGJgW2r93wwEqrPTMNXT78Xz2mQCJyEhIR9hIDLlRo3G+s4+TI3uzaTmTob/QrVVIeK1T228kJKydpOB9cLKWQthBDonoceBmipFJ4f0ugOkKZk2ykyka4jwyJbPYNOMyA1NizzvlMp+EGmpyI+/l6H+o5HKQR78rA3qFBQAmcwUOIjjuLEVUedzrBD8VHi7Tg6QTzOshyNMhWLSvT0epDLKZ9IegrEOhgaXJ6pKs9If0DkBmiaYLyco5izaXVd1g2VmvKDCFPXSJk2ucAjs7nKei1LJ8ohNBe9s06rVSNKT2NrAYPQZKmnXNNp3Ufkc+S2P0bufn74ueyJmvDWMuGVK0SNFviH0411e5p2G/IFjXLBxxh9jm3GkGEZGYVsbNk03Sx50WLLzZEzPC50fkCAQb04RyAsJIItJplhRYU6LBMGIUvNMhQhbzqHetjguuD7DP7Zm3y0XWVnkCcja4xXGwSp54gmJwnDiN7NJVpvvwkygEqZgWZjyxBzbY18q0nnx358X+SEoc72hsm5MbmfZgWIwoh3/lmPZiNCGAZfeCNNdeTuEQvfV4Io/tzPn380BvwHxXGUeI+PRctSfqA7pXhPuxfPaZEInISEhENIZ8CUXKWVT1HzbNb6FQahBYM8/cBi2g5vq7zoDXxa/QGZ1HDmzkDT6UeCQbtHYFu4QUAmfYuOdo5Np8Kktoq0cuy4KayWisQ0GiqFlMncOcoSe4MmVnZZcca4+l2dp877t3mD5uaUYTVu/jczMywX17Q7Nw2cmVGLx+bm7T2C0mm1uBycW1UoqOdsNiIyfbWApNw8yAK9nsbsbIpCxt73JRm6tl/1M1WVrF4L8QPJF59JYRoa9Y8a1BdvIJwehp2HyAIZInZ36DQc3AmNvBUSSsGaM0pa88iZA4gMcIdjC+L9FF67TrS8ClEI1SrauVmEZe2bfXee+zN0BlUmrQEj0Q6UJ+n0xthxyhTsPtY4yKZk262QcptMZ9qkxotcb14EoSHREMAYmyAlsu8g8nma9gRuP0AvSibSzeHxJSXh6jpRz+FqmKeWGsfOtBgL1yhs14n+eZPuj/84wdgY1ZsfI7pdOtPTFDMphNcj1AwYH8fY2iLzwfu0RkdxBga72xqaiJiaGoobZ3Gdb3+zid/sY0ifL+beJxWMEd7FR7a7O0xR5vPDUR5ngcFAHdOxsInbMJykNcBp9eI5TRKBk5CQcIi4qqYc1UlnA1Z7I6R0j4avyquu90c5b9QOVV4EYUQUSXRdnRF2ei5bvs5svkhxZwuvMgJC4HkOGWOFPjOsN1OMnpN40wbrGwG6rjM9LdjaUlGcVOr28tuDXVnT1QqZQhanL9i43mBi958eqsxJp4emYcdRUZx4+KaUKuISV4wdTN+k0imQVbrd289uR0eV+NndHUaZLAuiWo3md65R7Xwf6fkY2hih9XlccxSerir/Tfp2s4cQAkM30DUYK5vIKKJ25QbeIMQoj2B4GrYfIISBnivSbbi4PY98Viete2hEqBaAYISD/YqYeD+F7Q50ukqNpfPQ7SFv3EReuog2M0VzuU3nwxVSz5YplgyEZbLezuNgUjD79IIUbS2DVfUwulAIHC7/uRdpBD+G+O0/pNlPUcr0KGptrLCP7Dtgmfh2nsbUFxCBz3zzXaRR2m/QF+7Wkd0e19OfZye3gCk7FP1FQh3qhTLZ+i7yne8TvvQS1s4OfrmMF4SEUsPXCkRBC83SCUslzO0t+it9GqKC6wUsLAgmRpXyrH20wTv/1xaR61PIS74wuopw03es4oqiwz6wubl7V/Y9LlxXpdfib
dN1lWa7355Hj4OHUXn5sDiDuychIeE0ESNVtOkpwsUlUjMpzuc3udmZoGz22HILjLQXuTnxEk+Xq/tVCoauoWmCMIzougEbtQ5BJNlauEyq0yZT38XNFogEmF6DQn+XrdQFVq2nsIMdBsLm/SsZBr7JhXmLrS3VIfZg9cvRrqxCCGZpcC2cpGPPU27+EHHEG3Thgqp+WVpSEZuZGZVBib04Tz11fLWYXnoe//wzuG71kMjKZNTPdnsocMLVNcK3P1Rf6BN5RMom0xPIzTa9d2qEF+bvUXV24PfdGu52myAzjh9ZBKEGSHQRkc0JtnOjhF5I6IK0BZYICKWGRFLqrqBdmIZKmeD/+L/UfqqUCDc2IaO8OpFp0etE5NbW0Z++zE5mgcZuwDm9w+TTRdyPp+ld86lns6R0n4LRw5UmG06ZfLjNpYsgLl2ifkMjfPUNSt97E9HrUQ2vE5kmIptF2DarxefRP/8Cc5Muxg+cw96PsTGuuHNs2POEbpcobCGFo6alawI3l8fc3qK5vEo4cJHFEl5gcLM7jhBQ0lUq1DQMdrwxWnWJkw2YnPVYmFZicvFmxLU/aBG5PucmXC7ktwHtjlVcB+dIGYY6bh5F48v7xfPU/8HBDtULC2crXXaQszbzKhE4CWees3RG8CQgNA3j1VdUF9TVdbRqmQuZVW40R6i0dqibo+TPzXP9htgf2plNmRQzKbYaXRpdhyCSaAKc6ihLL7zC2M2r5Bs1DKcLhkG7XMW9NIXMlOl3IV8a4HmSj66lEAJmJw1qtzpc+bbHU5eV6DrYcVcIQSgF24MiM5ldVvsjLKef4eLax4e8QYahtm8wGDbw03UlcqIIujfWMX7/9l4tY1s/YrnusGx8gUuvHe4OGEeFYCi6GAhVapv2kEA6JxCFHINO/b5a4UtnQBhEBFYKNzTohSl8qZPWHVKaj1ecwNj1iLoDGroNIiIlB8jdOsUyGK++DPXG/n6SvkdfpnCNESo02BTT9LM2C+0PcDsRjpbBlB4ZbYCmF1mffZ3u0lUKnTW8TJm6yBIGkqyzw2iuz8iXn+H6TQ0pQZtfwBrJce7G76NtTO2XrN0svIR+6SLjT1fJVkHO/9Kh/98b1yPWlhZxwxAt8jBFRChSWMIhiiQDYZD1PBzXpxeC2wrYNacpGJLJTIOUsOkPfBabI2gM8DWdy5dhZqxKIZPi7behue4Qtbu8OLZOJR/S9tPkDWfPhzWs4op2aqw4o/ujLo7OkTotjgqbs+gDOspZnHmVCJyEM82jOCNIBNO9OVp54Ts1RsJ1lvMX6U2MsusYZNc69N0UF+YtikXB9Eie5e0WfhipbhsSAinplEfovFQl3Wlh+B6BaeHki+TSFoHbod+xaHXBTveJ3IhrP+hjf/8WqcY6fc/k6luShXmJNjOjjoGUCqkMQouun8YJbdKGRz8y2G4VmDnSlXVubm+u0YZavOIozvZ2RLi4zPljerXYsynk4gD3wxvIL1ZuMzC3WirtZXeU6CqVZ2ghWHMqeJHB+dw2phbhpwqEq++duBW+SKcQhk7oRzjYBJGOEJKsMSBCQ2o6dsbALKao9S2yUYei1UAbH8P62ZfQZ6YJl1b295OQERvmPDIyqegN+mRAk2iRz1q/QiNIM26tM3NOIwjAL4zSP28Tra2T6W6RCTtsMUGxkufZn5+mnZ1CdlWKbnQURmdHyb7xL+7/P231shiihGlp+xVHB70f6+uwtNvFB/TAIWV0yegDMsJVolNKDN8j1A26xQrbhWm8pomb9SmwQafZoiclu8YlMt4Ac2aMF392klzGJggEf/zHexVtkc9rxvewcqNc60yBBCvrk9L3ZnHYNoNaj8WroI3efY7U48T3lWfsoLBZWLh3Kftpc1x0FTj1mVeJwEk4szyKM4KzFkI9y8SVF+2lVdbev44XRKSmxhlJ56nXoO+43FhRnc8mxyzyJdXPxdAEfigP9+UV4rZBk92BjyZ8DDuH9G1CzyLnbWMtb
XOLFDOVInrexvd8tq6vMbryXQh82BuJkDVcEBBGGgW7T7+XpiXKTJgp9MMvTbmsDMxbW6rXja5D0HYIdxv4lXFSQlBzc6R1j4zhIYSgWBG0ajXaSw2KC8P64GpVCZydHZiWasxBoRLQcmDDqVAy1dhxSwvwDZOBr2GfsBW+GKkiKhWi9QGOXcWXOqYWkjMG9AMLtx9ilgvkXz1Pe9FDMzwys2CP5NFn9vrM7HmoGLiE6SxkQkS3RyNdBAHFsIanpRlgg+OQni2gj1W5fkP1iSnNFAgncnS2ZujVHQpWyPmnLcyZEks3lLAb3RMFSsQoAdPrQdtR72NhQdJ1Dpuq63XB1avgWTZh2mKsvYJn2FT0HSIJ/p45KtXt0BqdoG2N0h4tMd6/xvndd0n7HaQPG+Z5tGgJ0/YZf2MMoWm02/D22+q1czl4+Tmf1lKKm/UqwoaU4Q3FDbDdSdOU5zAti3L57oNYHwdBoIRNPFdMCGUePqsjII5yXHS1F6QomM6pzrxKBE7CmeToGQEIAqlhZrQHPiM4iyHUs060tk779/+Y/NISVhggLQtveobs059nyZ6kP3C5tqga1i1vBCAlhqbhxy1f74GUoNk9kILQNciubmK7dWrZeZYdizHZxrZsWpUFUvUPyXptwt2aOkt0BiwEV7kZzLEhC0z2P2Jr5AUWW1Wenjr8OuPjSuA0GqoR4MwMeJt9dtwiepTnElvUXVU3fqmgBk2O5hxajRRrqyH57M5+xM8cqQIa/T6IihITtq9ETSjVsRhEGpYe0HU0BnqO0pFW+AejiPQrkEkDGkLT0C5fQqstEzoenmZj6x522KW15eAb06SnJ8jmdFLFNH6URhRh5MCacdBDtV19Dq0iGe/fYN2ZACug0r3FYv4l6h2LsWyPhZ98Cj9QERzHURe930FbX4VGSFrWmFj7AR9950XE+fMwfg7QDnUaDsPhTKfRyQEfL++Vxe+NUrBkhuZ2ASKTIAoIJ9P4fZ3Zxg8Z5AoEhonhe6S6Hbx0muWpFxj00pBLI2YMMjstogFspC8RaTp5rUaWDo1//hbvd0fYDWYpZC0unje5fBnW16rUC88gt3YYL/YpWnuVZVJwszNOVK+jjVdZeCFP6hSNxEGgzMPxkFchVMQx9SmbnCCdAZHr0ypWqXeK+wPX4rTgac28SgROwpnkuDOCW3sNw9KGx1jZQ9zHGcFRweRLA11E92xe9yQTrq7R/+3/D/biEpqUe6ZLgb6zTbSySvZLP0E9OEcYtfj2933yJZ+BZ2AVnRMbNCUQRhJhdWAXzE6bIJ0irzdpySlW+hbn5BpQYCt7kXNhH6PbJfyTN9V3qK4zIpbZsubYHJkg9/x5XKGxvX37dOfJSZWmWl5Wi4iZtgh0i8jt4kU6ph7ihzqD0CClB2jeAEKT8P2PGHz/w0P9Y+TElxGVqiq73hMTsrhASlNzqLpBCkt4SCfCnZs91Ar/aBQx8N9AK+QIn8qrqFm1inbRJLo2IHJAG/QxRZduZZrQniHKZHAc5ceIz/gP9vQ56KHqbPYR+Rz2/BRi3UJrN5GBJEjlEaUSmZcuYM2NcOWKGmWRz4NsNuh8fJPdQZaSMeCi9x6tXYcgvM72UsDU3CJjP/0Muj7ssHj9+t52lAbc3Dw8SsFxJFevCgLfoVQICfUuTjaP9sUJeh/XSdV2sYI2kW7QGp3g1sTnaZnqw8uWO5SWNtnJnWNn7Bwiiihr26pKS+bZ7szSveIymPEYm+0wOlniypUUUiqhOOddQd9qIatlWlqZ7U4W2amTzmqc/9kZ9PTp/K+HoRI28SgJIVSDvrNStXU/dLuwulXA4yKik0LYgICpdH34PXBKM68SgZNwJoknHMd+Cw1J3nTo+GmcwOJWNIvs56muhkyODCtt7vh8RwRT28vQcHMgYDzVJH9KIdSziowi/G/9AdHNRYQEmckgdR0ZBMhen/TaKmM/eJvaGwXajSxYXRp1G8MWeLU82XIPTY9O/npAy
d5GCwN8PYOt+eREl+1gkiUHLulryNBkcTfH+fY1xGCgVgVTJ5cKaJgTBBJsK8JFpVrK5cOmzGJRCRzHUWfMM8/kGbwv2dm00FKjzGe3udUdZ6U/ysXcOtHKGhUnRU1oNEemqIx5+xG/wqZP68WfojNZJbcnJmStTiqbR2o6na4g015FWJcYLJzfF83HRRHFuoVsNPF/99vw1a8A0xilArKSQ3ZCjFIa+9lRatsG7Ep0t8f2dhrb1vanox8VlPrMNIOf/tOIf7aMvbXMipeCtM258zrbE/8CzWCG8dk055/TGAzUghsE0GlHhNe28FzIpzxKjVtknHUWs8/jaAVGe6tESxrpP7pJmPvT6DPT+31ZsllJa3C42WMQwNZKGhnqIDycsI+hC6ZmXRqDMjtfeA2zNfRm1eUEga8+tFy1Q6bTxG8atDNzhIZJSnQJtBTgcjN8kcgGu9/j8/NbNPQcb3/fZXbMxrIEC29UieZ+Bv+7b3PjlkEYRKA7TC9YlH7sc6cSrQ1DVdEXD239tAobz1NVZ667d0U+jzZSobxzhUo1i6adjZlXicBJOJMc9BGQSSMETKSbTKSbDEKT9WYGX9dp+2k6V9UXxciIGqR4XPTgqGAqmH0aXg4kbDklNqM8lrPFfGeAPXb74580ou0dgg8/QtM1wlQGKTT6WomUqBNlspjdNsWVW9itJtkSBAMLB/A9AyEkvUaWdMHBsIJ7vdQ+vmmxIBZZCT+Hr5uM2A0G5Gj5BVabXS43fki/L7mZfZEL4+tErkfkuJiWztxFgxsNqL9zi5lfHmVzS9tv8HeQuPnfjRvw1FMa9tPnCRvLBNurMNEHGSLdAL++gdbvU8oIGqNVdtAoiw20vYhfcWWDxtXrbI+VKV5ShuziH12jtdHH7QowJSOzVbTUBdxsAbiLEdM00a0cUaeL9913kc9Mout76SpTA+Gjffw+3noa3U8hNx3a9iTluQKZ6cId0xlrO5ZqwCe2WJKTCCGw8xZOdgT8LKm0EoA3byrhZ9tgDLr0uk361iiF/jqm3+OD/JdJiz4C0NM28/33kbtlgrfeoZ2ZxHE05XMa8VldHDZ7jCJYvWXT7+qYZkQ6FdJxAuZnQ9q9gIEXAoJgz5vVb2YJA/Vc2UoHIaDfyOJEGQJNLVUDmcMJsrTkOBKwdYfPdf8ZO80/g5NO4wceuaLPzKRy5bqVaZafnUSb6aD5Hpcugj72+IsKokhFDuNqLSHUpPS47cCngTBUHrZ4AntMoQDj4xrkZ/F/90MVmayW9/seRbXGqc28SgROwpnkoI9ASx9YDABb85jr30I/P4/7dJ6NzeGcoZ0d9eUxNXUkbH9EMFlayKX8hpoy7RZpdXRcLcPN3QJaoNIb5fIpvPEzgtzYVLOEikV0KegGFjfNy1hRm9noY7DT2P0emWaNQamEnfFAC5HtDGGo4Q/UWbiV8bAz7j1eTeHki/TKVeZ2f8CK/Soeac7ZK9ySM9TbBTb9MmWtRpTNsK7Pk8n0qKUrLHR+gLG5ybmZNMs1nY2rHTITRRxHHQ8HZ0odbP43GMC5F6t4vqDxkc1y/Sbj8mM2mGar9DRTkYc+Po4IBFtOiYLhMJVpqDEU1SLR5ib+tSWiYg5tapLRX56k/36Hbi0iW9UoPZ9He1PbX9SORhEHoYkmVJRLCNCqZZzVbfzxLmFYQNNAC130zVUCtvGNZzBsk4zWp9Nu4lzvEI37VF+4/aw4XF3D/a7qzdOpzKOJMqPssnZzwOatZSpf0JifL9PtqrNxTVNn405b0vUyFNJd7EEbz86hi5BVzjHDMhNiC12GkM8zWN1i81oHUSxy6RK0+8Nmj1LC6i2LbstANyOyedV5OVfp0OwKHO+w8O3Wckgp9sWNui6PKXpEmoEeBUSaSU8WcCPVSqCs7zAdXGHVuEyjY5HLaJRH++T2/vH350gJjbGLxVOZIxVFypvkOMPrZmeHDSbPOlKqaOju7uF+TbatOjwfqu46gzOvEoGTc
CY5rhfLcWcEdlGjULz9HzFu2mWa6h/RvoNgEgLG7CYjO+v4c5fYLOaRUp2pbG2pl5yZOdv9Jx4FkuH+sQ2dIPIIIo2uPoFDmnPRR9goY61q8CfRrZBspUuvnkMTEYOuCi2Enk6m1L/3iwrB5sJlUt02l7rfZ1V8noFhM+d/zKpTZTX7DJb7Puh5wEGieprcyj7Phfb72NKlIJv0PW//y7hWU0L1YMfX8+dVBGdpSUV4UhMVorAE2WmK4212twt4Axfe/ghSNpNRnXWnzGq/ylSmgWy3iVbW0JoVfM+nf72GOTOB/eoriOI0bh9EkX3zahzGPxpFXOmNoGt7AgfAtvF8H6/v46EiPvR7mEGPcGIUr51GJ8SwBamCgdfxiK5eJ/ulMjA8M5ZRxO6ffIB0XMpTNi0vhwDKhZBNzhFt+6RWrmKZX2RxUfmVhFDHuGfooGsYQZdSsMuWuUAKBx2fPllyYQup6ZDOsFSvYHge83NKIB1s9ri1mqbdMNF0STYboetQqjqs1yIGnhz2EZLQq6v/OaFJcpUuUSTo1dW4bj+bZpDNkek02dLGIQIj8pjRrmHqLrXBBP18gZ5lMz3aBQEy0vj44+HnfeHC4///lVIJm/6Bw35m5uxNIb8T3a76Dj0oaoRQ36V3ew9nbeZVInASziz3MwU3LlmtVtVZ09aWKuX1/XiCsEZq7g1GdppwB8GUe/0FnprR9qNB9bpanG7cUM8/OqpSYE8C2uQ4IpeFbg+tXCJr63zO+wHX5AWa2gjXjC/QKoziVJQJJP4e1DRJrtqhV89jZwcMOmnsnEO3lldpB+Rt/XAO5hS7lRGWX/wil7aWmKtfYaU/g+fDfHiDpdwYi/oL5GUbU5SZZB2NiFDTWRWzzPX7jKcclmyLwUAdC7VanI4avjfT3P/oabfVwjMYaOy2i6QpMnoBdm60qOujjAxcchkNS/NxQgunOcC8cQPZ71PRYbfyFK2URXnxlhLjz/0ZLEuFCnx/rxxdFZfdFkUEVeIOoAkJrotvpPE1C28Aoeuj+S75nMSJ0vQCm5zp0PazmJokn3OJajX1Jg/4xuRuje21QA3fNLo0vBy6kOy6eWpukXy+w1TrQ5qLF3GcKrquts91oTHIUCwa5Gs1to1pqtEGa/oCKVyeke+rOVPlEje8WdAHVEf1ff9I3Ozx6lVJv20iBGSyIYYpGZ10afeD/WGnoF6zW1PRFt2IyJR6hIFGv5ndO5YiokijMT5Nr5cj3WyjEfFC5//B0XM0zAnIC9y5Eulyh94gTd4os7Wu1MxpzJGKT67iCeSgosmFwuPdjgfhNl8N9079H8dZmnmVCJyEM82DnBFomqqYmZxUi8zGhjqTGmRHWf7cLxJdvUa+sUQ12kK3jNsEkxAqRTU2pv7ZV1bUIrW9rS6WpRbFs95865OgjY1iPPsMwdvvQruDlklTtiTP9n7IrWCebfMcKyMv0nWmSaVaaMbwVE8IZRB12mmEcHB7NmbKI1iSXNp9i2JTmYkj3aBbrrK1cJluZQSBEkrdcpX65XnSvQ5aY0CnniGzfJO5bJ0bQZFtf5wxucu2GGeMLdX9Vs/RaDtUn53kwufz3Lip1n3LUl/cR1NVc3Nw9apqPPf006ost1bbS1udg91CnmZxgUrtLbT0FGOpNiv9Crc2TC66HlLTKZQ1aukUDZGmOuMRra4TXbtO5gUVUel0lJDq99U2WEeiiMCBXkERUa1BMP55AjuL04DAl1iRRzYV4AQpBqGJroX40iAvOxS0JvagRdTvc/C/IewNkH6IVjRZc6o0vCxls8+ukyOUGoWUT6rb5dZ6RN0bnqX7PhQKGro1hXC3yPZq3GKenOYwF97E6tUQtsXu6DPIdg99YpSxy6UDn7tAeAU6TRffDymVI0wbSqMD+q6PaejYho4fBofEjWEFpAsOgWvgdNJ7zyWJIo0w0Oj2CozQJBX1+Fz3j1hPPUWIrvxF4RKOPU4vytLayVGZzAHisc+RklIdS
53O8Lqz0hX5btzdV3P6jQ8/KYnASTjzfJIzAtNUCxaoPPiaUSWolOm3n6HneWi2xej5PJXq8YLJtlWHUynVAri7qxarmzfV7SMjKlJwFubWPEyEpmH+7L+AbDQJV9f2Y+2ZSDIffIyd9/lg5iJOz8JxyuSqXezsYa9NuuDse3G0uktxY42el0VPldDMCCPwKexske62ufX8KzjVkf0Fvzvw0f2ISsFkYS7P9mCEaGuHyYJJUNPYikaYYoOGKFJw1pC6wW5+gfIXPodtaUxMqGngcbXK0VSVpqnFJ27YNzPDvmcnm4VMVqN7+RKDH90ktbrOdDlg2c2x289xIQzRMmmiqVm2ByWqVme/mVmmsYLjPU1gFQ8JHMcB2z6cdpWpKpFpIX0fum20ao7w8jMEPQ3fh1DqaFqEHTk0w71IR+ARugP6bhc5WKHoLRL8M4kwjH2Bvt3PgaEzJrfZZgI3tHB0HyewSekek9oWdW2Ujpfarz70PBV1KJVg8lKBDb6AJq6R3WxgdnvMhB9CqcigMk2rbSDSKZ766dlDJxo7O7C+YlPKagQM0EwPO+cSIank01iWznajdygFZaY8UjkXt2/h9W0ltqQADXzHxPd0Kq1VyuEmhbEGH5R+Ck2GSKFTtdawmi7FG6vsXFjgqXN5Clnrsc6RklKdQB0UCBMTaj+eVe7LV/MpJxE4CU8M6TR7zck02u0iGxt75uRddblbjjkO1Y6MqMVgZUWd8e7uqothKPPgp6Xz6EnQZ6ax/sU/h//d7xHdXEQOXIyUjT4xTXt0BsvysHYcBr0Ura0iqbxDfqRzaHExUz6aHlC4voLhuexmZhiIAnnqZK0O3bJFrlFjbPEaq6MjCKFRbNX53PIa6fouehgiLBMjXWVVS5N360zkBet9g61gnHHnFpbIkZqdwnjpWZbcKk9JtcDs7Kgz1HxenVkfTVVNTiqBU6up69Np9cXf68GlS3C9X2Xrxa9wfvNPEGvrpHs79CKdXuUcxdkiYbaA7kW0g4zqo2Pb5MMGA9+j11fiOptVzQX7fbVNB9Ou8qZD1O0jIxu9Wsb86lcIeqNEHRUxjDQdI20RdvoMbAMR+WT7q7jSJtBTCF0jXzIIt3fgd4eT1DtaEa1aob2zRDtvU7G6uKGBFxlMpOpkWutsVF6jK7PIvX6MjqMEYCaj9lt1rsCq/gVS47u81v8WemOaUBis+eNo41Uu/tQsxuywm2KrBe+9p9JPxbzJ2JhBqWqTSqtOxmEYcWW1hudH++LGzrpYaQ+nnSbwjD1xo7w4TiuNBExvwAXn+5DW2I1m6OpF8lqDcX0FKQWr2aehK5g04NlL6ccmLGKfXrM5vG58/GwXJjyor+bTTCJwEp5ICgV1iSMztZr6fXVV3W5Ze+bkYwSLZSnjYnwmtLOjFqTFRXV7pTJsZf9p57gUYXqkyqVGl961TVKzfTqNiOZOmkEnTejr5Ec6GNawk3HOaXC++0NW7KdJiz7NaIxelKek16jqGwxyefKNXcxGE0OGTL/3LoYRYU6Ow1636VRtkwktx1Z2gYrTxDMFO1qFev4ZxEyG3PNVMFU04epVJVguXlQzqDodJUCDQInRuHcMqEVpa0sJ1pkZJW52dtSxIQRQqaK98kuYzRrnftTkyndqrBZfplzYwiagYPbZdQvU3Tzjcp2sFbBrWfT76jniFEW7rbwYB/ep+b0OwvEx2mmM0bQatXBF3ScIIIo0jNEyopGi2wYZuGi+i21p5Pw2mmmgz5+DfH6/UaVfnQShkf7cBfrf7tCuh4wUXAKRQw8HjDavsps5R2v8KRAamhiWLoNa6Ho9JQbLZY2nXxojN/mryN0aV66CaVpMXs6TKg8jN/0+/OhHStzE6d2JCUGppEIBUko+Wtql3Q0YtPKAJJUbYNg+3ZoyGMtIgJAgVLm40CI0AVOVW/irNi0xQjsaJa81sISLL01q4RRClxSCPl+46FMqPZ5/uK0tJVpjxsbOrjfvYflqPq0kA
ifhieZgZEYNYFRno/E0X1Bf+hMThytx4sfGxmbfV+LIdZXoqddVGmR29tPXxOsox6UIM7ZFIWuTsgy8YshqqklzO8ugb9FYr5Atd8kUVW2s4XsYoceotcqOPEdB7LIWXmAgM/Rlnln9Y9Jhh7QMGFm8jun0aU5PYhgmtqbtD+zLra4TlgMaL7/OlBtAI0Otn6Zva2xsKoHiukqUbmyoCM25c6r/SNwKf3dXRVLiz7JcVgtWv68EbTarzsrbbTULaGkJVlY1zp8fZfYnq1y7ElJvtohG90YQ6ErItb0Uo+29ZmaFPFFvbz/t9TnpHykiE5qGKBYRWRAhiCMZ0ihSFy2TIZp+Eed7HmLQxNHypCKXfNrDL81AvrZfYh6trbH6QQsZauTTAa3zT5FZb9JuhniBx7jYojiuc3Pq8wyMAnLP7BunpsbG1P4RQgmzTCY26WqsuqOIEXVd6UCUwvPghz9UP6NI7e/R0cMpmt7AZ6fh0Wtk0DWXfKmH1P19D04U6GhGSBgIBp0Mmh6hmxGliRqtjREMrUM3KlMydshoyqheC6cQQIUWlws1SqOP3uyyva3+r2Pi742zxmfdV3M/JAInIWEPXR+akz1v2PW22x22oi+X1Rf4UY+zaaqpv6DO7ra31Rf+0pK6rlRSXy6fljOme01cN3QNfa8s2DYNDCukMN5E7OYZdNN0a3n8gUl+tENgWkS6gRH4jFqrtEWFCW6xFc4hQ51QPstz1ttYQUC+VUcfqeBF0Oz5WLJI2e7te1xKteuIL7xMU4wyWYFwVS06uq7OVGdmlJhptVRqKpdTl25XfUa+f3uqKhZBN26ox3e7KooTL9LenhHXMDVKl8Zp/NChvtKjOqpRMZqs+zl6XQetutfMrKfti9p4lw3uMILn4NTog39LOSzdDnIVZN7B9tp41hjCCOmlA2yrC9TUA2ybaHWdwZvfR7oDNsOQHTnJaDWiNzFCsNtnNGiwVkuxtbuFl/XRJidwjfx+eqLZVIvg5qYSMq++qq5vtVRUJ+66GxOGStzEJur5efX/cbTfTLMZUd+xMHWNYtVFCsHWeixuNDQjxB8Y+AMLTY9I5VzShT69Zg43tEmlm4z3bpEyHdXgT6rWBNP2FmPdbbLnn3qkHXLjNHRMtXrYsH4WeJJ8NfdDInASEo7BslSlDagv8LU19YUeD2yMQ/HH5dzLZXUJAvU4x1GLR7P56DuY3kuYnISTTFyPS4LrHQfL0PYX5OJYBzvr0tnN4/ZSBL5BWNXplqsUdrboli0KWh1LOOgiZCucpe/n+H7mTzGdCkkhidJpTKDlGQRkqft5LuQ29wf2jWZ6hEaFTkeJ0ShSKUZNRKxfdxgv9Alti1WZ5+IljZkZ+Pjj4dwfKQ+nqjIZ9suXTVMJolZLfV6Tk2rB39pSUbz5z5dp9XRWd6HS+Q4Fr0kqzNMtzuD91HOkZibJ7DV2i6JhgzfXvW03Dz+vnoP0XaLtEGQV19X2FnEVUXJdELqOaQg0XeDpaaTsYmvBUBjt1mjtekjZIDeWpi2qiJ7FznoH6Swxmu5SOWdwxZ/F7WSh1cbrDRhMXMAYyTA2pt7z4qJawF94QYnGuAoRODRcU0qVlmq31fbNzytBeXQqd6MBuzs6moCxCY9G12B700YXIZ6no5shTju93wenMNImkhr9dhq3m8LKuPhTeXJLbbrdAprtY1mSGX2NbKtDeryK8eorj6TPSq2mhG5MpXL7fLPT5kn01dwPicBJSLgHmYwynYJaBDY3hybDrS31hTIzc3t3UsMYiqSDj1teVtcVi2pBeFjfzScRJid5jpNMXBdCMD2ap+d6tHsukZQYuiCSEjvrYdh12jsFvL5Nc7PCleqXeLH7/yPXqDHI5UkbEdPhdbKDJmv2U9RzU8gGTJjvkfE8tJRNWvTpIokiwbXOJBe0G/sD+6bH4v5GatEJ2l12rjiU/S000SJnOGRGc1ytX+KZN6pcuKAiNDAUOAdTVXHzv1u31GfZb
u+ZbfcCA82mEjijo6AVCnQLL2I+N4PlDqisFdjq52lYGoW9z7XbVRfbVp9v3PvlIFGthv/RLYIdSUgL99pV3MxLOFwicDLoQieT1ul0NaRuYGVNtE4L3VLhIUtXii2KIsLFJXasZxAT43h6RN0pUMn06LT6DPyQanqHFXmZHbdMZBiIrEG/A4X2GuVLF6jV1DTxclktjKOjaj/F++zcucOpjffeUxGDfl9FLjMZtd8OsrOjRELKMji/AJu1CK+bRRcebigwzYBuPQ96gKZBabLGoJsh8AwC1ySVd7DSHq6V5+rc61S21pgf3KDodjFsm/Szl8j92JceeofcRkP9X8ectejrk+6ruR8SgZOQcB8Ui+oSL5L1+rBrKagFbWrqdnNy/LggUGfEvZ4SPa3WnQXS/XBSYXI37jgr6Q4T14vZFJdnqlxfrdHsDoikRNc0DCEItYixmQ7NHZ9eM0c9muDt6V/gqeZ3qDQ30cIOkWEiR1I0JlI4oU7bTfO+9SXON79PaTRECMGF3Cbr3gReqHNtu8Tli8X9dMT8vDIUZ70a1Y2ruG6Jmj4CqRS2XqO52afYfpdr+he4/KURRkbUZxaGStgcTFWZ5rBnjpQqVdPpqAW6WFSCp91W11cqUK9r7MhRJucgJ2HrxrAHSj6vPtPYaGzbKpLj+8OOuuHqGuHbHxL0JcKeQM8UkIHAuXqLgWYTpqawRQAf7NBOXUKSgZFRTK+N2a1h5iyqRhPZ7xOtbRL5AWJ6DCE0eoGJFxq0PAvP06mYDfLtZa63XsHTdCQCJ7LR7Ai34+C3+qSLOWo1JVS++EW1jQfTsgcjjh98oMRLv68EvGWpz+IgGxvq2Aa4dEmwuFxg0O3iuhEpyyKKfHY38+hWgK5HFMebOJ0MbjeF0CJSeQc7O8DtpTB0QWF2gs//wjmy/c+hex6ZYg5tdOShRm6aTXUSElMsKlF7FgRD4qt5MBKBk5DwAMSdjUdHD3/5uO7QnJzPqy/Ig18+cTk5qPvHpeqxQMplI8aNGpp38hTTUWECaqbPnYTJHZ/nyKwkJ7CIEGQNd98Dc3TiejGb4vLsCK2+S9/1kRIsQ2PghQRhSGl0QDrn09jK0wuq/GDs5xk5t0bJ7iLsFE6hSM7WCWsOwtOpezPIgcvI5jIXR3YxBMyJJZbrKdxUiZsTL/E0GvGac+lixAffvkbB28Wr5Agdn7pXAEtjprJLq96n9NENds5XGB3TqNXUZxCLjVptGKWJBdPamgrxN5tqIY9TWRsbakG5cEEJ26UllcIaG1Niqd0eemdAHRdSDgVOvxeR92pEvT7Bn/xzpJOD8iTCN9HcFv76JkE0gqeniKTAsiKC3SZdGmhlDd9Io41OY9ZXyfXXSIW3kJaFNjnOdlRFFPLYus/1zrjy74QdiCIqZosf8DrbgwJZO8LUQ4LIJmv2GQ036TqzbHbUfnj5ZRVxioWgEIfTTjdvKhHQ7SpxY9tw+fLh42h5eWiqvnxZ7afAtalkBF3dodEKGXSy5PIhqawkXwjp90oEfZNc3iWTBjOlE/klJsZtnj6fZnba2hPcD99M3GoN03CgPuPJydMXNomv5pOTCJyEhE+IrquozdSUigCsrytTaaczPKs/rnQ8LlUPw71mYUs1mm9eo16rQxgymapTmC3dM8V0UJiA4HpnEgRMpetkDfdYYXLs8xyZldQNUjS9LAiYzexi73lgpHPYMZtLW0yU82w1ukRS4gUhmiYgFCAhk43ILHRobufot20ag3nMYsT0OZew6zAYBEwYW6S0FlvjRdbCBbx+FtHfprjxI3TLYO5ike2FF+mlq1y9qlKGuq7e+/n297mRf5oxu4MbGgSRTivIYDgVpnMOvY0G228vkflcnosXKly7rmGaar/v7KgzdcNQC3uhMBSqxaL6LHd398y+gbq+UFD37fXUdZnM8HGtlkppCDFMPeZyUF9u0/o/38FqfEzU7hKtryMrryJTLr7IU29GDDxBkC8QYCMji
U6IXxwhqIHd2oUBuI5DmgG25qOXyhhffAkxNkrn77+D8Hw6MoMbWqR0l0CzKcomxcEmXfMZfGyk5tIOUqR1D0t6tCjhhyalkhI4pZISY7Gp9qB42dxU4r3bVRHHdPp2cXPjhhKPQqjP6No1tTg3m1ApWbgbJlk9JD8iuXBRQlhkpxZiuHD5BSiVJM0W6JogZRlcuCAe2Rypdlt9vvvHcU4Jh9MWNomv5uGRCJyHyMMweCZ8ujkYru/11BdVFA1Lx+Mz4oNltLoOk6wx8sE/pd2K2M5eQJomm16OjatdsuvvMvtLYM0dL3IOChM1HNPHDUzW+6o5x4jZoODfLkyOcnRWUtVu0w8sXCdkqZdGhBZzeoNUOnX4cQf8OAM3IJc2QQg8P6Tdc5FAytTRp3pk8gGDRhGna7N802RU32Lm+o/INWroYUDZyFMovcDG2IvU0/O8k3mRL73sY01WmdW0/R4k164p06twBuD7XBjd5WZvmrFUh103TxgaNB0LMbAYa91k8D2HGx/aXFoImHj+i2xGk4Sh2vcHU1WTk2rh291VC0qjoUTQ008PozaXL6uozeamag0QG2xj306ppEY/pNPqs0/1tomur9GIeoyN59EsC7m5iXQcou0dnGyVtCvx0iV8YRJJjVAKdEKkFCAEolND8zRIp9HSefJWnajZJPjOW8if/jm0aoVwa5v19DS6FpHWfRAGlXSf73depmeXKaYcNKkWzkDqWE4TWa7gWil0HV588XDl3/nzw8W+XlepqX5fHb/ZrNoPB8XAlSvDCNbFiyoaBmpfVqvq73RakEkbfO5z6jnX1yMMz2Uh51C0dDqDEtmURi53u6fnYdHpDIfxgnovMzOnK2wSX82jIRE4D4mHYfBM+GwRLwKgzmC3ttQCsLmpLpq2dyacGqaYCuemKIo6kRRsDYp07Aq93TpX/2AV4/VJpqa02wb3HRUm5zK7hFKw7lQYBBY73Qzb0UVGelkm5Z2/LMXRieudDtNrV/DaDsvaeULfZ7FcJX9LsnAkGhX7cdZ2OrT6A6JQdbCdnyhRKaRJWyaO51NvO9RaXTaW0rjbLs2NiHGRojo1SmRZlDyPieYPWIwG3Eq/zrZb4o8/gi8V1dnr+LiKtuzsKI/IQl69d+G6nM9vcqM9wWiqw1Y3h9v3afoZtNQ85YKFaflcu97hcu3/xvzCL0F+lF5PfU67OxEVqU5ORvQMu0GZdlujXFZn+bXa0CwcRWrxPyhwKpXDQxbjMRD9XoT+4TWkZ9KrTiIyLsgILAth20R+RDQYoIUBgZ0ixKBPmkDYGHobL9IJQkkUCgxbw7QibD2gWvDQSir9uPrPl9AuXcZt9em0IZUaIPWIPB2Kok3fvohLBtsP6JImr3UoOtu4VoFmdoGRnMZrr6nPMxYl4+PDFEi3C9//vopKlkrqculihNypETkDSKW41qgCKjI2M6MEaBQpMVEqKVNypaI+u0uX9gz6N9oUdm+Qbm/g+yE1Q0erVpj78TnyM5MP41/wEN3usJEnDI3Rp3UOmvhqHj2nLnC+/vWv84/+0T/i448/Jp1O88Ybb/Bf/9f/NU8dbFRxhG984xv8+q//+qHrbNtmcKdmE4+Yh2HwTPhsEy8M8aTyRkMtAMvLIFsdtFswWR7DFoJQCrpBiolUk8k09ETIWr2GbHVYp8j6uvpynppSC8ZtwkQIdCGZzdSIIslm3aM3eoG2KNG+oiILMzO3f4EKbTgrKbx2AxoNZBBiWhbnvQ8ZGDk2RIXemz/kI/8lqherh/wZxWyKQsamN/AJ9gRONmXum5VLpJgo5+gNfPz5gOV/eIUNdG7lXqTr1HkqvIFpaciZCS6sfoQ+yLM+/Sr1usabb6rS5fFxFQ3Q9b2USbvK1MQ5jOUbaDMpFvJbREiCeoPtsIxj5GiaBqYh8ERItWqyXB9wbvlNbj37i2SzGu7mLht/+AF24110GZDJ5QjyX6B9+RJTz1X3Z5BduDBMa8TDVl1XXapV9
Vm0WsPxEJYFjbU+9m4LkZ7BCwVBpCHSWUQhD10PaeeRQYQgIgg10CXICGkosdB3NWQkkbpJpOkIIuVz0gI13LJSYrC2i1adoj35LFoUoA0a4DlU0ju8N/KzOPooJbeJ1wnRZY9QD/FKE/RK05RHM0xNKfEYp0VSqWH7A9eFd95RP1MplWZdSK8R/B/qZC50A25yAa1aIfv8BUafHmVxcZjO03UlRKtVJfryeSUy2uttRpa/R26wyyA3hiiYaIHHfO0t9D/4kDD18L4zez31mnHKJ5VSVWGnIWwSX83j5dQFzh/90R/x7/w7/w5f/OIXCYKAv/SX/hI/93M/x4cffkj2LmUlhUKBK1eu7P8tTimG97AMnglPBgcnlYehWqRbOx6up7EUziDaGmndpRfabFMCAePWLhfEdcyx56mlizSbKlVw/Xqc8tLIHxjiqFXL6hvTdZG1BpOlHObPztDOqhSP46gzbE1T0YeDX6r6zDTyT/8s4d/7X5H9PtK28SMDu5IlNz3F5Tw0l1fYvVqgUSnTaGiHpiYLIcil7/wtHd8edVpcdn7IeC7Ph41J1lybZjjCs+F7FPMRolLhfOf7lKaeYjlXZmVFRREuXFARgFJJbf/6usbq1JeYqjWwVtcxqmXmpIPf8Yn0OXb1aTr6CMEgZCG7y65XZCTr0VxZZe61Ojd/2GLw7e9j9+rc0AtckNcQ/T7jzbfYbHfY1r9ApTKyXxUkxDBKMz2tPCmLiyqFVSgMU5FxIzh/ECKDEJEx8CLBYncc2/CZnW4jbhpEbkhkgGZouP2I0PaILBMNgaGFtII0IgwxzIhAy9CPJNZezXnU7tBaaRK2XLy3f8S28QxGxiJ1bpxc2aR0/gIff1zGbWrkJkuEdZ+04VMZgYGWQR9o6Do888xhv1icYg0CePttdbxomnq/86l15D9VJ3OyUmUxNYfwfDI7i9hvrbLs/hSDdBVdV8/X6ylRMzurnm91FXQtYqL5IWLQZlCdQSAYS7comn1k5c7fmfdrAej3lXk/FhK2fXu5+8PgJNuV+GpOh1MXON/85jcP/f2Nb3yDsbEx3nnnHX7iJ37ijo8TQjAxMfGoN++eHK08udEZJ5J7B7eAVDZHdnmXymYNe+qMtb9MOFV0XX3BTZrQ/v4WW0EK38zihDZOYNMKMuSMAdEgD9FFso08M1OqMstx9qI/ccqLacwv/BLjS99BbKwi6w2EaaIvzGO8+jL6zDRl1Jl5fEYbRaoq5mjzQS2VQhSLaNUqUje4FcyDbTGW6lASPUqjJsXOuzS5RIsKGxtqG86dO/lYCukMiFptcu1VXvI+4I/zv8S2MYcjS8x1PmahdxOtUGAi1yU/WyaVUqLsyhUlNF5+WQkKXYcVqqw///NMrL1FeusWZrPNuX4TxvJEmTG2Q42mm+ZapPNsYY0OeaRbIre0TOZP3sTrmTQK56hoTVrBJKXuGmnLQzKF99ENJn6xwu6uxu6u2k+DgRIx584pcbOxcVjg7OwMDeXCMNRGBgEYyi3rhiaiUECbLBJtB0hfNfALNQNHyxHpJroWIQIfz09jChfDsgjRIYrImgNkp0N07Tqb/gLC1HELo0Seid7tIoIGlYVzfLhexXH2ytw7GtmSTTZr4wloNVRU5Y03hg0pYdjvKYrg3XfVsRJFqtfN/FyE+L+/R9jpEk3NcKs3gRBQKvh42XF2N3u03l+n9EaZmzc1bFtFSxYWlNiJew+l/Q7+bh0tVwIEVbtNwVBlV8dV68koInz/Q4LvvUtUr4MmEJZ1RwvAwf8NUAbx+flHk/K5mzUhHJtOfDWnzKkLnKO09ponVO4xvazb7TI3N0cURbz00kv8tb/213juueeOva/rurgHjrL20aTnJ+Bo5cloqs3WoAR7k3EdLUvf9WncAG3vZYVQZxNxK/nP0gTqhPtHjFRJz4wyu/gR2swU/TDFulMha7iEUtKsawyKF6jKAv5eCXqxqBYjIdSCWq+Dnx9l9blfgHMdq
uke1XHz2LPJbFaZauN5W3HzwdgAnd8z7oqxETRNozwIaHo2O4MCO4MCI5YyLY9leozPVlheVou+MqdGnC/U0P17nGXbFrLVQg5czHKJl8S7XOUpNsQUVzIv0e7lebb1ESnbolCAZ59VC9XHH6uz8sEAXn9dvZe5OViiynblK0y80iDXWkX83h9wrhQiBzX8noETjdL1U9zsjnHeWsEVKW6+vct88xrtyhcp6y22GKdr5ikUumjtFjPuVVZrGXYXO4yMFPdHdwihRnFUKkrQOY4SAuPjqlFg/PViWWAW0lCtYG608PJVZR4WeytvOk1kR2j5KuYz85jWBdz3DKKejh31Cb0I7DSaaSDCFgY+uiYpm12i1XXCgQeWjpersCnH0Qwdy7Yxem2sGx9QT38Zz9PIZFT6LAxVJKbZVEJjfl4JkDgQHqctpVTRsmZTLc4XLuz1u+nUcNfWCcpjLPcm8CKD8XSDppdTnrFUlWprjY9+cInSeArDGA4zjc3JqRR4Gx4iDLFT4EmouQWKVh+dYaglrtYLV9fwv/UHBN//AdL1IJVClEpoo9XbLADxMRgLG8NQ7/HoDLmHxXHWhNDx2Ljep7fyIforKbS9HgSJr+Z0OFMCJ4oifuM3foMf+7Ef43Of+9wd7/fUU0/xd/7O3+GFF16g1WrxN/7G3+CNN97ggw8+YOYY6/3Xv/51/spf+SuPZJuPGjwLpkPBdPZvD3oO/3/2/izGsuy+7oR/e5/5zjduzJFT5FBZM6tYxRKLmiezTandcre/Fvx9gGQD9pMk2OCTZdgGbD8QhmBIgG1Y9ovRaLRgQ0aLbslq0RI1cBbJKtY85DzEHHHn4cx7fw/7nriRWZmsKrKKVaRiAYHIvHHHc849e53/f/3XGnmKpGEzEebLp7U5QUfRnVbgQpiTYqViyI/v3+MFj/EDh6PaF7WxRanV5HxpAnFMZz/Hqqwhn5gnd02LSSlDToqWyeKiqSBEEdy+LclrddrUaXfAHc30InfDdQ3RyfMZadjZge1+jZJcYjGMEeWABX/AvDdgJ2owSgP2h0a0fCIrMTdtc2UZXP56m/zNy7zR7uCpkJP+HtaJby+0F5hrAYXkIq9RYchNTnPLu0CfJZ7uWiwtm4uAhx82i9WbbxodzJ/9GfzwDxuSsb4O169LdpIWS6ebBPMvUXr5OVbkHKlcJ3Ye4MBdZT8sY4d1Vhs2Vn+L66XHOGPd4AoXGVMhx+aKeJCLpZdwwgFWMEbFCXOnDKk5OJi1n8IQTp9WvP5cyJtfjXjicYXntRiP5eGY+UFJkpy6iOhcRg9GJCh8TxmDvnYHESwj5hdwWiXk3BzRhoJU41ckYukcquNDnsJggpiMKZU1C9k2qtOlIxYRjkO/skY3rVK2InwrY6GRcGmzRrSUEpQ9+v2ZJcF4bLah5zFzd9aKquoTtEeoic9LWy06HclkYkh0Ud3L9yPCWLLpn2CQlbBFxlbYwpMJu1GTijvizeFZWqWYatWnXjfVLSnNdJrW09BTx0U6gjgC4UHTG81IH0AcIxwH1e+RffXr5G9eBg1icQGR51NiHCLOn0MNhoy/+iJbj60Ahkhbljke3i9iA3dKE8TaKp20ys6wgSty9JxGH3Swr77B6aeexfPf3sfqePr2/cGHiuD8yq/8Cq+88gpf+tKXvu39nn32WZ599tnD/3/iE5/goYce4j/8h//Av/yX//It9//1X/91Pv3pTx/+fzAYcLJwW/sucS+BZwGtNaLTobF+BveR+h2JwUqZk81waPqzBfEpRIvt9pHXEObLWlR8guC4vPmDBuvEGnzqk7Ny97TF1Dq3xtIzDyNWW4d+LGAW181NQ1LS1FwlW5YhM75vFuJ22xCha9fMY+bnTVvi7mPHsgxJKdpdPVVhECzTv7VPsFhirTnBkrAS9FBel61OTrhwhr2kyd4bZvGqDDdZf+VzjAcp26UHSJwaV5M5qle2WD743B1Ce60UanvXvPlcwWBIu3IWp
EUpH/J4/BUueY/Tdxf4yxc8zqTw2GNmoXzwQbM4X7pkqlZf/KJpV7VaswV7+/Uu9TbU45i63GWtGpAREMcufTnHrrOEX8qYdBMW7R7tfI7ctnBI6dNEAEvWPLX0BqesDTZcl17PEJudHQ7Hy298q82Z7S+RXVpjXyni15+jXH6asHmRg4MaS0vmu9qLm3iPXiC5sks0PsCb9NClEXLpAcTcA4iohBBmP0aRBAl+PSCRhc+OB6UVVLeDnWxT2ruOShJ6rTWo1NlJFlFIPDcHNMKx6GUVdGaSzj3P7PM8N8fN/LxpTbXbEO+0UZcvMzf8FnGScpXz7PjnGFZP8uCTJdbWzHkHYKwDNjhJZxwQBCn9pELLG9BNKlgiY3PUoGn3WVxcQpRmxoi+P/MF0hojsJ6bR+/tsz7XxbFm5EZrjWp3sc6cJr98FXUw9VaoVhBSkkmHpFYjGOySbB2wMf8U+maEc2qIbNQ5e5b3zTfnKPRBm8HtLtfdp+ntG0W2QLNS6rEWdCkt99D9DZzBA+DfX5pwPH37/uJDQ3B+9Vd/lT/4gz/gC1/4wj2rMN8OjuPw5JNPcqXwFr8LnufhvU99oLuvvo8KPFW7i6xOE4bvYuRSmhNHcfIooLUp5xaiP6XMbWk6EzAevra483mK0MBjfH/COrGGXF2579VcIU7OMrPQBoE5NopjJQjMcVKENJ4/bxa1jQ1ze5GKXLgp3/2VEAIW803m3vgmB90D9nuacbfDpUoVe7HF6coBVveAtUYF+2dOsCMkoxFsbymyr91mvg+NUy0uiA79NGBPNBh6ZxgcdFj+0mss/O8rqK1tsq9/k/zqdXSna96sgPXBS9zwHiSVFll1iSdqm9xKNRvZWd54w3y+J580x/j6ujne33jDVHK+/nVDfNbX4dxZxZtfvUw7a5A98mPM779Oa7BFJhSpLbgifcZyjlujKmecErveGUR4nZa1T0/MYZMS4XNJn+ej2VW8E8tUV6uMpmLZ3V3zHaypDvKF50nTDWrlFYZU6PvLNA8us9e22WueZm1tDtuemga2mlhWnSQ8id2Y4F0Eu9OCbQmR2QxpavatUmYfFeJe34dMlXBP+LQWa9jpkPF//zw6jNhLW4RuhrbHlJw+C5WIW/05Inx8R5IkZsEvlcz7bjaNdYFSsPtGm/ybz3M2fR0xP8eWXmGzu0C3I7iQfJ0l8RCNhhmT6/dhO5xjr3SWUm+TvrVC3RkT544xg0RRSbrML1tMrCrhtNJVBJgWF3BCAEIy//Q6ja++htoaoe9xzrQunCP9iy8iqxWG7Qn79jo5DhqJIxKyyjJqkiOVBpVzpjUkOP/euxzfjSQxJHrrDZd09DCiWkEIqLkTzle2qTpGCqHFvU0xj+J4+vb9xwdOcLTW/Nqv/Rq/93u/x5//+Z+zvr7+rp8jz3NefvllPvWpT70P7/Dtcb+r76MCz3cKIcziVC4bMWkBrc0JbzQyJ/ssM7fl+Syp+uhzCDFrdZXLH5zXwzHeHYSU39ZtGMziV1wDxLGp5NRqkGeK9q0JcZhTrUlGwzJCSlotU+EoTOuybBYn0WqZK3oh7jzhtlbmmGslDG/12BpBNh5zfX4eufZDnP6RU/gnVzjBNGbi5T69dof90joHQ4+qO0FpydnqDu24Sq9aYWcj5OCPb7Ny/Yt4444hbqMxutMBaaZ5Hlgd0S2f4EAs0Gl3mVtxaT7lcvWa+YyDATz6qCFn8/OG8ASBWXBeftkswo+vtjkzfJHr1QcYuh7qdJVkoliU+yhto8I+l0dl4qzEDb3Oeetluu4ihLusuDfIrXX6uoYdWlyZ+wRP/PizrJ2QvPmmufBYWIC9XUV24zpOGLGz9Ahn7TYv9apcy8/w+MnrvHEtpfv6LvrHGhRtE88DhCR1ysiFMnpOwY0hWVdD7COFS5pK8ny2n5Nk9l1WClxXsliPUF+/zp5zCqGg7a2ikVTiHqRdUkfSm8xhlVwy6eK55vs/GJj3UCoZYfulN
xXq0mVOpFexTq6yEzW5MlylpyqcXdhhfrBF/dIA/fDP0+2Z1ujWlqRybonByyHecB9VcbiVrFFlhIhDFsojuvNPI0JpQknlLN/LcWYXX+fPg20vkTfvf84kV0Sx4LZ3gT1vnVT7zIsOHRZosY8jE4RKOKWu4QUZXu3j7+G38E7kuTn+bt40nwVAWzYlJ+G0d42lRvzWC8tpm00E99YZvNvct2N8Z/jACc6v/Mqv8Du/8zv8t//236hWq+xM087q9TrBdCTjl37pl1hbW+Mzn/kMAP/iX/wLPv7xj3P+/Hl6vR6/8Ru/wc2bN/l7f+/vfWCf4+2uvr9bCGFOTqWSuYo/iiIWYDSahQVqPQsIvPt5KpXZz7Ho7fsbnmdM5/KNTXpffhm1odBpTmL5bJRPIlZXCU/XaLdnoZ6uO3NNbbeZ/k2x/NILOEdOuKIUUKvXqI7HhBtttloXsJ55ms3Mgmlrql6HtfqIFlfYLj1KlMP2pMkgLXEQVWl4E8429rg9FqSXrnJ7XEfOr7Me7GGdXENFISqKIU1RvT7NRp1Ke4tbpQdQ5y8ipeSjHzUTVLu78I1vmCrU44+bhfsjHzHb4LXXzNj8aNPmiSTh3JpxNh6kZbq6QmpXOFE+IHEGpJOU63adWJe5ph7iAfdVJnKOIO4xF98iE8v0S6u4Fx9jUF2hOc0d29+f6uLCkH47pVmpk+Q2njtEacEo9bGlplKBUW/M8FYHxzFBVoUeRCmTIj56+S9Jb0EWLZDTIA9HJGtnUar+liyrogoS+Irm1a+jR2OSkw8z2JkwUiVyKanbAxrjLbZ2TzMpVSk1a2DLw9cuksI/8YlpfMJgyFz/KsF8hWvDJV7qnUEBp8v7LAQjlv0ctbnJ7qUeHWUm5UolSLMm1gULe+ca1/fLNMUulg3NJYtO62mqy7XD4FKtzfmo0H/Nzd157rrfOTNXksvP9dnNHyNKq+D3IYxRrmBZbCGAk9lV7HyINZpDPvTgYQDrd4q7dTC0WrQ7kqtXjZSggG2b6bkTawEqa5NfvwkNYw9y+FxFm239zH3f193xKv20hNbQcCf3zX07xrvHB05w/v2///cA/MRP/MQdt/+n//Sf+Dt/5+8AcOvWLeQRotDtdvn7f//vs7OzQ7PZ5KmnnuIrX/kKDz/88Pfqbd8T7+Tq+/2A789MuI4ijmcVnzieEZ+jnhcFCgJV6HzeT4HeMd5bFJWXYDjiQmsO7Xn0Rjlu5zLi5m2iytNsiiZSmuPAdWfEaDw24lnVG3LzpkAEj9GIMxyRkWPRcofISoXSKcm54WvYzYfZiBaIY6Ox2N6Ghi5Tcx1OiC10NWAzbGHLnE5cpZtU6GLTVLdZmbzK7dqjgOC1/gkCe5H18wK5tYnqdNF7+4hKGe/cOR565mF6QYv9fRj0FSulAdVWzvXdEpcveXQ6kqefNgTrkUfM8fr667DZDhimn+ATk0ucr25zZbRC1Q7ZjY3985q4Te5XEadyLm94xNS5qh7lrL6C40oCmVKrzpHNG6LwyiuGFLRahuDkObRKE3YySWiVsYl5bXDycH3bi2s0gpjRyGN3S9F8YGYIqBSo0Yj88vOEyQ6ivI62qxDZ5O0u0eAaOnsUcEx1NlPkcY6epAhX4qsUa/s248Yq6BJXyg+RZRo/HWFlMZFVZigb2I0KoS5Rnravi7H1Rx81+1prsFVMI2+zox/ipf4ZhpnPheo2C/6ItaCDzj229x1GOzn70/T1grDYjRqb8eMsLIf4ch7hOgysEvMtc44OAvN5bdt8diG4rzbm6DlTKbhx0xDZ8ahKWlmAXo9mHfx4B8KUE842nkzRg4FxsJ6fv6cE4N1+f4pK0iC0uZqtM/IWECsryMV5EJKlJVN5mg1+SPLvQJpQIB7EbIVNwuAEDKfiaJnTcKcppffJfTvGu8MHvozpo85H98Gf//mf3/H/3/zN3+Q3f/M336d39IMDzzM/rbsuItJ0RnzCcEZ8xuM7r1bAnJx8f6bz+
V4I+I7xznGvUrcA5uo5zVpGdvsG3V2H6kd/DKUlvZ7Z555nWi6WZUaGa3LIbRWSuDX6iUc7LhMrl3lviGelnPD2cNIuMolYXzfHy/a2qRD2aHBQfRJv7xZrp+Bkqc1a0GYzbNGJSnTaDh3/IbpJk4Ugp+kd8EJ3nW5SYYtnOHtqm7W126jtHdyf+SmsjzxmWmtAZbTJ5T+7jTroYGc5Dzmaq+7D7IerfGFS4uJFoyl58EFzfL74okv/YIkvbsMzZ7Y5XzEkpykmbIUN1LjNiXWJfdFHeXD5skOommxXn0A1I06uwcpclWxDsrdnvg+vvmoqRidOGD2T9D2EJYljQdmWjDOPBW/AQVTlxmiJh9xLbMgzdMKA0zVz4TCZQJoo5O4WOoxIV08gMweVSYRlo2tN8naKUhl2YJGNItJuhpePSQYxlp1S6e+gBgN2gsdox1VCUULZgvnSgJrlsZedYBJWCVwLNzD7djQyla563fzeuK3QgyHr/jZ7WZmv7Z9jpD1TufGHnC6bsc7b/Tp9BN1+CTm1shiPzbGTptBoSBrNMoOBMWOt1QyZsSwO4y/grVWbex7DGjY3FFuXx/R7CiwbHQTMPbBA8NpNdBiy1ujj7t6G9gSdZVAKsB9/BOdnfuq70qnkG5uM/uDzXO212BfPoCYTdBQjsm1qO2/ywAOSub/+7D1f491IE4qKejEFqftVUllDxMZipOUNaTij2ZO/TYvrGO8MHzjBOcb3Ho5jytWFHXuBLDMnxNHInKQK4hOG5mdvb3bfwsun0Pkce/l8MLjbaDJVFr2kTM2Z4FkZ9nyD+c7rrLUeQc0tsL1t9m0hOs4yo82piyq5FbAo9s1JVcA4yziIK+g0o51q6rlkaRKwNBWLFgnq7bZk94ELRIMhV25E2PWAk7U+J7jN8qjHVu0c/VOP034tpdd1mSvHnKnscRBV6SRVro1WuJa1eLT6Cstrq4dXvfnGJvpzn+PscESvfpo2LfIk5dTwdQbJPlviI7zwQomDA3j6aaMtKZclL+RVtl+J+eoNi8cWNjlf2+Bqd565UYcd5wT22ilOnZZoCi8gSXvo4VU9rBGcbJqFuUgbt20j0C3M2dy5CnMtQXsvo+cGVJ0QpQWRciDXeN0dZO0xRpRxHHOB0OuBCiOs0RBdrZLkmnHmH5qC5toi9EqoicZRCdl+F5V7uI4iFWVcu8fK4DXU3j7KH3JbnyVXEs9K8H3BRMwxikvYIifVFvZUyzccGnLz1FNw5Rtt1KXLnB6+RC/WfGnwFGORsVA7YLXU43zVyAOuDhcZdUM61dPYpeDQ66fTMWTN88y26PdngaLVqtmWeT4jN+fOvf0F0d4e3Hq5S+eNXfLBGK00NWdCY8HFuniB1WcWsf/gs6jNTUhS8yDfwzp5EvunfuI7Jjd5DrduKq5/rk/SeRA8F3pdSumQM/ZN5twuoteH1x0S1cf9uf/pviTnftKENDXHz3B4p4MxQGmpSuO0xrn1MnL+rdO3b9fiOsY7wzHBOcYhbHuWmXQUeX7vkfbCy+fgYHZfIcxJrWh1FSOix3h/cLfRJGh6SXk62QI2CQvJJdwwOpyeArPfNjcN0QlD2O5Uyf2zDPc3qcz7OJbiLJfpdhIGY0E3r7Pl1dj90wMWdiTVk3OHk1itFrR+uEVv4VE2vnKT7KDD9YGLsANOnprn3Cceh+UVrvZvcLDZp+2s0EkqtNwhF2pbbE3mGA5SXmk8zbVLLT5auTOAVJ5YpSUy6mqf6+MlpOdS2z8gSF9ns/4kGxuSft+QnIUF+PjP1nnRU1x/xeP5A49e7zoPld7k1tpHcE6fZjueQ26YikzhKbS5aSayHMf8Xl4234PCtfiVV4znzpkzhhBZ59aR/Ztk3TG1esK+WsBTE/Q4Yqe2TvPiIgNMxazQuYk8Q6icxA4YZppBEhDlhgHkWjLRARqBHU9ItQTLRcscWypKfk7jxBz7vRUm+336NXPfljvCkzH7cZ1x4mAHDo7v4
Lrme7m0BB/9qCE3+TefZym5Ca0KX8x/iLFvUx9tcmb7Nc76KSpocbW3wKCf0LeXsNeWcbIJ8V7K3iCgvuiS55Jq1ZCbSsVUbYJgRgDBXDgdzSi7F3o9Y4q4f31AfuUmWZxTqyoaQYzMcpbbL1P+2jdM4nqvC0EJ5nyQEhFHqJs3Sf/rZxH/+//6jklOkQN35cq0cj0OyXoTnMChMbyKjIdUg4yW6COQ6GoF4gR10Pm2gt+izVa0/3evcodYHMw5sNUy28g8hSS3nyDt7n5HLa5jvDMcE5xjvC0sa2YUdhRK3TnSXhCfJJmJVwsIYZ6naHUde/m8N7jbaNKRitOVPfaiBmHmksaaTX2CvZ0aVmyEwYWJ5Llz5jmGQzMhk3kL9L7eYWvPxAv0BzZeNKamR6y4u3RajzAcbLL3zZCDns3ubu1wMZubg8YDK9TPLxFudrh5U4PjslOrsjOSrIzh3E+ts/bf/we3en26wSoHUZX22KcZb3O+FrN5+iJxbII1K4x5eGMXZ1qZypSkm1Q4X9mmk1Ro1yq4vV3OPDhkP6ofeuKsr8MTT8DTP90kWK7zxosJl/NThKuaZ3+6xOaWRPZmuUAnT5rFKIrM8XrjhlmASiVzjJbLs3buK68YUbNlQWl1jnom6F/epT3qI/M+DWfCQe00W2cucuGhOoM3TEur0G2UKpJYSsJYY5cUAkNsBKAQhKmHAiwdo6VFrgWJtrFVStmKsSxJf/lhbh3MobIMy1JU7An9yGM4MhlPVr2MZZnqQbls2kO9rpmYKsVtKqca/MnOEwzSEqUg4UJtwtmbV8lvW1wP1+hjM6otYdWr5Fs7hIMhnaxGwxoiR2WslVWSxAiJK5XZxU9Bbt6uajMaGV+m/X1j7Bpt7FNOx6wugUCwWupRcWKUapD/5TfRgyFUysh6DYQgxcYuBTAYGu3ZX37zbSeNhkOjz7pbd1jzI2yxh+145JMJQ2+BoXAYU2Wdq0jLApUjq+X7Cn6zzHyWweCtVZogMNv/fvEl7+X07THujWOCc4zvGFLOprFWVma3H/XyGY1m0yBFO6Tbnd238PI5OtJ+THzeOe5lNOnKnBOlNlprkt4uneVHiWpVlJplDhWxDEXC88WLoB9ocdA8y+7XbxC/eYluUqXrn8HybFoNjVdxOWGPiHcuM+pm9BtPsb0t2d42C1y9DidOSEon53loGq5465YhvNvbAGs0PvEp1i99jZObL3I7XqDNAt3aGfprKyycrFGpTOMO+oovD59gyc95MNgiVs5hZWrBH7DeaHNt7OHoxBgNVsxrXb5sjq9nn4XHH5f4vs9LL8FmD/70z+DHf3xWUdnaMsfliRPmGH3+eVOZuHHDbJ/Tp6fj97l5/65rnvvsWfM67oLxtpGTCc1SRCcMiJIAryQPq6C9ntm2AG69RFqrMOoNqQUKhEZraZLElSTMLFM5QKKkjaU0SW5hWYpFv0+mJLpWZXd8GiVcmrSxRn0mep6xVcMueThSUikrJqGk2TQVp93LQ3Snw+pCxue2n2ZnUqMsQh6sXefCfB9dfZiruzU6Z54m8epYeYa6dp0wksRui2olIRCCuDvAmYxwH77AwlqddttUa1z37as2UWTG+adDsoQhlMWEk9ElZM3Hkg5aC/RUrS3CEJ2mkCSE7io3xKM4JEg058VldCmAyQR17do9iUcczybvjsL3zTnG9yHv2nRklck4oKx71OQQgcAmQ6Km2RYWlEroweBQ8DscmufNsru+i8J8D1qtd27L8X5P3/5VxzHBOcZ7jqNePkdRtLWKis9RL59+3/wcfQ6YtboqlR8ML5/32pb97Ywm3VqFkz9xHuuE8Vg5ODCL7mFI586dAYALjywz1xKE+1/jwHYZuFVu50tcycqURhFVu0q97GF397n4bJe+aDEczkwot7bMCX552Sx6Z8+a19raMvu85yzRe/h/pnShz/nqiPO+z81Ri4O2PDQinJ8HGcDWJcneyGcvfoiz9i1abHCg5
tmjikg8TvtXiBctOtq0k2zbEJGDA/jjPzY+OQ88YEjKN75h3t/nPw8/8iOzKUFDvGZp10V69u3bM8v/NDWf5eAAXnwRfuzHZse270siKgycChqYrxlCd/u2eY0kmS2oWkvU8irhaBvalyHwybVGqAw1GJOxBtJCaEGWC2wrI9UOjhWxHPTYiZrsjSrkuIhymaVzNsP9KqODHCtJsIdjGCsOumVW130+8pGqmZpKEtb1Vf68/aNsjX1UlvJg9AVOt98ka1e40XiKbV1HiQpWEJBfukQ3DHB9gaMTPBWSuD5OXVEdbuJsQ7fxUVrTqamzZ+8dBQJm212+bI6zYpChMGvUByFprhCeg55qkRxpejs6y9iVq1xp/jjarSGwWWKbJlOn0ylL1VF8SDzy3HjV3LxpqssFbNvsg0rF3N7vm2pdtVKlMe9Q37yJYsRytk/VMc+lp+JD0WiQSZuOWCbcrcEshWe6/02Vpgiq/U7wQU3ffif4fouVOCY4x/ieQQhTri1Kt0cRRbNWQOHlA/f38imXZ+Tn+8XL5/2yZX+npW7LMlfZS0tm+xZtxEKbsL9vtm0t1FSlxfKqYEV2sYYu+1FKnNsM0oCd7Azl6IDhdUlz3RCSxUWz7/b3DZnZ2jKkYG7OVEjWph/PkBjJxG1yNW7iKDizbn5u3JjlPUGV+XlNtLnNUJe4krgINcdD2beYVBZJZMDtk49T8RqcWzNtj0rFVCz2981zfPWr5n08/TT85E8akrO/bzKsnnnGCKSFMCSnaFcliankmFyvmetzHJvP1+/DSy+ZVtWbb5qFLQzN35tN87jJxFTKCu+cwmW624XUraFWbKysC/sRKknQIodGjXxcQuQShUee5lhCI9FUnZDASplkDjeG82DZVGoSneWE+yMmaRXXUkhHgMqpxG1WNrfYe/MjyFaLM2cE3/rzB7mZNEmx+Jj+GueDTch8ro6W2I0lceDg2jbZOKTbgVLeJ+o7KC2AiJI3Zr4Z0QvmsTptGtaQRqN+hxnpURRJ9ZubhjgOhzNiI6UhI47vomyBHo5ZCnrU/IRclHh9sMbe8DyZvwtRjKdjHuRVGvSOfJmmIhfPY29c5uqX7kzt1no2PQaG1G9uzm5rNkFKyeKPnMb7s1fJu5fRkxTdqIFSjCOLA/9h8uYJ2AuRS4vY1SpCmMe2Wt8/5533Ct+PsRLHBOcYHwoUXj7z83feniSzVlcUzXQ+xbTXURQEqtD5fJi8fO62Zcdzodsje/V11O0NnL/5N7BPfef5aO+21F1Ubebnzfbs9WYeKb04YJ9ziHZAraI4U97nbGWXg7jGflyjM/JJJOwNfLZeNRWSZlNRzoecCiIifIZU6XYl3a4hCq2WIRTFaw4GhlikqRF9Fi0hI+KF3V1J2z+BCvs0wuuMy4skbsBr8qM4wyEP6lfpP/UkUWTM2FZWZpNhKytm3+/vm6v5dtsIhH/4h01K9s2b8KUvmXyrwhG6qDCcPm2OuRdfNNWGopJTjDvfumWO027XfKZ2e5axFIbmsUX1oCA4GxuzK/w0Be2XcB57BufmBHFTIn2JWHTJX5TGtdgLyNOEPI3xrJhFt8t4mDPamzAWFXI3YHUVem8OGKYewrYQEjwZo6SkWVFYkzHq0hUWfq7Jzf0GL6uHiLXkI/JFHpRX0EhuOBfZtpdIQo30MhLpMdnuURrvE1sB2rHRWJRVl9Zoi/1kkcWFNqjYRCMsvzUaQWuzzQrX39HIbK/1dbMdk8RsC8sCNUhYnlyhtH2Fkdfiq8HTxH4Nyj64Dg36XJx8GVc4hxoc8xqaYWhzyf9RovID2FsNEJCmCi+fMF8OEa5LL6+yvW3S1BsNsw8tyxyHs+ryCnnpfyJyfPZf2qY/qILnIqolRLUKkxCvbLPyI6eoXfjwVireb+S3bpP83u+j+wNzXlltIeLkQx8r8SFaAo5xjLfCdTmMGjiK+3n5TCbm52jvvfDyKXQ+9yunv1+426uG4RB185YRUOY52
dY26v/4vxC//P/D+i5IzrstdR8tN9cDn8YFQ4gG/Sq3X58j39ln4M0xTI1KMrBjzle2oL9F+9RjdC94dHuQdAdcf7WHDMeU9Ih5t095vkLz4nmS6jz7+0dMARtm4V9dNdqUKDKLodYz7cvKCpw5rbh05SY7gU3XX0ckKbVwl5HdIq02eUX8GJXNCec/quj1jQ5ICDh1UnHr1SHzIqHc8tgaVhiNJH/yJ8YQ8JlnzLHw5pvwwgvGlbbwBy08StbXzUL8yiuG+G1szKIGFhcNEfzWt0xVqNChHByYY7GoEMx0R6bqU2TOZdn0yl9IRKWC8gBv5lgsBAjbBU+SpyD1gBPjN9iMGlz3H0aoMp5vkXV6JMMJEQt4QpFrwSTzWS51OVfZJZQVRGefaKfHl75UIcbnwfw5Hp58jd3gFH2rQVvPk2UC7diElNH7I+z2LgNRw7YlSjisc4XU8siDCvPhJtWDiKVFdc9ohJ0dU7UpImUcx2xfyzLfV88zxFMIWLO3sF79Izap8Xzzf0ErDcKCScTp0WucrnaRy2V0fRW1tY3q9EgqTa7Ki3TyOsoGWaqgK3XycchCNURsXaO7n7GZ2QR2QnPBwb54Hm+xxcrKW9tI47HZ50myBs/+bfLVW8gr12A4pGYNmXdv45xdwX7mcawTK2/5vH9VkN26Tfx//F/orS206yH6fUStilxbRZ74cMdKHBOcY3xf4n5ePnk+Iz738vLZ35/dVwhDdoqKz/vl5XOHLftwyM6VIf18BdtfpWaNqIV7iK0tkt/7fdz/7X95R1dC320v/NuVm2sn1njwZ06S/uFrjHrb7JfPklkekzHc2LERwQOULj7Ew49I9PYWt37/BQ5Cn7E/TyzrXEvmcTbG1No3WfyYYHm5RZYZ4lnkpt24YUjryZOG6GSZITppOhX/9ofUe9c5fUpzLT/N3qDJQbaEb+dU/Ixh5DJqR7z89QkLczmBFZMNJlz9yg5Bdws3G5PJFqfnWmzWHmKY13jpJbOgfeITM13OrVvmeHnmGXM8FJWc9XXT8rh0yZAXIQxB0tpUbDodQ5AuXDCtjzw3x5KcJoAPBmYxLzxkikmqYpqwIDRFtafQoxW7UNg2uCW8msPqT/4oV/Zq7H+9SpIo1rNb9K6lDGIPYUUoFLkIKPspJ0odotwDJ6cxOeD//WKJMIZ16yZPnupwY++j7MYN4szDIUH5PkPZwo6GZFs7MEmQWqPinKbTYWA3WWETKTQn7ds4e7uw/Jg5/pRCSFOlu3QJRkPFqB0hdcbaMtj1ClkuCYKZo/GZMyCF4uX/c5O9g0cRzQoiTnDHHS4O/5J61kHHMdRWcf/WL5AjufL7r3Fr00KlKeSgLIfcL7Eoerhb36J/u8JupPAdRbNlYXkKJ4tY6LxK+dVXcU5/Equ0Rp4bQtrtvnXiyfUkiz9yhvJfO/Udfa++37Qp7xTxzU26//efMdh3CesfA9cBpWj0t5ifXEFeOP+hjpU4JjjH+IGCZZkr6Ppd1XOlZm2toyPtcWx+7uXlU1R83omXz7c7wRVeNXgu6uYt7FSiy4tkQtAhoBPMo5hg9Wr4f3qD+Z9bod6Q9+3xf7e98HecYvypT1L9+jcpb15GhymJVWZ/9UHScw+SVlvcuK7IvnYLESY8dDpDyhGbkxaduMLYdxgOXNrPhZTP5TS8kIYXEnguqlSl05WHIud63YiSz5wx23lrCwb7CZ2oQrfcpOrGnFze5+vti/SSKuMop2xNkMmY9PINdtIIkpjl/utgO0wWFwmbiyywz/7+HieGAwYPfIztcYPdXfjDPzQk56d/2rSqOh3z+9lnzfbZ2THtjLNnDeG6ft0Q40uXjGNyQcJ2dw0RKibS9vfNc5lwW4UahZREyGQcMJkECCGRctaqKaYLwZC/4t/Fbyklc0seo8ZJNq5AnqTIJESnI3JZJbHKeERkiYVtTViUbRxhGNOS2OW/RX+NyLZYW854evd1rmdn2WvM0w9dqnKEFnWGq
Y87bjNRZXw0seVRpY+KU5JMs+ZuUHf6LE6uo7tdSFLU9g7J7/7fjBfOcNV/jIGqMh5k6P6AxfgWdh4T3/LwFzzcBy5gN1qcPm2qdd/4BkSdkGxPIAKPih3RKo9YXevjRqvobBGdZOzEdW6/2iRzK3BxBVYnpL2YOXFAsHudfuYyCFZwXUV99xJy0MUrWay6AUHdsEk912J0u0vnT2+gPrYCYkY4hDDH3fz83a3sdy/4zW/dJv2LL6I2t82JplLBOvHh1qYcRXEefIsGUiuyr90mH5TQ3hy4jnFKtywqFY0eJObC7cI5dJp+KGMljgnOMf5KQMp7e/kUkx2FzkepmZdPMRlUoLj6LohPqTRL4f52hOPQq6bbQw+GNDyXhrjCddYZUyNVAtuyCSpVor0+u1eG7E0ZWtFeazTMa+qtd0hO7oN3k2Jc6HrU3j5qexcXTXVlGbnYJMth+3WTIi4qdbZCj2HmoxFcrG0hhWLDqtDpdomv9dmZWNxWkrIV0pqH1oUTpJUWaTqboLt61Sw4p0/D6kXYem5EN6kwFj7j1KfljZh3B+wndToDFzv0qDJAVWroKGbLOY0UgqX2Djge7fISsqWptK/DzRepXHiYa3tlotDnz/5Mcu4c/OzPwle+YvbzF74AH/uY2Rw7O2abnz1rKjBbW6bl5Pszofb2Njz3nCFLxTh8EIAYDXC39hgMYizRJRM+e9s+3qkTCNEAzDF3NEzzaFZcUc3xPFPhOjiAa1cVeZSyqA8YuvOMEgdLJETKxrYkC9kGS+Ob6MYKLXfAf7/xMUKrwtyCxY/+tMXV/+dRdnYlI69M1Y9IVZkot7GTPgNVo+SlJG6NarxBYpepyQ6LvaucG76K62r0ZJqRVK+RnrnAa71l2i9qQn2T3C2xEN7ElRm6tUhpvgRJir23xYn4DdrBp/jCF2ZTBSrNaNClVAmQUpIpo/UZuC3eiE4SZhY6HCPGikkEzaZk/myFXrdE+Oo+aV5lbkHiyBFeOmBu8hpeWaHDmP72aXbtNVLtggYdtNB7fZzBEHe+zuLirF34XiB97nmS//pZdK+Hth1wHMRkAv3+h06bopQ57ooLveJcd18Mh/jdLZyGJh/10LlP0x7gkZjstVJg2uy9/oc2VuKY4BzjrzSOpqsfRdHWKohPsehk2azNAiYZWj33GjoUVBunqTQzSunwDsIhV1eQa6tkr75uVjXbRgAOOYEe48cholJGVEuoTo/xICPJpwnSwey9bG+ZKyrVXaC0uE7dCamKEHEPcnK/8vjd0Q7tuEKYuzTdMWU7fku5WW1t35e8rdUV8+IKqrlGO22wG9XItcUL3TMA+Gmfs3t/it2aY6/1CG3dJI5tbu86iHaP2nmXxfXq4RX0cDgbXa9VW8y3VlnffYlJ/Sx7cRNfGqv+hjOk3r9K11qmWz6LkyeU0n1Ct4EvIm6lpwl7TWrCYpVtekMLf+sFkv025wKfbf8s7dIprl4tsbdnxsZfecWIY7/8ZTMhtbw8fR8104Yqogpu3DDEQ0pDdvb2jIlctWomxbavDOlf26Ga9RhZC6QlgcgywoMJrckLuGtPkcnqHY7gKldEowy0BQrSVCClPNSwXLkC45FpCTlWThxHRHmAp2KEknj5gJX8NjIb44/2+dPhowypUmm5/NRPSa5eg53SeQayQynqMsqrICFLFCqSuHZmpruETezVWZrcYN3bYKl6G21ZCNtGA5lT4nLjY3QO1giHKbF2mc+3cUebaMumpEaIXkhQnmOxmXDJXuNLByeQX+1iXTShla0WeBVIr6eI1Mb2LfpZib88uGC+T1owSix8mbLcAl/OvnNNZwjhNUpzFku1EK0F427EljzBrrNOZlvoWFEZlGhWMhBQK6U0o+uUlx/DOv1WUfR3g+zWbZLf/Syq20XMNZGOA1mGHo1RSWLu8wFoUwpx92g00yfeD8VFW9FCVGo2zKHihEniwNwCuqLRozFNS82C0y0LnWfogw7Wow9/KGMlj
gnOMY5xDxTp6qXSnQZmhZfPaASDgSK8fBk1iRDzLUYIRhHAPLp+Bn3QQf7JBo2fWaH8yDM4t7bQW9uI0AHf42R+FT0JUV7AeOkxRmpC5Fh4NZvKkXOxUlPh5t4Eaz8lKDWIcov+qIxG4MkUISGolKnd3mNuv421dO8y+93RDlHuEGYeYWb+r1WOG+6x1E6oxJtk/+/9q0X2x59BuA52ErJcguWgRz8JuDJaZpT6THoxr3sfRVRW8IXkTGkPr56xM2nSOdDE24JruoyUklrNtIWKk+tgKOlVnuTqboXmxi7rCxtI32Fj0CBo76DzCWJR0yzv0xk6tFnAExlKpER2iTxV9AYew6hJMwlZsEOsWoXA06z0LtNMdrnCxxhS4nOfM1lNQWDaUC+8YKpIKyumSlMuG/3NCy8YEnbpEjz0kCE4nY7R4Dz7rCEqydY+fjoiq9QRuUWmbSxHIBybfNDD3t8mWiwzmRgCo+OYrNclnPhAgMhydCbBs/E8m/HYECi0pkKPYewypAJCEcsygRixPLlJZbINluT5/tN07EX8usdPftLj+nXzGSZ5BX9V0t8L8OIBofLwmZDYPnbgkguPwEo4UR3w+OQbOIPp+FOek6U51/3HueFfZJDPYXfHtMI9GjJBCqiGu4hmg6qbko97XDo4x1XVQCDAz2DUZ6k8xG6ag1q5VZLGIu1dBbUKGsEk9RFCM+/1kMMEGk1GosLc1EiwXIb5eEhb7bOhH+bVnTnCzMVVEwJnE7SFEIJmtsO6u0+9aiME6MkE5Qj0ZEJ+8/Z7po/RSpH9+RdR/T7Z3CJDd54hNcrOiIXaHnowREUxbLw/2pRioKIgMsUF2P1QEJmiYFvcv7hou9u0EEB6Lo6rKdOntKzwrl1HDCN0ME1yjWJEHCPqtQ9trMQxwTnGMd4Fjnr5tHSbePgtxHIVUUoYpR5vDleJcpdMWzh+gLc3QN0cMmqtoJ7+W6T9v0B3O4BNSURUmorqao1mDeobb2Ktn8H9WBWEOXH1+7N05qASkqkewm+Sa0EnrdBNylhC4ckUhzrupEr1JUlpbdZOK5KkhXhrtMNaqUuqJL2kQi8tQ5oRy4Ctfgn1tTdRnQXcxfO0nNFbqkX55auI1RXUjVuHLsp1N+Spueuo0YjB65e41voYQ9chzCXPdcxVumPlnGveoBx9i05rwWg4xuYq3XFMa6hUgtiqkZ67QHurzP7eiApD1rx95lZSNihj1R2EiNFeSoggpMqEMoGYUMoGjGMLrQQH3im6aoWTOkLaoFvgtjs8ystc9z7GeCz5xjcMofmhH4K//EsjeG42zbTXzo7Z3wXJiSIzhfXoo+aYKKaqzi/2WUlusOOvEiufkhXRj1xqcohjKUalBRgMSCsJKvCR0QjVm5DnISnVQxdfjUbGE+YDGAxqtNuQ5pJSMmAkKqTSQwsICGmJLifLB4jc5Yb/IHvlc9gllx/7KcnNm0YnFMdGZzKOSrjLPoNug0Y5JQwbiG6PHJuqE/Lk3DXOVNqo2inU5hZ5p8ctfYar5cfo+SvE2qOU9ViIb+HYOfW8S5j7lKJ99tUSr4mPQlljJTGlGOqlhEZ1AN0eIl0niozAVylJ3nqASXuLtK9plSdUnBydKqJewlwpovTEPPUzkrk5s32vX4dvbLSIkk8gBjbCsnFkRjOIWQk3WOq9gePbkKfYwcMIYaO1Jt8wdtXJH/8ppN+9d0uSmO9k98aQ6LaPKj2OdsyUmkOKRwRCIEoBajpO9t1oU+4VgHw/CHFkSg9z34LQFETmXo8pjBDL5dkIP4BWVZINTX79mpn+vLBOvrl9OP2p4xi5uob7N//nD00b7m4cE5xjHOM7xN3VkIoTM++NSHLztdIqR8dDSFJTLtYLJI//LOrNN/HiAUnZIqzatJMMfX2ECB7CXv4o5Q15aGJ4VC+g5qD3/D5DLRlaTZa8PhUrYpJ75FqSJZqebnCwV8IamlaK582ugIMApGzhNx+msvMm5RM+Ugocq
VjwB8x7fVRvC33mLKN6zl6ng6hWyJTN9qTBm+kalsipuiHzNYvq5jbuT/wwtDtvdVHe3KYsJnzkZAer8TphavNif53tSZM0t3h9so6OVrE3JGcfmRn0DYdm0kUp81SNuRpZrYIYhYR5xlXb5ppQNHtf5YzYpufMo8suC96YXpQQOjViHCZiniAfIaUgVA6RW+dyskzQTzlX3cWtlsk7XR54ZEg3r3P5sql0dDqmGvONb5jFOI5nOhjHMSTnxRdNRe2NN4xGx/fNY+taE+RjctfFzSZ0QptJalHPdsmlReL4OFKRxIogUKjbm6ioRI4il4bYYGUoPFxiVvrXuXLlafJc4juKsawzwhwQAvCIWWMTG82OfYLb7nmwLZ59VrK5abZnHM/GtE31UVKqSQaRg+0p3NKQxfgWn1jdwreNeZ6oVthZeIzX8hYda4kIlyAfMZ9t0xBd6tkuHX+Nnlhkw1nnmvcwUkizoCI4lV6i7M4j3TLj0KbPMrJXgsRUHaIIGo0a1QdAbW2RDxXNeBPPzqmfLlN+8iIbSYsXX7zL68r2qNQkC6OrrK/GNNzIVGm8nHwiUZ0uYn4e7bkwmRhy024j5uaQtSq8S71aHBvS3e/fOe0WhjDa0sTZMnh1XJ1SFjFrbFBl+oaLjS7l22pTjlaFR6M7jU7vhTw3PwV5KbK/tL7TfLD4+7cjMd8O93JJlxfOQa+PanewalWcv/k3vitri/cbxwTnGMf4DnF3NQTgdHk2h56PQ8Z+TrJoMZmOo+uFOmrxHOrSZdJ2j0knJ5Il0uoqcm0FL68x2jUn1aOhhWakvYW/eIr65mWWToaHAuFEWQySgP72hHRljfQBj/HEnIyLGIzdXfMcnidxS0/iUMa/3qdctbA8i6ruUxlsUq5VcD7+JH4eUuUKsrWCEha9pMxO1ABchlnApmrAeI3q5hr1h1eYu/0Ctf0riGzqonz6NMp1kVPTocDJ+Pj8ZQCi3OKVvRVuJ020sLh2bfY5V1bMT69nTvbGz0gSBGXKVbNmgOIgOMXe7oDKnMPJ4ID6woTOxi79qEJPNBm7TeLcJ9MuJREytEtEqUucO7zSO0nNHnMme5MsTKjMmRbVK6+Yq+SvftXobnZ3zbj3tWuG5BT75MEHZ+GNW1vQaCj6Bxk3b8HDOmUlu8HmoELMPLbMGLst02KKDhBakcWKvNtHt/fR9imUdMixAInKBYIcz0qh2+FGPyfLJLVKRjhxULlEIfB1gkdMQ7U5iCtcKn0EZdl87MKQg4M59veLbTULxfV9c1sUFVOCko8+ZnPq5VdQ2yN0q8n+2OeVnSX20gaxLONZ0MwPaA2vUytl7IoV9kpP0nPmEWjcfIKQLuW0wxnrElJqEgE72QJq4sJgRFSdJ4kCSnKWBQUwd6ZG8FCFctpHRzE322WuhmXSV2atDts2lbRTp+DkSYm155P+4S3U3ghaTXRhrlOtIoVE1KronV20bYPWhtw8cP7biumFlIThTOxekIs4nvlqFQiCom0tyMpdyAY4YZ9qKaUiZmxMZxkiS5Frq4falKMi36OTnPdClpnXL962788sBCzrrQTFZPopSkmfkgipNB3spe++FfcWl/Q0RTgO9iMPf18Eggqtv62O+gcSg8GAer1Ov9+ndvdYzTGO8Q6hlSL57O+boMujE0kYt1W1sWVaTr/w82850UShYni7x7CbkQjPsJ/pGGsh9Isic7UIM1GrF3awX3oOoghRrSBcGyeLKI12qFYF1Z//SawTxvOjiLkoTtLjsVng8hzCgxHhTg/CCa6O8awMv+FTObdMZa2JNe4TfPlzNOoavzKtSGkYZT7tuEJ35DKcWIiHH0GUy6CNUMiTCbWGZPlcmcqX/hBu3vi220b83M/z+huSK1dm7vsFGg3TXivK89G00h8EYEVDsmu3sdMxVsm4+c4nW8xtv0yPBuPyIgdRmaG/RB5UyaSL0JpYu+RK4BJj64wTjzU4dbHCYGAqZhsbM0v/YlKuM
I1cWZk5Ffd6Rvyr85xlbw87ioiUw0p8jbXBa1wtP0HqVYiFS4JHRY9oxttoIdiZexw3n1Bu32S7dB6E4IB5QGLrGIFijQ1OWbf5hv3j4NgETs5goNAoLDJqWY+noy8ytJq8GTxBhsvDzpvkD36Eg0mZLDPv1XHMYlqtmm3oOOY4Wl42waO+b6YAO196hReuVNge1whFCcfWBIFgOejjH9xiZ1ShUzrByG6CUpSTLkIITsdv0HL65KOQ/fI6meVh+zZxZYFJJLEdQeXcEt6cOc82m7NsrtHITJ/dnfJdRLmcP290WXdbNNwxtThdcOXaGtbTTyJ931RWJxOSP/5TksocA7tFmLsseX18KyHMXfpDyXAksH/kh9HV+mHExtG4hyJNvhDgFgMJ9TqUAkX6336f7LU3YDhEFyN0lkWSSUb9nLBxAvWpv4GYv7f+piAxaTqblixe614odDRFJaZcnhGd9ztG4cPk8/Nu1u9jgnNMcI7xXeBOT5nmHUGXslrBeZdjonl+5yjn0au8opQ92ekxvraDGowQWuFaGcFcidLFk7iL5mqxKEsXra7AFJgOdT3DIYQTxWh/QhLmZDjEwidOpqGHjsLduIYzOKA851K2Ehwrp+5MqNpjnO1bWOtnyP7az9PuSNptQ6ayDNAKPQnR/T725g0qesByK6VZjrHT8L7bRikTzPjaa7PqQ4EgMBNNhV9HloEOJzi9XZiESJXiiRSrXqGyXGP5BIyfv8yom9Aun6KflkmUTa4luRLEiUA6Ft5CHduWXLxoFq7x2Gy3l182ZMBxDBkoXIkLJ+wsg52bIZvboIETzj6p9FBxyvrBX1LK+tyee5K2tUhEwKnkEmU7ZlRaJs4sMi2oRAfsOqfIhaTHHCAROsXXEReGz7EdnKPnLxJYGZkTkEQKhaAsIx5yrlDLOrygHieTLmfVFUSjQbt+ljSVhyPoSWLebxjOWp4f/agJIQVz+/PPw80bOaObbWQ6wfdgpTzEtTK2wiYHYZksSgGNQ05JDbk4+BoeCb3KKfr2Al7cI7YCQllBeSUqTkKp7iLXVmierFEqGZJeZJ4d3b9CGDK7vm4qNe/EafxeC26aycMJx2x3n/RLXyNpLhIqnyh3mfcGCKEJc49J6pANJ1gPXkTW65RKd5IZyzLHW71+bwNQrWF4ZZve//gaw05CnihUbMyNdJ4jyyXsT3wczpwjimY2AEXb+Gh19ihMJeZOEvN2kTNviYGZtuJUu/MdnYM+7DgmOG+DY4JzjPcS97uifC9LuEfNuEwul0L3p1eOjkviVYliU2ov9AKWNcv48rzZVaiUM/fmUsmQpn7fLAxFtSeOTQTD5OoWcSKRroNlg6cjvHhIJcioPHWR0mrrsNLiujC5tsXeVy6xt5MzjANSNZ09lRZIiWNrqi2XtadXWXh46b6Lmdbm6v6FF+68qgbz/uv1Iq5DoaMEoTKkY6Ecj1ptOqE0HNC48Rx2NCYpN9nPW/SigDCRKOmg6k1i5eG6M9fhRx4xpMD3jS9Pvz+r5gyHgFaINKLuJcS3ttkYVelgrtCX3C5pJilP9lgffYst/zx9d45Q1nHtnLqb4HqafDhiQI2SHtPNG4xEjVgEhimRU806PN7/C55v/XVyy8UmJiJAY+EzYT7Z5qHkmzzv/SiZcFnJbuGWXdpzD5Lb/uGxYttmG1nWbBrwp3/afLYkMcLoq1enJoNpjNfeYrk0ILZ8NibzpMoGDUIoZDxhJbzC2aWQiaywFbXwky5ZohnnPplTpjzvU1soIUse9YYkmC+TpJL9fbMdj8LzTGTHhQvG++jtjDTvRpbNjtmCLIXhTOOjRmPi1y4jXAdh29jSfCmEgJIVE2QD7HiE+2M/TLBUP/TIurv183YiX9Vuk755lfBgRBQLEjyChkfpoTMEqy3uVeT4TkjM/XB3FbmY4Rbi7avI3684Jjhvg2OCc4z3Gh9kCbeIpzgaQFp8q5NkFlNxtAXkeRD4Ci8ZIvME6bmIW
pVqzQicHcc8V/vyAcPXbjI6iIgyh1S6JEGTrLmACEpIObsqrage3uvfwk/6NOYk9XKKjhLaBzn7LDE58RBZtTl1wzPbpiBby8tmwbtfXMbOjlmQj7YziqiDSsWc0It2XlH1chwo00fcuo072qfCCHyP/WCdXmmNiSqh1KxV4PtmgWs2TYWj0zGk7fXXDUGwsxHZQQ8mIXmuaY5ukgZVbloXGFDDQrNgtVFxzFy+x2p6g+u1p9hVCyAlDWvIIntkacoBK7hWxjBx6VMnxwMUAmglG9SyNreqj2Ah0ECCwAYqjPj4+HN80/0xMmFRzzqUxYiOf5K83kK4HmFotmMYzip4Tz5pMrfy3JDGS5fMgl2IuZdKAzqvbbNrrZDlFgjwrRRX5jxQ26SSHLCx4xoNS7nERJdIMgsnGVGXQ7zzp6mfm8ey5VuIR4FKxYiy19ffXSyKUqY62OuZ/VS0cMfj2f8LEbWU09wwFPrSm/i9HcrzPo7UIKBiR1TsMcH2deyzZuHXGsKtDqNOylgFJEENkKYSOTAXEcpyib0qcSwPhdvFxYNjq8M2lXDN90hIE5x6lMTcr2rzTlFUcIvWdRRBctAn/eKXwQ8Q04067w9oumPzmMkEPRzh/eLfQn7IYhS+UxwTnLfBMcE5xl8F3H86QzO4fUB4eYO4H6OUBCmRtQrOqRVKi/W79ACKctrHFyE4HiOnyWAoGY9nCdpRpIivb6HGE2TJx5MZnpVSsyMCKyLobTF3qkbjF36S8cSEYxYL1t2oVk21YXFx1lo7im7XtFXa7dlthRtwEJiFJE0hH09I2gN0GFFTPQIRImtVyhdWqazNsbtrnqNoBUaRITuFQ/WJE6a6oBRcey0k3ungqwnKdbF0RjxMmMv3yR2PS87jJDjYZNRpY6cxrXyPvD7PftokVJJq0mM1vsrYa9GxFhB5wpAykdXAzHvkSJ2zlNyk5y2TWgGQo3BRQJAPedJ6kUvOR8hyCERCzRnRyRskKUjbJguqSClRymzHxUX4a3/NLMSvvw4vvWQ+b5Gd1WoZInewm6GjCWDiJOb9AQ+WN+hmNQZZgKsmTMaCxK0i85SmbuM6mvKci1o9SV83DwlmAccxz3/xonkf76RKo7V5f0U1sRBITyazeIuC1DjOnS2lwrfKcaYWCeEuwZf/B864i2w1yV2fyUQw7KSEfgv59FPG0O7SZVS7Q55qEjsgqS2QVFvkvQHu0Oxzz85w5hvIBy4g55pmVDo1hKayUqVSld8ViSlIW0FcimPx7VZntb9P9uWvIeebICSeTFnye3iWmQnXeY7a3sH73/4m1ukP77TTu8ExwXkbHBOcY/xVRX8csfPKZaw//QviCYwqy2ReAxeNNZ6QB2X0o08R+c23XIEXV6xFpaPwBJIS0oMunT/5JhOnQSgrhLlNN6mSKgtH5Ng6wc9HlB46Q32pdKgHaTTMc+/tmYX27kUSZu2V5eW3JkIPB4oXvjJha0eAtBCeS64k426M3dvFzUKE55IKhySRuPmIhjMheGgdWatTLpvqwMHBbIImisyCEwSmdXD2rEJfukTYDdm1ThBYMWHqUA53iYVPNe2gHIcr9qNkWDgklPIhlazPaX+PjWSeA7GE0BmNbJcxDql2cPKEsVUj9RYACWTYKqYW3qZTPo+0BKYWIXBIWA2vEZYXSYWHloKmPWSQlki1hcpNh0u5ZUpVi3odnnjCePVcv24iJfp9s2gWOWvttmI00KbaoQWemnA6v0p1zmI/aWKTkSmLSDnkUcZCI6H88BnsNGQ0UIwSB+F6d2Q8BYGp0Jw7d29yehSFWV0xMZfnZv8XYvg0NWQmy2YCXCnNPinIjG2bvx1tMR118h1vtMnfNASGPEdZDmljiWRtnTiRZJevIpKYoCzxPI2XTaDdRo9GUK1gzbfAdSjlQ/zBLmUZ4s1V0KPROxbzFp/raPXlbWMSjuDoJJXvm+1abA+1t0/8X/4rolpFlN66wY8rOMcE54N+O8c4x
vcE/XHEK9d2qP7pn1LZ3WbSmkcxMwTzbItS54Bo7QLyZz+F0MFh6T/PZyfnu6sutj2d7vrW13FbFaRlyvtb4RyxssmUTZTbJJOUfHEVZQeH9vDFCbvQBNVqpn0xMesM4/FbP4fvm8rKstjGe/nr6C0zOZLaAa+WPsaGewHV7qLGE3K/zDhxsbIxfjoEpYjxybwK9VMN5lbLh60NKQ3RKohAFE3H860Uf7TPqdIeuRWwHc8RZjZ5nFKPdwCBnacIR3DdfxiFhUdEhSGldIiX9Nh01km1JI976LgHdpnAkuROjcSdx2gnFF7WI8Mit0pmMk06gCLIOtSyHpPyIpnwKVsJoXKQaFJlI8jJEczNWcyv+vz1v26Iw1e+MmsXFe3EbheSMEOkCRpNhRFn3dsMrSZZaCIGYqtMbvs0ZJ9y3EW5HqPl8+RO2ehFtELHCULntObgoY8GLK/Ie2pOCkTRzFcmz++c7CtCb4uWWTEW7TiGyBRks1o1+qsipf1eTr5FNa8gFEopmIQ4JPhlm2C+jONA9rWvo3b3kfNmVKtkx5RkhHv1ZZzt28jVZawHLx5OAKr+gPyFl8DzsB57BBH4ZGHK5GBCUpoj+/iPklQXvu34990odHFHiUvxWd8JvptJzu9XvJv1+9gH5xjH+CsArTVXtzpMNndZbO+T1utIKUnTnO3sFCBwckFQXsXd7hNdvc2Fx09xZt4/XLSMqNcsSoXgsoiRCEOXvpiHkYuY1ukFUHciLJGj4pSxK1GLy+SOWeDS1Dx2MDB+MsbrxyxwpZJZzObmjH4hTY3+phgXv/XagBtXdiBZwSmfolkNWRPbfKT/BZ50vkwWJ1xqPs3V8So66iHznNx2GckaQinKYZt0I+La8AROyWNubraQ1utTt9ru9D1OBLGq8MakQs2dsOS22c4ahBoGsoEUmqrukcgqa/FVbtvniC0fpAsyZ5gNSbIDcn8NvBaEB4AgwcaWklm4DyityO0yiMJXX2CpkEa8x8BbIExtKk7EICthociQaASO0LREl49+pM6pR3w+9zlDEKeRSMCsQqXzDCuNmKNNLUhIRMBB3CIdp2TKoawHzGWbxHadgb9AWD6JM19HBiVswMlGnJi8ybnRC/j5GDFwkOkq+pmnYVrFSNNZZluSzMhMkszIB8z0W0UV5mibqV43/z563A36iv7tIXmUkOAROxXiRB7qywqy4Pvm8c0mgESIMlA+nJbyx/vk/eeImzUmQoGwaNoTRJQwHqUM62cZjyuM95ZxAouSjIm2crT/CMpysCcnEPk03qSpTSzLS9ewP946rGgVrbO7Ky/vxGTvneJeZnx3T3J+WGMUvhc4JjjHOMZfAYzDhL3uGDeJcXRO6LgkuUJrjSdCYh2gwoQUjZUKBgeKV96I6S96HJ3MAHOSbjZnhEBKGI9KdNtDhhs9VG0RhSDKXaLcYZx5qHGKbFRxagGI2Um+8GspWhF5Pmsx7OyY+9j2zI3ZiGYV6uoVxmlEVmuQIdiPffZpor2HkJ02tckmJ1d6XGx/kY1JjRvlR9kQp3FIcWVEZrlMVAVGEZWGQ78v2doyr9FqmTViedksxJ19TdSHiXJJYpt+UqKu9mnlAw6cNTKVseueYkFtMbHqLGQb7FuniJVFnJdBNCHqgA9ggVsHy8eyLTR3rnaZVTLkxnxy0AmluEPXWyKzSnjZhKGs4ViQYkJbq/aEltrnR9dv863eT/Lc782qIUky81kx1RFFU+zjZCNSr8pQe2QJ+PGQatZlJBqM3Raq3MTKQ3wrQc4vMLda4qGHYFlvkv/R0ZHkGnmY0rnaY7j5MulHfEK/dUhE43hGsgoyUyqZ1mThrl0qzab8Cq1Nlpm24dFKTNYbmEnF4QhbJfh2StAqUbt4Fmt5Zo9QkJgguFPbMh4b0nr1RkrnRkba+RihWyHTFmBTswd4KsOxlskdjzyFdDiHigPq9CDO0cECZDkybHIqGBJYKb6V4C1McIffImid/Z63gt5ix
teZmm2un/m+MON7P3FMcI5xjL8C6E9iojTD9jxSYZGFIbltZrQX4xtUem3cMMTKjTix3ow5qD+FsC9CbioyRdm9mMrqdI6+gkSvP45qP4/d3qHcdPFLGjuZEHYiJvV50ifPQdNMoRTPYdyVzU+Rl1OMORdtsWJKpt02HipkGewv4thQTlOk0CS5hcJCCkXJzUhHId3NBnrioz0blGBR7uCIjKEu0ZHzuFJh5wPiic8k8Q4rUpubHIp0SyVozlskScgwTAh1iYn2iPJFfLtCnR49OYcAduUpmvk+iSxRCfcYuYtgVcBOwBOQ9sBfAH8J0i6OUsT2kRh7rdDCBiyMokbjZGMmTpVceJAl5HYNoRUpDg6KOWvARV5n5LX4o/6PkPQlUTSbKoJZZazRwLTy9jSpVUVqiVaaLMmIchecBi45Xt7DLZdZq4WcHz5HrbaM+zM/D0D0e9+k34fB/OOMU59J6DHKfCLHJu6lqG+GyBMK15Vmsq4ycy8u/F+knOUiKWVIR0GG7s5LKqof5aQNN59HhxGyVsHzNU46QbRvo1/YJHn6R4hK80TRNFx0YrZBQZpBk2aKMM4YRxnEAZ5YQOUSIUGSMciqODomIMaNI7x8hEwjpBD42YRyuI/tu0itqFRSTlQm6KmQSKcJajQ0Ivv34Pv6bmGdWEOurnxozPg+LDgmOMc4xg84+uOI23t9kjTnwClRrzWp7e+SNFu4UUhjbwsrzcgcB6lyslKJUmeDky/2aJ4r0bh49o7nOzQcnE63hOF0MZ1rIZ/6KNmly3TbHejmYNWQ8+vIB84j51oIYVoHxVRN0eIqRrUL4nPUH6cgOsUUTTTISbUmFj5R7KMRJLlJj7aEYiB8LNdBjSSWLlNKh5TyIdrWdLw50tzG9zKq5Qw9HtFJF3FdsKQiCVOiUKARCCz6fYllSSrNGrX0ADdLiPGZ4DKmRkQJV0dYOkNLQcdawlYTJDGkQ3Cq4M2D3oM8Nroa28OKIizpo94i1ihaVgJUSKpycMqgcrArxo7AsnCTIfNWh0V7wiXrUSK7RjhyD7U2R6ttlYrZblkGaQSJ8lB4WCrFS0eUwjauTiipESfSq6yry5ROP4io1RnZc9y+kTH56ojRRNC7dIrYfohJu0SmLVyR40wn5uqlDD+9SWmhht2oHUYKSGn23WBQkBiFHofoLEM6NlYlwA/koame8XCZtXi0Ugy+dYNobBNV1wlTjzS2ybSFskB3I/SXYvSyIldmgizPZ5EGiowoTkh1Sq4zpKewywmL4S3q4zZ51YzNaa3RSlHPNqgN98nKLhN3DlukeIQsRLdxxhnu8iJus4weDGfhk0liEsa/9GWEY38gVRMh5XueWv79jmOCc4xj/IBAKcVBPyROMzzHZr4eMAwTLm20GYYJSgNCsLv+AMFoQKXbxh8PsZKUzHOxkoTMcRkvLJEFJWr9DtYLL6LPn7njSvCORPXW3e+ihfp4k3Crw6SXEuETunU08lB8WQhK70ahxTC6CQ7deI9OoWQZ6ECR7u+S2SUiWSLMXByRk2tJqi3CxEKJJtJSIDQj6kgnAwUyVXh2SsXT5JkgERVcH9Ah7vCA0djC1i6WUGSxS+ZWiKVNJAPsYBkrGuGmI8giUssltipMlAMkoECTk2kXYfuQDMDyQHrgL0K0A2EbvAbSrZKTo8SRU7CwmOlxcoTK0c40bdV2pyxSYU/26YZten6NLU4i7DoitQ+F0jAbl7esWdvHCMklvkiQxNjJmFZ4m6XhVTw9IREeQ1nlTR4k3TnDcHKCMHWYhJC/6BiXnnQex5K4KAKZ0PKHOCLHszKESkl6CaOBOjSbPArXhSDp4ty6jOr0SDIJlk1Wb5GdOEknaBzaDhSVlzSFdJKR7q6SyzPkQ2M+6MgU38qwUFiOixMP8N0mwVzpsFrkupDkCbd2+8gwRkgFMsN2chQZTgmWXrmG1w2JyjVS28bKUqrqAI8hEVXIHHLbMdW9XKGUYlSfozkYoq5cN
Uabvm+yp+o11O4+6R9+7m2DPI/xvcExwTnGMX4AsHkw4NJtQ2S01gghqAYuvmuTTU/MAtP4GM3Nc+Oxpznx+kvU93fQUiKznH5liY36w2hp46QZWW0Oa2uCutnFW2kdjum+HaQlKZ+cp3wf242C5BTOs4UJ4d3TJ0fDBd1pWGmSQFINSHdcdHsXVWuSeTaZMrqVJBeEe30mokoUNEknGTkWESWQIMlJCRjGPgrjHeOMwRt10XmG6wik1OhcQRahI41TMaZ24zQnyTzyzMJjjJXnCD1CaIvM8jHVlxykhc5jcOcgm4DrmL95i1jRFrmKSZ0Gady9Y8T6rq2Etjyzw+S0ZZXHkJj3aVdPIu0GGSCyHMsSSGkdTuAUuhXbNot9rVa0Ay1W7C71Wy8R5i4DatwKLjJx6iTSvJ4WQGgjZRlHKCDFD4Rx4LXGWMLCc0zFK1eW2b65OxX7WASehV2aVfoKPdBgZ0R+e584aZC7K6TCJUsh2xGoPQWVFGzncN8X1R9b5/giwvUTPJnjWBktd0TNNUplhxi3s0P14TrlcyU8zzxOa81rN3u4UZ9mVWHbkkmUIoAkU4xbLW489jRL1y9R6bZx8wxl2RysnWY0N0+1s0+l20bmQ5Rls3/yDKVwTKnXIzvYRUQxuhSQhxm272GdOY2oVt4S5HmMDw7HBOcYx/g+x+bBgBev7pBmCs+xsKQgyRSdUYhSmrJvE8b5IcEBQ3I2Lz5CpXtAWK6gbIe+vUCqzSKXK4vUa9CejOhvKOQ9Ki4FinaIZc3ym1z3zn8XvjnF/QttRVGtOYoiQ6kgP5OJIUBF4KjvS9Qja+Tf3EUPbyOqFVLHI0ok0SDGzhKqczbKzhmPx0SJRSZdYrtMhkua26Z64rgIGZANI0IVgKyhc4nMFVLkOJbCy8cwHJFSghQcpZC2R2ZXyZGIPEHqFCtPyIVlqjUAVmDaUU4Z8oTc8kEIUnuOTGfoPCfPNVVR7JVpWwoANfu3EOZ5sglkETglstIKptZjHqd1CqRIWSIMTUxFEbINZhuvrBRRH5LN4QkuuTUyZdphAoUGMxGFxhYKkYY4cYbnKIKGh3/CRQpIx0bsi1sh15Iwd0iLjK9JjAoW0VtlstumCjPT1Cj0QKGzFtKWSGXaibad4zsZbtzFl32C86exHXk4Hl6pQCUL8T7/BSp1G6vsv/V4mUzQ/ghv0UEesYIZRymdYYjWGtexyJU6NLpUGrQS9KtLdB9ZwhuOEUlOZnmEfhWtJboqcBcnWHlKbjnEfgl/PGRl5xat3W2wJULZiKbP4pJkrmb2mWw1UZub6IP2ccvoA8YxwTnGMb6PoZTi0u02aaYo+bbRtCQ5aZZhd7vYaULquKTV+lusZDPXI/UDlOOSuR5lhpQpshAESwS4PshlSV6bTeQcNQA8WnUpFrR7mfXdC0ffzt2EyHGMMLXRuLcDbpK0GM49xuj5N5jsDtHZiIq0qMz5aPqIOR+9fZUwy9gNzpCkGW6ekQmbXNsoYSGcGnnNI26HJNIn1xaZNpGSApsQBbmDQJOjUdqQHldFZLJEhkILC6RrNDs6R6sYsE3VpWg5WRKtcrQA6fhk0RBLShx/jiRXuPbRD6imP9JwnCzmkJYGC5jFWWMLAXkKUiCEQGcZjpfilj1s2+icLMuM1u/tGeFt4bmi0gAlFZp8uqc1Th5RyobU6aK0Q6xs4rjKyGkwsBZR1422RWXrKDVE9UBIC1UIZpRCiBLCqiAm8lA/Mw3YxlIp1uAA2wfP0/gyYUHsc9LepOLlCKFgNMZ76q2GdFo1SC43yK/fJAvWyLXR32RakuaSdC9GrT6KGLXIB7NIknEEO1seYWyqe7lS5Erf06MmEiUo4iOOCJ3joHzH8RpXaxzIUzSTCUGjguNKnJJFxRvBdHvieehOFx1G9z7wj/E9wzHBOcYxvo9x0DfTPZ5jGXITp/gH+6xeeYNyt42clt1HzRa76w8wmps/fGxYrTNqtqjt7zJquncwC
aEV2e4men2d2nLAfPWdvZ9iEuooGTr670KbcXc76n66nHvh8G3KZaxnFilN+th5jFtxsa0c/uD/gWREONwh8urUrOsMZYWuqpMrTaZsVHWOTPfB8UnokntlMm2RKJtx5pFrOSU7FnlR47A8EgQIjVnMBLE2KVIIDdKeLvbZNGB0On2GhZCQZzHC0ni2g0aQ5QkKiWWbDaGV0QyZIkOGI4TR8AgBSDQKBVgoU9XRCrIEIQSOyJFxRmZJksQ5jJ8otnWx3WwbpADHSslVjpYuGTa5dNm1y+zpHEmOUMpY8bh1SH10Yj6SEB4ikMgkxM5CHJ3iyAy34lBarVNecA9Hv31/JhjO9gbEe2+i63PkUUrWHeKMd+jmEw4slyyogl3CuQSye7dRnkQt/Qj57RL6RoSoVsCxIc3QwxEiWMBafxiZ3NkOkkIYDZfKQQikFGaCS+VIqRBSIyxtwkSlRljT24T+trEStWDA6uYGjdoCsnwPu+Y4RjgOInhrtekY31t8KAjOv/t3/47f+I3fYGdnh4985CP8m3/zb3jmmWfue//f/d3f5Z/+03/KjRs3uHDhAv/qX/0rPvWpT30P3/ExjvHhQJxmaK2xpCBMcoL2Pide/AbOZEJYqZLZDnaWUtvfJRgNuPHY0zOSc5fgOKpUyW0HK0vxR0PyRo399QvsbHV54ISkfo/2wN0QYlaJebcoRKX3IkdHx4ePLn5ZLsncuglF3E3AsVG1R1C3NshZQloBINACtJRYWYIV2FASqNEI/BaeE4JUYFtTYmMRxRbhOCUWPqmWTLDJZYA5Zcrpj8K2LHINVrEgCgFWcVpVoAVKSEDj2R7oDIRG5CE6C4mcGkod/XCQqwzHDsgPW1egtUKpFHdq/oeQIBRgo6RNIiUZEjHRaKnQWh6mVh/dN3kOQlhIyghhGKUUetruMqTAUhrbznEd8OYlQc1UYgrhrm07qNwiG1qoJJtmJwTYtsS2zb7X2uw3257qaHwX7WgYd9DtNjrN0J5HJI1ZjRpOTHVpNEbMLxy+3wLuUgvxiScQb76Otb+DFUY4tsA5u4D/9CN4p1tvMdBTyqKbh8jhxAiTHQulFHF6DwX0u8AgqLHplhG3tmg8ePat7sHtLtb6GcT8WxT47ws+yKDfDzs+cILzX/7Lf+HTn/40v/3bv80P/dAP8Vu/9Vt88pOf5M0332RxcfEt9//KV77C3/7bf5vPfOYz/PzP/zy/8zu/wy/8wi/w/PPP8+ijj34An+AYx/jg4Dk2QhjNTZ7nLF69hBNOGDZbhytE5nqMmi6VbpvF65cZHflbITguhJaFoHKwsEzy+GNUT59gMEnY3B9SK3l3nMzfaxRjvf67uPDNNzZJ//KbxJt7JDFkTkAW1IntiCQdkSZGoRyoEX7cQTgucm4JLXbJ3AR96gJxpEkO9kn9ObQ2C0M5S0izHpn0iG2f7UiQu3Mo4aKxUVgYybJEaUN6rIJ8FKQEiRKAVlMCNNXWaNB5hJQWvhSkOkNpkNJCaYFtFb0SgVIKOW1DWZZjqjaHra+CRarp6xXQd4h0YdaeMgRUYgUWXruHH/dx3RxPpDg6xkpTsCykZeMutPAeWsWZtgxt+6gLr4TFWfumwN2Hh21PH1epold8+NZzWFmMW/OxRI5NZqab1BCkxB638B44dZ8FegH9VOsdL+aTOMOyBL7rECcZWZaTq/cgmUgIdtYfIHj5OeSl69ROLn9g7sH5xubM4O8dZmP9VcIHnkX1Qz/0Q3zsYx/j3/7bfwsYTcHJkyf5tV/7Nf7RP/pHb7n/L/7iLzIej/mDP/iDw9s+/vGP88QTT/Dbv/3b7+g1j7OojvGDAqUUf/HiTXrjCLfX4fxffpHY9Uldj06+RKJnbMHKUuw0YfPCIySlOxcngcYNjaBS2Q6iVkNKSaNiFts0V5xZauC7d5ZmtFYwGKITk6xMrYq472TQ/XF0YSwW4rv/dvdvdbBP/rVvoMOJCRvUCh1FiMnYTC4NhujBw
LQLpIRKCWuhhSiX0Lv7yNUVrB//MZLdA+KvPk8ySVHlGpntk41j8s0dhOuglla4vjdiOEnxSyWUcFHKAu2yI3yIJdJ2wfLR0kMLl1za6Knlm5HYCGA6KiYsSHrmNhWjhcPepEe9sorjeNMWlULcMTY+3YeHNMYYAaIyIEdIiW8LLJ3i1nzKde9w7L6YQisI5CHx2d5AvfIqKozM5Jdlyi/CtrGqJZyHLmDPNQ7Hz+9+/DvluocZUTdukvzxn5j/BD5Ii9PZZZxwiHA95IlVgPcsGLI3inj1xh6OI+kOI8ZhQpJ9d9Wbo6h0Djh56yoXLONQKBwHubb2PXMPzjc2Sf/wqKu0B1GManeQ1QrOD+io+vdNFlWSJDz33HP8+q//+uFtUkp+5md+hq9+9av3fMxXv/pVPv3pT99x2yc/+Uk++9nP3vd14jgmPtLgHwwG390bP8YxPiDcqxz9wMkW37q8g4gTZJaRlgwJsUXG0UBwZdvIOMRWKemRxUlr0IhDQaUlwJcSrTW5AseWqFSR5XcKNFW7jbo0TWrOcrAtZGsO+cAF5FsNcr7953q3l1lakb1yFTXOodRA73XQE2N7rIVAkCMXV6C2bLZVtQKVMiLN0NsjRDCPtf4Yciyhsoj+6MewLl2Ggw5WNgLLRdfL5Dgo12duzmaS9hlHCtsVOBoiKQnSBKRLgiLPYpAaYeVY2iSYCymnc9f6CCPQYLnIPMFLumiV4lCatpBShBAopclViG25ICDPM9M+0omZqErGkIcwTaNaXZqj7ElEHOHUlpAlhVXykJY8JChSmNDJbGqwx8Iq+qkq6uZt1HBodoLjImtV9NoKuVuD0bvcL/dA8bF12CCrP2j2UZJAplE6xW64WK05I87tdnEvgxzcn9je67Z7/S1MLAZdDyEgSiCMLNIsP3LHo+/yHrobYW6/465H7tMJVuhdXMaZD1gqOYjAQ841jeHe4Lt772/3WytF+rXnyAcTrBNraDGtHZYCZLB6PKo+xQdKcA4ODsjznKWlpTtuX1pa4o033rjnY3Z2du55/50iuOYe+MxnPsM//+f//Lt/w8c4xgeI+5Wjl595mifPL/Nar0suLewsJXM9arJNjfbh4+0kxhUhw7kTWLX7J/5ZAmzLwrEly3MgpSBOcy6u16kEs/eSfmF69Th/9OrxCvLV1+579fjd6gWKOIVku0M4eI2sZJNtvckksWmXTpJJj0xJ067as9EXLpIHGWo0QYWQY6FKZ6ExD5tl8lvFM7fAb6LmE7JEmYmqkiLtjkgPBLlwyaqSJNdoLUyGlADcQo8Ds9aU+W1TNI+OQAAqBQRCK6RTIteSql1jGHaplOZQSqHQpoIjJIdTVWBEx5YPTgPIQOVocobKJhpMcDV4mz1ceYBT9XCW5rGbFeyoj7W9gTXoILMEyxY4c1W8i2exLj4Mw1kVTtSq38af5147RRkN1Ns8XpchvdqHIEAg0LlCWGuG2CDQcYyWDrntorJ7vM67ho3ObPZ6odmmWqL0e7vYC2Cr2kDW6pACu+/p098Xuj8kvWEjgkcQI1NlFUJzvrqDEOJ4VH2KD1yD873Ar//6r99R9RkMBpw8eR8XsmMc40OIu8vRYkoo8us30Qdtlj/1SUqfeIS9629Q3dig69w5FYXW+KMh/YVlwmr927+WhjzL0Wj64xgpBEvNCmV/OhWklCFawxHyxOpMl1MK0P4a0cYu8VdfQn5yBaVN0nOeQ7K1R/LyG6R7HfJUoWwPPddCri5DUEbZLqpcvcNu/+iPUkemsAYO2egRUwlglahSIxTTtpsFKgAVp6h9iVo+g7IzslSTY4zpdE+St2fPaX4k00TM2QQSLno6taSR6KMalLwoOx2pzBy5xL9vB0dIRBbhJW1yyyPyl3AQlIRLkkU4doBruUeeT2BJ05LKESaeUypQDsgcLTTjHKT0wJJGAp0rZC/F7iWI0gQZZUg1j7QXcWyFq2LcnQnuwT7eOYdgvm4ExIA9mWlui
lZU8f+7f8u9LfTz30Bcv4aOYoTvIc+u4/zQx95CcLWqkmym5Nev3HncMBXn9rewzp7B/Vj1XfGro/vr6P+1hnY2YSSGpjOoNW/tUE01UXc8eHo7R27Xb31McbPwLCoNj2pw17Fzn9/f6d+O/lbjiEQNESUPZAwaas5k9iTHo+rAB0xw5ufnsSyL3d07ae/u7i7Ly8v3fMzy8vK7uj+A53l4nnffvx/jGB9mfDtCcbQc3fiFn0f+7I8y+L0/pN7tMC5V7piKioMSO6cvkucWWglTiTj8LQ//z/R3bFmM2gLbEnhZmUuxGbvV/SHpNQcRPIYYmu/VOPdIlblCzu0z5NcyxF+EMNX66OEAdWUTFfuI4CwEtskjutSDNy4hSmVwPWStglxbRVTv7K0XC62Uhoxoy0IhSSNF5tbJhcuEEhkmUkAjUC7o1IGeQknvjhH14ndBbo6OUh9ud228XkwulTTT4UKbipNSaBLyrHiQzbQ8w52mffeAkAjpELtNgmwwFcTYONLCwUOpoo1inkdPp7HEVMSc6xxLZxy2vYSFxox1S2FG/KXUaFxSlSEmMRobpA9KQKKRoozQdWScIC9r2DFVomI7H7oI2zMNTzEd53nTv48H8Np1ZF9iq1NYpNiDDGt/gH3leewf8bGOTBLZtkSe+gRsgXVziNssY/sWdjpBdto434U49+72khAwmiQMJxGeY9qtSaaQb9ktb2VGwbCPnZrYkvAe/lFHYUlw/YztfptqtfWOJg3fCyhHEgcdhEgRwfGo+v3wgRIc13V56qmn+PznP88v/MIvAEY0+fnPf55f/dVfvedjnn32WT7/+c/zD//hPzy87Y//+I959tlnvwfv+BjH+N5DH7RRm1umcjP19tgIW8YeHyBooa+H2N8YEvon2Dv313Fu3kD2uqgwQ0uLsFGjt7xKRA167+x1ldCUPRshBeMwZa46rSikJvVSeM7hOj5KfaTQSKGwHIETTnDcGLtRRgqFvv4mpPvYiw2EGMFkQjLcJ8+Mu6xWIaKyQDrYRsU9eOBBslLjjjHx4kcpyLISOWuk1gQlPEN2cMkxM8oaZSapkca4bXqmK7xgiqBPmBnDHYphp4SnIFSFaLq4vxCQRMZPx7AexazaIqf/nhGd3Djh3LltpYdEoZQ99czhUIAspSDLYjM1JcSUxxSPV2QqIxcCOX1WGxPVAJBrMaVjOZ5MsYhRUYobWCabSmhcmaKznDh3yBBkWYYmIMO9gwQWE1N5bsb1i9vNtlHovQmMlxD2EkhpSCA5Vh7j9mKcL3fxH2riuvKIk/UC8vRPIDdvY3U7WHmCbZdw5s7hXDyLNWrBXeqEo/zCsmYVpHtVlY46Zg/ChDjL8RxJkiksKRFKzQpvd6HSOTgyTXh//6ijaFYD6hWfwfh7M2l4iLkmolFHXb+FOLGKKJcOX/eDGFX/sOIDb1F9+tOf5pd/+Zd5+umneeaZZ/it3/otxuMxf/fv/l0AfumXfom1tTU+85nPAPAP/sE/4Md//Mf51//6X/NzP/dz/Of//J/55je/yX/8j//xg/wYxzjG+wYdRkZz45tqidKacKzQ+RhhWWjHgSxHJwmdBMJqjfSxxxjsHkCaomyLtFxCWBpHpIdmZgiNlNoIY4W+w+TMtSWeY7M8ZyOEJk4HnDgdUAlc1B7E39pA2H1EKUBrWPa7hxWcLExIvRQ9L8lKkLRHhJ0RWWmeLPEJc4dxNyCnSR54gCbPBWrUQEmbfJihXslgzvi5HK2wzMaeJaJSQ0/iaXS0xCHBIzKuuUIhHQeFha6WSFGoJCfPFeQSbOuQuNhHyA8opMqRIseypyUMJOPxjGCJw6kozCV8Pn1TesqURBGkcGfLanqn6QdxUXnIxG4ag8A7pqPAslw0mjQZY1sulnSmuhY5HSPXKJWjdYxlW9OxdRAoHJHhSGVS1pVLLiRR7uEIKDHCzbqU4yFlrbF1ikRhrz+CWDVTTIfJ4+mRwMt0Ri7zHESWIsMRF
hGWlCAkng6ROkMJSa406WCC6saE/p0VBqXqqFoVEaQIlWM7ArviYfck1nA2sXWUJxS3FWPnR3/uV/DpDC36exUsS5BlCoTx+cnyHC0UQpofaWkqgzYrb7yCHw6JywG58238o6bwHYtmJUAIQcm36U8ixlFKJXDv/YbeIxxq8ba2Ubu7sL0Fcy3k6ZNI1/mej6p/mPGBE5xf/MVfZH9/n3/2z/4ZOzs7PPHEE/zRH/3RoZD41q1byCM76ROf+AS/8zu/wz/5J/+Ef/yP/zEXLlzgs5/97LEHzjF+YCECH+E6EMXoLEVvbrM+GIIywY4i8BHVCvnpp+iPIuZsi4PBBNtWaCwswGHytq9TQCuBpV2SSNLvWQglCfdCLm0f4JfLZP4cof9xkq0eeX2OXJssIgCNRg9GyEYdq2uqRaqnyEMPIRxEDoPUJU8StGV0QkoLJvhYylzpO67GznrYgWlbZRMj/MWyEI6DZZtWit0sIeKcrD8gyRwS4ZFKn8QJ0NLFVhmWZyG1whq0UbEyFR0hcVyBUy9RavhY1pS8jEN0v48MRwxVmS5VY7Zn2YhpOaM4FSklDZHRCmFpdJabaanDHogwdR2tplfWBdnRHAqGpTt9Dg2i8NEpHi7INZScEqQDEK7R/Fg+uTDtMCktLCuYKnNATqtGmXZQmcKyFA4Znh7hiBilbaJEMNHLYK8gEAR6RD3Zo3ZrE7tawWnWWFw0UQ9aw2gE/b4JzTyqE8l3usTXbpB7JXLhoXI4OXoNLxsZIbeWxttHOHDmoTsSwovtl+feHVqrIpTzXhN1hy7M0rTLPG9WsZntk5lWK88hSV2SsU+OApFjSW2qUlKQZ9lMCK01zet9esk8UW3d7KdC113T+OMh+lLM6HwFhMAS4DoWyrcYdj0s27Qt41QzGCrcKRETvPcGfHdo8ZaWEM0m6tZtVLuD6vXg1EmsC+e/Z6PqH3Z84D44HwSOfXCO8f0ErRTJZ3+f7LU3DqddDmOj0xTd6SLnmqS//Mu8mjl4jsVWe8gkztAKVC5RuYVSEpVLdG5+w1SHkhsNjsolWgkEFrY0C3o1T3AO9rHGYzwpkLaNrFVMeXx7ByYhjiewXIklNExChO+jH3oIWW+gej3k1Utw4yo2GY5txtXTUKEqNWLhM8l9RqpEXp9H2BZC58hojLMyjz0eIEcD0kwQWyVir04cNME11SyZRdidXWQWo4RNZnmk2OSZQEiBW/Pwxm1K+RAnsMmlR5xJo1FwbfxTy1Cq0N4O6exmpIcuxdNIhClpcH2L3PaIY3lYxRECbGnKG4mypguzNY1UECiVI6WNVfS7gDsEyXlopqK0AnHUtE9MHY8tDuewssgsrumQPE9JvHks28OyigXTmOY5KKNT0pJ8Op5ukeGSUdFt3CQks8s4IgVt2lSpWyHFBdvFqleoVCXNpskCK1AqweIiLC+bf0++/hI7/+cfMqifZEidKIKF6LYZwRNyyjBirJOnsH/qx++wDSjISuGOLKWpEE0mMB6bn7sJ0eGxqt4qPC9+H4Wp+Gh6owmTLETIDNtVU0KkUOTkypB5ZxKyeO0KqeOT2R4CRZm+yRkDZJbipAlbFx4hK5WxHQtbSqolD8c22z9XijRTnJ56Rd3PQsG6OLNQKLbD/QTcdxorzs4D+fWbd2jxtNbo8Ri1uY11+jTu//f/g7Q/8NrF+4bvGx+cYxzjGG8PISXW0x8l+frzxP2YtLlIZpVIcoskhXTuPAQl1Jf32D9xjlxLegNBkhjyopQAPSM0b3n+O1oBGsfVWFZGJRmxtHUFGQ2hWqdcK0MWooYH6KFE2iAmu4iOmdQQvo918gTWRx4gqzWIdruMX7vMKLLIyhdIM4G2bEQaYhNiZTaODTKLqQWQeRGxcglTmywPYCtCagvhLGAFIPIMezJAJBFqfonMCtC2S16bg1GfIB3g6gGulSNbFdL5FfLdNkL3sWoBGklJJNjC4oAa3TAguyzBy9EJ0yRtkCLD0Qkl3
SNKPf7/7P1JkGXZdZ6Jfnuf/vaNX+89PPqIbCKBbEECJMBWhNgUy55KZa9UsnqcUBpLGkgySQNJA5nMNJCZBmomMtOTVDXQk8meSeJjgQRJgSCJJoHsMxqPxsPb67dvT3/2G2y/fj0iIxMJIAgkAP/NjnnEbc/d55y9/7PWv9Y/Ic8otRBGohvquSb5vF6Ax2OLBAOkIk0SsiwEoQvEhRDHYmCAWdO+02mrYz+p0yVDKjsO8hhIFZIJ6/j/DsRjUrOAlCH2ZBtzqYEpqgQRkAoSHBIyzCwmL30sIgxDMDUrhKHEz5aQpsIiJacmrEb3cYyYKLfEROXJogRZtEkMl6OjeUTFMKBYhF4PHjw43s3JRVTpZYrBEevRO1Qne5i20CJmdFo1NAvEwiO6/y5x42c0oRiMyKKIzLaJn1BObhg6eiSEJlKep7eZr1Uc6/2aTHR0aTrV2yyl9ijRETieTbMbMwnAH+nHUDpqqFLIEFihjRTrpMJEZhkWEUXZwRVjTBFhWyEV/4i0tMh0QVDMOZTzDo6dHOvBYDCKWXQ91pZMor0j/G9/nXgSQbGqm05GMdlRCzUcYbzyErJeRylN5Gb6pu+Embhf5m7AcWn4Rq6Na8SIQgGxsYYa9KHbg5/g0vDTOCM4ZzjDDwEzr57ZBHd6e1z0CqAGJeLii2SWD1FEHJp0zGUo2pAvIqREHUVEnmQiDOLIJlXapdkwM4RMEFIhZcZxcEbrbYyZo7QW5CoEhjCxpKBxsI0RDEkrVUo5B8sADIMkMQm3mwRWjmjpOfzMIY4VRAlMHOx9G3OcwVYTFYJd9rDDiKzfI4kFkekRpx5+bCGUwDALKKeIih2UUBjhCCPNUJgkXokMCSrFMC2kZZCfDrHDBHutAQcHqMkYlaVgCKTr4GysohaXsEY+8bRD4ngM4xzjxGUce6RKHFdGZUiVYAiDvOrg2JB3YvxAchQUaLKieaFKsLIAL51QEz1G3iaDYYE4nh0jiW0nTBNFJgRS6GIJRxinIjGzUdYCZKFSBFKbeD5OfKREJzgkXjIgkHkdTTBsiEdkVg5ZXKNhQ3F6lxTBobFEoBzSzCRRNoPUQhoZeUdSKFuU0hbxzj592SAUDgNRZuzdwBYZXppRswaUZZvqWg2z4dLpQKfzqPC61dKpqiQBw8hRKF5lGgm6sgTVF3R1mIJFf5tVuUVxMU9hzYPeu5hBlWzr3iM9nJKVDeLnXyGqLDOdcjKes2081tvjmBFy151HlGau5aeh014WrZ7Hne0B+62IKJCQ6QilUpKJH2PICK83OnaZ10tiK9tAZGBIQd6UHBVi4ugKg13BOJzSNzJMN0XlXDAT8nmbhaUCjq1w73ydSnAP4/wqQnTn13Bdke3ex2j62D/96yfpqhlJmpG0J/3NMlDH4n5lWVrbDiepYeCsNPwJOCM4Z/hE40fBSE4piKOM8KBLOI6IDZc4VyFO5IlJ5PeaCJ5N5jINMa0Ie62GE0+JIoWpTAxHIuUUshSRdJEby+xbFoPphCCOSY9TUFlikCaGLgE/KZMWGEpXScljt2WB/uuFPtZogCiVyXk5ummJMLJRSiH6GcJqoKSBgYntyOOMkSQbjjBaexjeGslkhJ03ca0ATMVBukIUJGRxijJSiBMQFqlXBNPRXXgnfZx4SC4Z4ZgJHhKzWuZIaAGsFArL9FGDDmLaxEp9yBeZyrzureMHjLdb+GGRyVTiR2skmaN9oZQkQ2DKjJI5pmJNiP2EwkKZ6d6QrlqkNakQZ5ChMFSEnYXUkyMq0QF7hWvsqzXCoQWGro2aVeiGoa4sMoVJGE2xLQ9SbYEwJzgJzMTAWXDyb30wjomQkJCFWngsLBIsCmGTqVEiNT2w8jjZAFN6dIYeflqkTptn7HcZO4s0kzrTzCHCJMNkNBWMpykd8pSoUEh6XEl3iNwibXOTSHpME5tx3EBmNZybRSptqFZhdVVHUxoNGA519KbXm+2wJHUaDKIAf6qIjRyGSignH
aQdYuRdWitriNhB9ZtYv7NFQY4p16vYDQOCEGP7Hlb3iPKv/grGxQ/qRdIUfF9HaHxfk6vT15Lv663b/cBbT3r35HJQKTj87MsNupMx++0R0zDQ0bVjvVSrO2V5/B65dpdBZZlEOcTCIzPzeF4ZNRqT1RqktsTe2SfpD0iTlEwKEi+PWl7BXqxy97bF1mRCsrWMdM7BkYUlUxac4TxK6tWwHgwpvD/AXa6eGJhalv77Uciqx+J+S4v7P4AfQGn4j8J8fBpnBOcMn1j8sIzkkuTJkZX4lO/B92JZIE7doNv28cRmZtiTLlYSYBcdZOPJE0Z2JAhf7yCIoOQRZiZOOiXMLILUIgozMsNC2kWqrkfom4x9nyhJ5/uqFLY/wUhjTNehsbFEtexiGrPqIAlKIgUkRy2kITFKOaSEIEpIhQFJhBEMccwEO55QMGPG9gKmzJAqJSgpgkmXeFQhSxSR5xLHx17VtoNlOZiprnQyJ2O8KrjxNvakjxj2wTCRlRKq3ydvheT6HRLfI101EJ6HQDGOLCaTBFyToLZKmhkkqWSceYylSzqNUbsxqVtAYWOSUbAD6vYQy8jwUwdHxPghjFWBo26VMCuiMq0RziVj7NRnY/oengy5477IUe4lAqFFpooMD5/8Qo7xeEZiZyXiEbblkcZTTHms53kkDZXpaFQSkFp55h3j0hMipMvHMxAZsVlAqYyyv83AWiB1F8jMGoXokNT0mRpl9ikShzusRLss1rp0aXDoF/FTi1DmSTKIsOiYq/SI6dOgmPapT3dYa0RMzDIHvRwTr04sDVotHa0xDE0Q6vVjolCBzU2txXEcePAgx12jxuj2Plnq659iWbTyVziwiyjfxvWn1P0J9SwgWbvIIBNz+4fyBbzuLuWvvkP1L63oqrVTMAwoFPT2JGSZJj0zojOdPhoBml23/f7s+BTJU8BQCVmmsISgWjZZLUW04wj7q7/P6niPpFzBLeYpm33M4fvIegHj5SLp6/8fsmiMWK8RGhaBn5H0fJJhleT6z+B7DfyDgIAJqW2TAHF2TGJnh9m0CGODtJsw+vBm4o9YM5yIqs06YvECxs49nHUXeaqpzw+iNPxH0djzjOCc4ROJ79S5l49pJDfr4fGkbYbvJbpy4gnTaWN+6+uY/gCnmsd2BXY8wujcxfwIy4LZb0z+dD5hRLZFtrJO8qlXiKsrBIGewNMUVFYnKbxIdthCLNQQp8qPdeVSF7m0iCgXyQvJ1Y0aa0tTjgZDBmMfo9Ol8HAfcxzhCHDzLnbYPyFh9mOTrVPKEzsWXjYm5wnWcx1cIyIbDJjubjNx60zwGKgLjKK5GlWoFCftUnJiIjMgznTDElfGNJwRlkh0a5cwRBk+1k/fwLEy3D/+XXLxIenmRYapyXAoGckineIy49Ai7hkIUUQgEP4YlURE9hJhnCdJDUaJS5Lp5n/ShFwyYGHNopbbJe30GTtLmEIRpia9MM80sQhjQSJsBBa2EWKnPlVrxObgG8SxYMe7xo5Rpms0UMfN9BwSctkIDJPxyCE9rvwyDO38naQGcTJGnrBZC1DHURndnVgmPuK4HaEmP8cO4cckJ0NiZVNSwyUjIzFdfG+FanpAFAaM3XV69grVtMkSB3TFAkfmBio1uDq+y+qSScMd0OsKDuQqUSbwyRMZJioVDCkzMkp0WOSwOyRvxlwqHtL46SojS7K7qyM1UTTXuCilowzVqiY6tg2lEjz3M3XKzjdxt95mVFlnO91kPysQpKBUiJoEtOUqLaeI6togoWgFVC0dQZvmF5ns+LReHyHK5ZPIS6mkt4+KakipiVcu9+TnZyTndBQoScQjhrE6BeaQX7nB5HNLqK27iF4Pf5jRNw1yi5fIf+oyzs03sIYTzA0t7vXQfqGqYpHt7mDs/in2//zrqKWM8NZ7iELxiVEWNZ2SeWPkRUjKOvIXRfO/T+paPJsHQJKtv0p6aKAeHPurWSbECWo8RnrPYC6/iLcjT0jR7O+s9
9P3iqc1H/+gcUZwzvCJw4d17s08j3jlHOFeh/QrN1GfXyFOJGH4wYnhu8HpC38WWXl8ezy/P9vP6N2vkkbbyM1VhDgO8TgmKr/yiOEdQu/nbLKa7LTw//Q9Ml8iCtehpIWIamuM2HsH4xX7UcNKITGuXYHRCKt7gFdxcF2BHY+JuyOiUo34hYuEx5GCJBE4Ms9GNcdC0CTZvY2IUoxGFcNxkIkWPbqTNqWfe5XCxSVcd15ym8RF+vcNhttdetYGXYqgQMVVUstEhQmyVMDzMqSanDQddJIJjjnFWSuQDW2y5i6ioBsUmiKlYPkUDB97sI06f5FJPqD9JzcZPJgi5ApsxQw8G9/eRMUJhm3gWBOEP2Qa1YmlTeqbRPYysawSxzaoDFsmLLoDavaYgjkh6Y0YmVWmixeIB/fpDEx8o0SMQRSbpAhMkVGsWuTKBueqiuq9dxj0FQ/NS4yMPANrkQQLqSKkMMmJCZaKCISDTxFSgZebaSQkhmGTpCPSNEFJCVZBk5ZZf5ws1ToVUpAm2UxQLFLAQGYxmaGpq51MUFnE1KqCsEkNh1iVqAeHlIyEA/ciPZZIGXCDN7ktnmFg1Hk9qXBx3GbN7VIwhlRUn2G+wVHaIEoNfExipCZi2LRYpKtgwDqF9x3KZXj5ZX2+d7s6ktNsanKQppxoc0BXWdVqEsf5HLZbpTDqUq8OuZo/wk3GpO0efa/EXVZpOwWiTP9ei5ggtTlMbZJUEUUJ3g5UhRYzRxG023o7fY0WCpr05PMf3vvm8evacfRWqTz5NXE8iwAJXHeJYKUBwxFZqH214lKR3nBE/FAhvOdPOndfLh4cV9I95vu0UEeurepKJ+8JdhTHURZ7pY4rPzw6dRqn9Xpho46f/xT+W7cJWkPUJEUYBnJxEXn1MtTqJxGt7zQ2p8foowjRx+2k/kk09jwjOGf4xOHxzr1+arE7mTfZUrkyas/H2tV3fY9jdv2Z5vyiPb09rQrKJ3UYPgrLTBKXVEmUW0fd87G+8dh+zpywp8E8GiPA8CycfA67tUdu7xuUXvsLSEMSBMd30rk6E14ivn2HsN2FXgpGCblwAXn1MjJfPwmFnwgxHUXp4F1sdshdrmHI/nw36opgp0Xwtker0CAI5SmCKMnWXyE9/Baq1YNigcjwCNMcKWUcNcKt1DAlmETkjWgeSVpepLxeplDbxPryu6jmTUQhz9hdoB8W6bZTsC4gs3NkX34b+kOQApHPUUh7+JMuSuaYiApJDL5RI5QW4dQhxgJZx7YDKsaIvJtQtqdULJ8gsTgMK7QnBZRyGE1thqEHxWv4SUAQS8RxesnNCerrec4/6+A40O9X2J58hu50QD/Si7/IIgwgh48lwVYRA1XUqSqgWMgIZwJQBfm8ied5tJIAUxZJlTquotIOUkoIzHRMLu4QmmVUmmiNjtK9eUwVE2GihEFouJgixc4mREaeSOZBxiRmj1JJ4plNHkzrTNI8d9Q1fk78Ll/jZxhQ4u50nf7U5UrJpB40qZZMKknCKHaYJC5JJhhHDsPEI8BCmjbDqclgoklNu61Fu2tr8Oqrc51Lr6fJTq+nf3MYwt4eQAnpvUhZHpJvtzA6GUVzSrFxleqlOp9+4w+wS0eInEemBEFicBjWaAYViBVSRBi2yWgEM1PzJNGRmXxeb0px8vzp69w0tU7oO0V7PgyzjsjzamMJzK/VNIXJnTED1SH01ggUFMzHRLynxL1SSszXXtFzw+4+sl7Vk1AYfs8N+GZpKts+JkT1RdQLCx+qhZkRosejQx8WIfpOhEgNRiT3tTWLnNjYMqZqjymY4Sfe2POM4JzhE4fHO/daIsU1IwRgyQTLCjH9fQr1Ee6l8vcVen2a+5khGEan4uWWCZMU4uik1NV1wR51UaNvI5eLiFyEUtCP80wTh2nqMc1v0H3oY33rgwRO1OqYn6me9MPxCha5lRL5gsTzPnhnmx11CDu3EQvawFApuD9ZIs2OJ0Ovrslic/5ds
6qNLF9HvvgS8q7WF9npGNswEOdKqImEwMd1Egq5lELaR3Y7yFoB65deIl6QdG4Z9NKLJCML1Qoh7QIdcFxkqYj5+p9SSdo4Gwv0xgG9rMiOeYmJkcNPTCIjT2y4JLHCznwKjPFqHrUrC3h7+1Ratxm659nzG/TCIgoYxzb9cRHLNYh8h/EEsszFq9jUZEwlH7G4WKC2kUNKSa8HBwd6Ae8cOYRRCcMIsZIQI4txCDBVjEqgZ68QSQfHCCk6GaO0TpLoMZ95NCnlUsw3CMMMpRIQme6sk6Uk0sBLJ2A6JCqdk5tjq4fk2EdKCZNEmNhMsVVIhAcoIjPP0DlH2RjiGBlXnV3uTZcZUeD31V/kN6L/i/d4lgfmDbrU+Va2yrnsFhvxgM3iEYMoTz/KM01synZANRgyjUxGhQ0mxyXhUTTX3wwGWlicy8Fzz8Frr+nHWi1dpn10pMctCEBaFkO5TF/WkULhFixqSzZHEUgronCwg9coUHMmVOwxF60jLuSbZLv7GNfOE//8S+zt62ORpno841jrZ7SGRp+7xaImPJY1X8ifFO3J5zVpKRQ+XrTnwyBFRk6NMdMmIvARxzczj+Axca+xvga/+itzvUq3pxtUXjj/1BrwCSk/lEycJkQfBx9GiMLw+Pk4QsUplCyyTBBkNkdZmULhSL/gE1y9dUZwzvCJw+nOveQ8TJmxkeucPK+mU5ST4JScHxq5edJ+GkJxqXhw0sxWTacob4xzFeTi/H1p3yeMYzgmRlFm0g7mDauUaep0xjExOt0PRKfKHr3L/Cg8TsISJTW5EQAKYRuk45QsjE5qeh6JcLl1WKxiToYUTZ9S3cJdrZHtHxxP4PdJWilDo8pw4VWyy1eR4zrZdof0m2/qu9qNDQx/THHnHYqTA4RdYlB4jv7I5oAVaDr0nU0OsmVS6ZAKC8OM8ZIRlXJGftqmVDWoff55aqWMvb2UHfcie8KBZkzoQC+r4KiAKMiYyAKJKGAFkkJBL4qNhmRhwcE+bhA4mehUS7c7W6QzjGmAyxTh2DjGlCyMqAaHNJ1zTGWFVJpUjR62AX25TJJorYOU+tjMCIJSJpaVkmURaWoSZyGWNCGLEdmE1MphkZEpEKRkmFrAbRwfd2FiSQWJQSAVpH2wF5DSJkHS8jPWKiMi2+GyOGA7WMRPXf6L8f/kVyf/kdUNlzesVxmGLveDiww6h1wyxhSskMulA9pBiV6UQ44jKvUi4TmDqa8jM+PxrCJMj9FwqEn5cAjf/rZO9bz2Gly/rsnO4SH4Bx067zXp9CSDtAhCEYcme0EVM+eAvEhBGOQOBuznl3GdhKIaYU36NIrL1F99GacouXYNrl2bn3qDAezu6qjRrFfi46Xjpql1QYXCo2Xmj79uFu2ZaXs+jvfyTFSb7u6THbVR2zvIpUWM9VXEccjnw8S9xvoacnXlR6Li6DsRosert5Q6CRRrfIKNPc8Izhk+cfi4eewftpHck/bzhNx8xH4+TowcI2E918E2YgyhPpQYfU/76LlgWYwngolZYZQcCx8VgEBFKdIwkI6e3YTQC8DsTlkPvQQqQAWl9MLXU2tMnl1BrR13VraOp5I4QfV75O+/RTG+R+5CnVRFtG916NnLdMsXwA8IOxZDWSb2iiSxIsHFlhFO1CdnxhQYkgu6LExj8ot5Hm58jlt/3CYbjiHLSKVFT9axrRj8kGkGPSpI08AtOVQrFmtrc93GDFE0jzz0ejr1ZxhQcBKS0RTbVOSdCblCTBok7JjX8XExVMp6dI+43DgWHVvM5nPHmacEJxO9wOZyOrmVpilpapEkEs8KsUhJkwRTCVLTJQHS4zenCAwrRpBDOBUMp4DrKBxHMBpZhKEkzAQGHsPhmFp+wtTyuJztsj2p48sc/730f/CFydf5tc2v8HXxEvGRLgAA0u5JREFUWXZw6XQXGDcLbBR6pEVFXbSoTgZ0CyuML1xF2JJKVR/zINDEot/XYyWEjqj0enOC0WppQrexA
S+u7mPc+h3GXZ+2t0loFxhleboji0EcEYtF3JLLRJxj1B9i+D7mNKIkwK4sc7i6QqVZIj/WRKrRmOtSymW9Pffc7JrSpHRvb96jJ0k4qfqawXV1pVehoH/PLDUTx49qiGbn+5OiPY+Larl+lezWbd29ezRCXr2CcOyPTDt9VJTlRwlPmudO+nF/gubjJ+GM4JzhEwfx55DH/iTt55MmDM/UZV3f74QRBFqnMBxCHGdkfZNEXkPt9xHLzmmrJFwZkp8+oHxhAe/V4uNNZQG9OPR6esH7oIBbIitlrGGL4t2vkz+6d9xONiU7aiE3zyGE4KCbZzydIBwTIQRtZ4UgtnQlkUqxTcVCtEOh4VGYHFAdPiCLEh6YV3m79NNQqKPeOyQNE8Z2HWkLrCwgmEJH1MjcHJ6rqDiChVWLtTX5Aa2VEJxUB3W7WnNgGHN9h0xTSmJEISe4VtzjjcEFmlED33AoygkXrB32wjpD2UCaFq6rF37X1Xe+WaY/O03nwtbRSFKtaqPOLINiNUdp81k6hwFSRRC5eKYkCCVJJjEMj7W1HPv7uvS8VjRPfodt64VdGgaJmaeXKnLBLl7cJJqmXJBd9nNX6Ksqfxh8jk9vvckXFv4T7z/7m7yzlWPUMXkwNhlNJRtOQqmxzKXPbzDKV+j39b7PqqRmpeGTiX58RhKk1OM2Huv9mUwy7vwxeKOXuJ68zaXOV5FSEJfqtOvPMB3tEpjrDBqfYmB7jEwHSYzKUtrKwMpZSF8y2ZuTmp0dTWpcV+/HwgInvYaE0P9fOOV5mWWasO7taWI2qzp6+PDRY5/LwcqK/uzpdK71eXJDwQy+tk1+YFFaO4drphg5D/HMddLdPVSzSXrrDvL8uaeadvqk4kdlPn4SzryozryoPrF4pO9CHCMsC7m29ombUL6X/Xz0DvGDE8ZHlZeDvnOdEZnHjRBh3ptHdbsY0yH5zjZFOcG7sKKJ0xO+K8v0ItHrcapL76OQcl4qbFlPuNN1HdRRm+Td9xDlEsbVK/SiPL17XZKc7sE/VjnSICFvhTjRiAVnSGHSJLl6g3vGZVrjHGo01mXDz1xnemuHdDDGLVoM4jzduECaGZgixkoCcnnB5c8tU67IR1KWMyPGBw/0OLXbeqxmrtSWNdfPWJnP+v6fUS7EfG14g05cIs0kDXfAhXyT9/urTFIHq+iRK1jE8VycGoY6KjCLBlUqmgjMHLDH47loN01h2AmwJgMGvo0gI8Ahwca0JZsXTHZ29Pur1blXU6GgSeZgoBf1XC7DM2PO+bdgMiar1HCMlAO/xKFfw5QpF9RdfuXZHbo/9et87etwtJ+QJIpCHlbOWayvS/J5HYnZ25sTmulUf/eMsPm+JhGzkutZyXgahGTTKblsiCXBIWAj2+bq9HWq1gS5vkaiDAa/8JcZm1WiaB4dmkWEbHt+rs0qeGY2a6CJSak0Ty8tLOjv/sjrMYX9Yz3PePzk83gWqVxdhVptno6LIsj6A+KvfFX3XTrOZZWsCUveUPs+dbqo4Qj7N38d49qVT+TC/ueBT8p8fOZFdYYfC/yo5LFn+5kdtVAHhygEcmUJ+RHh6Y8rRMyy+eT7YZP1DFLqSTs3OsB653d0b4x6DdYcsmqZ7F6P9M4WYjBElEqEG1cZXX4Jf7KIuvnBz5uV5s7u6h/Hh5WPqkKecXGVVlJH7eUQ9TqZ4UOaIVGsRFvUoybW+jn8vQ53/fN07BcQk0WEFCg/IPFKiNVzWH5IPJzSk4uEExfPCLBFgm2HLLs91ox9CAJM+TMIoXVJhXxGeDTg/kPJKLTojF2iSJ4sqDMtU5bpBbXRgE+94PBn/+c632oVGVPCNRI2ih3WvSPe6F1gmlp4dopXNggC/b5aTS/Yur+KHpNKRY9bFOmIQb8//55qFZrbU2S/j5H6SFnTaYxEACkyikhGEtvOEQQZoZ9RsCKkMDBNi3JZMhrNzCklIpPs+xXWCmBKRaoE67ketkx4OG1wV13iP98s8
L9+rsNf/IsN3nzD5P23YkZjiLdiJhMd7fJ9vdCvrcH2tiZsM2Hv4qI+90olTQ6bTf28P83IwhgjjYkND1+YWOSQhqJVWKIYdVhs97lYbLJSHGNsVskyTTJnxGk41Fu/z0mrh1nF4KwZZpbp/8/Ox9Nl4rWa3qR4tLuuXKizsSHZ2Jifq1EEe7sZ+1tT/EmKMkwGmcdgMJ9LhNCft+xMKKgtwvJ5xmnGKPFOIp9CCKhWUEGAyOU+cXPRnyd+VObj0zgjOGf4RONHJY89F91+/C6fj08YgfQYWjVGY0l668PJzCzkPtMNfMBx+E++Tjp+rIfQwjK94gX6D8dQqWG88mlkuaw77Kr53XS1qj/348xZj5fJJ5nk/mRJp9mKLmo4Qk18co2YmjzAPXwACvzM5v3Sy/S7F6DsoEYj3R8mDMikiSyXEYurdIIig70IO61QyEVUjIC6M2TZHZAzQyaJh0pt1HiKiiKWFqG71eK93+8wGSR04jKJtMD0kYUCxZozM2DHNPXYXb2qF7X/+0uSZnAen5gKfS7nD5Cm5I32BaaZRcUcY9aqBJGuVqvXdapr1kwujjkRhHc6+u9MIzJzho6jjKQ7xEpDpOcgIokhUqQUyEwgSJl2Q4plGGcGkZ8h/F0yLLrdIo1FwVLZ47BrgyFRqWKc5ZkQ0TDG+KmDQHGt1MQWKfcmSxwmC/y//4vL//rL+3xq7xvkhwZvT68wUGWaA4uw71Fby5Gmeh8vXz6utLuvyVq/r3/b0pImczNdS3s/YjTsEwoI0Y0eJSl9ykihCGybflxj179I5d0ql6QmUIuLelNKf/bREWysZ0yOxvR7inFoMcl0Q6Yk0WM5c26vVDTZHwz0+Xd0BMW0i3n/NnQ71LIWFcfHWP/gdWcc7bH6xjdZPnV9hkvnaJ57jaO4caLT6XSgPSmTJJ9FHNkI26LhDLDkqTbmn2BR7Z83flTm4xnOCM4ZzvB94rvt8hnH8/RSGEqU+uhyz5no9+OUfT5OOjIluDtanr+gZqN8Hykltbo86Ur7veCDFVrGMVkSLCyalCYPYNBHDjYZjye8Y79C31xAGSZ4LsL3sSZDjEqR+MI1Mstj4FsMfAfVlZRKsLSQ4QVtni0esF6b0A0LdMIik1gLM0SSsOq06HoW3/ofQ8a3WnSCAqlpk5qmLvON+tiTLkluDZXP4XnacuD55+Htt+GP/3gmPLVYW024Lg7otDJuR+eIsFjMTVDlKkHm4XmaBE6netxmeh7T1FGQwUBHJ9bWdLQijvXC3GhAMAhJggTX1qamUmQYQpeIGyIDJQiDjELwECtdJsRhnOXIZ0NMf4rZ3sctLOGxQiByxJaJBTT9MnVnQtUeYYqMgulzsdjCyELuTlbo9SX/4f8U/E92i8uLLovGHd5tr3Bv2KCzq/BDSRC4LC3BnTv69125oonb40RncVGfu7lwymh3j2HmMEhDUmmRCpMEE4OIERUEEb5hEQxztL+mz+FaTX92pXJMpic67THc6eMGFTLDIqk2GJWvMjXLjEY6iiOE3gchZik6sP0enZv3iUOJ9Dbo51coZEPk7RHO3lss/YqkcnXlQ69Pe+cum/0ml09dn6MR7O547O8ViLtDsExaQZlWoKODCoUcDVlakZx36nw3AocfNR+nHwecEZwznOH7wEd1+cRdZbAzYPqHdwlfWUFXI30QQuiISbGoyYznfe9t1Z9UFm4ZCZZMqVgTvNwUFR7iLN3AWPx4peaz3/mByfmxajDXiLlSOjh5zzC8xG2VZ9jM6Z4wUkGmcLIRXtxmkF8iMl1Ce5le0CAcyJOUmOPAuXPw7DM22X/bZvduwG1z7cSiwjUjFp0uD9sGrxc/R3RQpHWnhQryJHYOKRQFI8I1QjKZEYcZZqdFY3OdGy8YlErwX/+rrr4ZjXRkYnkZXnvN41vfepH7w5TUgPWlhEQuMfV15KZSmZcsj0bz1EqjoUnOdDpPgSWJfs40dbrqcCtBkGGYECpJzghxj
Jhx4mmCIyRRJHGDFrZbJRY2sbQpBB185dE011gND4lzOZqpRRalCClJ0pRbo1VeqGxjmyl+6lCwppyL25g1k3tjwTDN8Z+T/we/xussO0NeW3lINR/x7uECo56gKW3SVDIc6v3u9eD8eV22HQQ6dTWLusQx1BcMDGtC3gqpDVqMY4epVcUkYaJyjFUBISFyyyQDiWlyosE5ONAEZdnpsPHul7EmPYr1GsWlCIIRk/YO7WmL9KXPwGb9JD07mcy1Tf40I9gfoXyXLFdkwR4iURypRVR+ETEc0f+jQ4rxItk3dnD6BRbW6+TM41zih3ThLRbhmWclV0tF4v/+J2SjMcPSGgfZCq1pHjUNyWyHZuUSrW/Or2fL0sd4fX0uij6NH0Ufpx8HnBGcM5zh+8DjEZNmUGYY5+bmerk66nCMNZh77TytJmRPwuOkw5Yp5/PzGlo1DeG7DK9/2ORsvPLSB6rBRrHLndEKo9glG/TI5BRSH4+AmmrRs5eYqjwjUWdgLjN2l3F9n7wRUWq4lMvw0ks6BRSGOoKQ5l8gzd5GHTSpLxlU8jEPBlX+dG+dyMrRtc5BMyH2M4Tl4BoReTPEiH3CaUoWZxTiIWuDezy7b7Nf+CW+dLdBu60X3YUFnaq6dg1+//eh2ZQoKTl/GcLQYjrShKtU0gtzu81JZdQsNVUua7KklF7oxuP5c/K4waKwDEwSpFKkysWSCY5MAIUUijTVLZFtAopizIQiSWYSCK31EFmGPW7hpA4FSzCQNSIkRZmSRIJb/WWulg/IqzHj9pRSMcO85MK377JjrjNMi/zX/Vf51ZVvspLrc926Q7mww63JBvvqJdpt56TMvVTS4mzHmROdyWR+TvS6RZJSg1LvAVGjhDOYUvUHjCmQSSjTxS+t4Je8k3Lz8ViTvSTRBKW973N/+hzFqsGmaLMs+sicR37Dxdvdwmgm8PKvc9SSJ989nR77ZA18Jv6YxMohlKATlhjJmLLtY6DIlyKCXsjeuz3SAxPHu8ywM8ZSIVXHp1GJPrIL72mNXHlvn1K8x3XLQj6zhvHqy/TcEnt78zRlHOvKrdPVW66ro3rLag/xez96Pk4/DjgjOGc4w/eB0xETpZh3MhZgy5hCwScfPKCwfANj8+NHTL5XPO0eQt8p/Wa8/Gn6hwF37tWZOnWUYUCaoqYjctGA+uQ2USzBdYnMAgfOJkOzDmlKLhlRTY7I2xHXLrlc/ayLIKP/oMf772dk44nuO9LtshxvYw+bPJhc5h3vMrGdp+ssIyslMsMjC3wcQjxXcD53xHBqcDS2MNOQdXnIFesWzrTJH9z/Xzh4EDFwEmzPZG1Np6qWluB3fkcvWFLCpUvz8mHH0WR0cVFHMmYL9mAw160kiY4uuK4mNfv7mgDl8/OS69hwdWn0dICw8xhSIWWGFAqBQqYJqITM8aiLFj1VI0MxMSusTO8yMkvsu5dY54BE5Akyh1jZTHMlbCL8SNHs2tRtk+rSIsG1y5TdmDV5gMzlaIYZraDEl/af5wvJ77A2vE0jTShE32KhGHOr9AqTaY6HDzVJG43031u35iXWswZ/IMmeX6X79QlMfcoLNnHqYgaCWnDIwF1Gri9QKWsRcxDolKwQx6LhOMGaxii7ysQ36UYlckbAp6vbVJ3JCfFwhh02NjTxSFNNIvN5SO0p01vbjAprRJlFrAzizCRODRIBhsxBOkCEIaWohR1OGAUOAR49BKnXo7FqYxS8D+3C+1Gi2gY6andynaTzcvXhcC6Yvnc3486tEVn/eWSpQD4IWRddGt4Aue5+on2cfhxwRnDOcIbvA6cjJiLnPZKigeNuxrb4gQkSn2bPig+tkvI8BouXubvrMf2zPGL1iyh1gBqNyTFm3T7CbywSH06IrAKoiENzna65QiagkA5wUp96uMez07cpbtZxrvzvHLx9RO9b98g6XdR0itHvsM4u5vlz3Ft6kcN8kXjk05MLyOoCIlckTiS2BbYp2Bg9IDYKDJMcy
ahHPfY5b++yLvZpxyV+v/x/0JPLTDOHYjZgcb3Ka69J4hj+23/TC5NlacIyGGhSMrPX2NyEu3f1uCSJTtVIqUmPEJrQKKVTFKAXvFkl0KyaKookVqOGeTiCIEA4CmlkSDJIEzBApooECwPIM2KKQ4JJJsBUCakQ2MR4MqSsxvTSMipOsFdKRH6BI1lj6XpIUClQKEjioE/RCRBGE9NTuNmEg0mRP8q+wEuuy/XsfVzl88z4z3BFwPbKT3M0KdJsaiIxM14E3bX40qV5A75Opw4C0tt36LdizDSiaIWo9QWszfNYCwXabT0+syhXFGkSmSWKNLWJUgubBG104vBWf5OqPSFvTFmJ3sM+RTwMQ6cRl5chrcDB631c6YLrkAHD2GOautgiJo4EISVkkhJMYow0RHh5XJmRz6b0xxaD+waiVsAQF1lNclTUB1PDTxLVPildaxiSlRV9rGdIEth/b8DDb7WY5sqAYBK73IpXGUae/i6vjv1gQON+j/KF+lOP6P6k44zgnOEM3wc+iV2Xn5YXzun0Gwj6UZ5740VGxwJf5Wi1dLm0yoXPXsU/HDAZJgT2izrKc/jHhEYBxxqTi4eMjBq5bMy14A2W44cQhiTS4WH+82R3IH1dWzsUq5KF0S3ieMQ991laRxtk5SI9UUflFPgB/iDGK4CT1+TDcWym0yrTdgRuSn26y2XjLpaAd9TzvC+u0rcaKOGwYI84L7d58dPXeNCu8/Wv60hNsQgXL+oFeNbMznH0Yzs7ekxmrs5ZponPysq8f8rMHHJrax69mfWwGQ71Y0Yxj+1tIB+MIRojkjEiq4FpYdomKo2JYklBxtjEBJmFkQZMjApr01tMrQpNb50VmqRK4OMyVS7TkWJp1WIwgHcfOHzmM3qfcErkFxco7B2iqhaiE+JGFvft67zOZxnFOV6rvYu8fIGLezdpZJKd53+B23d0Wuj2bd0nJwx1Km9rSxOVlRUdmap/rs7hxSrmwxEqjBhELk41TyEvybI5AWy1jvcH/XlBP2UwiPBTmxCLRBlMMkGiBEkm6KY2h8mnWNwrUbd1hO304i8bdRrnctTuv42orTJKc0gBFeWTKcV0OCUubWD5Awyh6JpLCGkjhSIxXBLPYRLaeK0phWvrNKMqzVv6s/N5HZ1xn3BP8t1oaUwT1spjFoxvI1d0hCZMDTpRkUSZhKmFMk2C2OBgP6N5XKg1q5KsVPS588O0o/lRxxnBOcMZvg98Urt8Po2eFdk0oBvk6HnnYSTJ4ITcFKyAi8U9gtaQqb1Jt1cEp4pY0NEL0gGmbSDwEY7F2vgBn25/BWlqRfU08ziwL0Muh3XxCurOXRbCHSqLJuEw4PZknVb5PKk0GSR5sqmNmY8ZRS6eFOSCNisVl6WLBaZT6HYl2eJFipN3OD9+nfJ0C7/c4GvZT9FKK3TMJVypqLhDruV3uDz5Nu/dusY7O5rM1OuaKLXbOoViWZw0wWs2527MoF8vpa4GyjL9vJQ6ejOLUswa182a/s2E5GkKTq2IDPIYVLGcCOPAI5MGlgVREBLGHoVwRNEeEmAgFPiyQCYsymKAIYvk1AQnHlN0CoBBkOXpdvXCmCTwxhvwyiuQKcno/AvUBy1U8y7ZOEPaizybvcV74nluu59mkrvEL8tbqHqVUusOL3z+U+TyDW7f1mTv4EAvtK6rfy/oCNfmpo7MLK9IlpbL7O+DPHb77vf1c9qAVEddlJpXnVkFl8VyRDjYJ7aL9OISZIp+7DLExk1GlKsezWmR5ns69VMsamI16+1mXL5Edn8bdfcexdVlSjltpDVqBbQK5yhd8kjeuUfUyBH3UkQ4omBNGcsqkbJQKsRWwPIy44mO5JVKOnI30/wIoUXvtWqGev9dki/9AVkQIleXkZ77HbU0H7RlSVn1+ifPq+mUxJsSLEtG5vzcOb0Ps/0oFPT5lMt9H0UIP2GVXGcE5wxn+D7xg3AP/l7wvfSsmPUC6XQg65eIRQMRao3Rgj1iw
2sTKpt2WKI1yaOExLJtlDqOUBx3CaZaJFtZZLX1Bk48RhQclLLophW6RgPIwDQxL1/i/DM5xJt/QjYaETRT/kz+DFmSMEgKlMwpi/KIe/ElvOEELxtQTrqsBVuI+1fpyk8ROhWkhHPPF9i8WCT9iuBea5N78Q1a1jITp0LRiFnIT3muvMsKe3xt8Ap3t0r4ybzJXas1bzJXKulFeWZAmWX68RmZWVzUpOjBA/2c52lCdOvW3MLBdXXkYTzWC5dhzE0es0yC52JUXIwuZMedkaN8gVgVMfwmjdFdevbzRNIlUyljo0wpfIA3vU8gPZbMNplbI0oUkSFIU/0d4bEz+N27cOF8hjBM2hsvspx8leywpV3XjQqf9r7NO8arDDH4/x18ml9eeB3iHkYc8Oyzmqw9fAj37unF9ubNeQquWNR6JMuCCxf0mKyt6e99+FAvxKCJUC6nXyeEHg/d5E8yiFdI/YfYfgfHGKMmI9rpAjEGqWERj224F7Gw6p502T44gJro4GzfZLn/Hqbvo47bI4tyGVkuUbq0Ru215yHNGL7zkHbtGraToroD1ofvMqTM0KyTeR4TWeYorCIG+njNehdNJvpYuS6073Q4un2H5NZtjKnBgp1QTHYw11YQpdITq7FOrsGPEeG1Lpwnf6nKwimeMUuD9vvzarzRSG8nn33c+LBSeXLl1uP4SazkOiM4ZzjDU8CPYpfPGbJs7qx9urmgKBdZXLMo7r2NX7nAYVAHVQR0P5B0NMFZqSFKRRB6cZ35BdVqkqx4jqhXI9oachguMa1c16tbEOJKn/XNlNxvXiXbf8j44SHb3nWUlyOigJGOWB+/z37uKqnMyCcdPCNh03qINDICq8CgayGC+yy+ssmVcwHWm98g3G3y+vhZWkWbtljBsC1KbsZmocUzpX0sEfMH959hV24Spwbnz+t0xIzczLrZVip6XPp9vcuWNTd0tG29mA8Gs8qpjLVaQEOOebddRWBg2xLL0kQgSTTpMM05wVFKj9fMTkJK/dzUssjqy4jhEdZgF+FAJk2cZMLEqqKCe8jBgOHCGsvLksJ4zCS/gLNo0Grr/d3c1HqZ9p5PZfc2xfEe+AE7aY5zooesLGNWKkyMIp8z7vBm7zzjyOF39l/kF+w/xD3Wi62tzRs/3rypP3t/X4+N4+i028IC3L6V0bB6VKwpwnPZPFcnU5L79zVJAj1W+fz83Go0oFQqMSmv0v76PdLmITKNWZIjUsNg6i4wiWtkRz1aqoKZ0z2IKuaQgzsPUZFFu/AChUZEpXhIvfW+jqb+7OcwX3geISXZUQvXUWzIPcSSR9IwkNPL5JKEFUuSqYDRMKS3BhxbbnS7Og150km502HyjXdIhlPsJINihaao0hxHbN55iHdlQ5OcD6nG+l4jvKb5Qd+tKJqTnkwX3DEYzBsfgt7vSkWP+2nH9O+2V9ePC84IzhnO8JTwo9TlM8v04vy4iebsLlunWiQ7wxdpHYHaCRDFEGGbOOkEvx9jeC7q8jWEkCdeS6cNLpPGGg8+81dI7LdId3ZgOqWatKl7E6xnn8H65Z8nbaxw+79uEVlX6bobJIZLQx2xZy2TIZBpTJiaXJm+gVXNk0mbZrQABYf8gse5yXssPXgXdTukObB5z/xpOmaFruNhh0Nyfo9rXotr5RHjieQPmtdpqQUyr8ClS5LlZW3COZ3O+9nMjB5nJb+2re+cg0CPz+XLOnrz7rsg0xA7GGNv3WXnFiSJoT16XJti0T1JTUWRTjFYliY8Qsw7UM9Kp2c398o0UVFIZjlYliAlwcoyAlGgn19nwd+hNn1IPF6mlod4o0Q7kORyGf44pbmTUHN9OvshN7MKLyY3yYcd/MRhO1nlwp0/hectRMXBTy1erN7n3cE606nkS+YX+RW7zqzeL5eDZ5/V+9hs6jEZj+G993Q0Z7TbQz64S9Q/Yi9NOe8d4qwvYr72CpcvrxHHOgI0IzozX64w1P+vFSPM4U3CJKBTuIAyT
AwyrKRDOewROwV6Q4PQcLRFRG+MCso0SgFjf8KwlXIQV6mml3DHE1b+v39IrVbFPLfxgeiJKQUUdJWjUgp296lcOM/iK9poNgh0JdR0qq+P4TCj9UaLbOJRKFjIURNlmITCw/F81Cgm3TvAKBbBcT6yGutpRHhte94JeoYgmHuUzaKo3a7eTr5fZuS/eZP80MfZeLRX10dFn34ccEZwznCGnxDMymxnk+EMQuh0TLmsF9rDQ70BUFrAeOUlnHvvEh31iccC3zCQS4sY1y6zfqNOsfjo9wwG+v1KgajVsX7xC5yzO+QH+yc+XWm1wdYDSbo74GhgEZWuUPH3aXubdMQyliUYqyrnR2/jRX0cVxCJPEmQgm2wUou5snAfkUuJ37vF28WfpVO+SCcoEmUmnpNRNadc7nyTi/u32U9f4mvJZ+jLGng5rj1rsbSk0yyzyM3SkiYdCws6FRKGc1+pTkc/t7SkdSDNJnSbMWo8YUM+4Hy9x+/1X0FIAy8e4PQSKhcqBEEV3+cRV/DZmM/IoGnOBbRSAkFIMg5RuTw1o0+LBkJYhKmNnxawVUgcKLrOMiuvXGCpUGLyzhg1HBKMbQJSougQSzkIqUXWn7LfpOhOGFkl7mfXufTOH9L51C+h8suEgeCZ6E3uOlfxiyv87v8t+dmfnVcECaH74DQaOjpw65Yes527PpXeAStJh0lhkYobsB0XyG01WWv/Lvzqr2Ctrz3SLHDmGh4EYFsZ42/eIptOqRUzXKtFikGPKjXZZhg4TAyD1fQBNPJ0+wbTICHB5agTQWBTjMaUVZ+uXUc5S3QHQ6r/19tUftFl/VON7yp64rq6ueTsWjm63acd3IOKR5DZtMw1yAyWjBYIeJh/BjGNqQ8satboI+0b/rwivK47rywDfc35viY9M8f0pDeivR/S8Z5FjBwQsJk/wpbpR/YC+nHAGcE5wxl+jDHrz3E6jA160ZoJNpXSxOfmY4abrqvXgwF1ournUPsHGJMp5YbN8ivrmPZ8+lBKk5rT32NZesGwLAm6cwhxDFv3QPX1Xea0CZV0wLDcgMSi4h/w0LvKutzhvGiRSw6Y4BEaeUSScC7f4epKRq6i4+/D0Oab/CxjY5NpWEQhcJTPueg2F8dvU4t2uWU8w5vxzzB261h5h8uXJaurWqMynWqzxnrOR40SVtag2yvS68mTaEuno0nIjAhGERwdZdjRAKkCvIpLT1SJUxMhAdPFTY7IH7QYF18hy7TRZ5LM01IzGw6Yf/ZMwEuakiiT2MqxxCEjyiAzbKGYGA3G1gbOpMPChRKyXsfqdPD2H+L7eeq2QSuu0RV1NibvceScwzYU+2KNNQ4oumNGlQr3hs9z+eE3YeXTKFkmXVrhmWsL7ExyHB3BV76iGy5evjw/nrXa3DJkdydj/80BQ99mmLvBmuwSqxRl2VDb4Ha7y8afvE3lf9FRAdedNwvc2TnWuwxGhJ0RhmEwNksIBHVarLJHXoxpWXX64ZDIXmCUJTRKCXH2kHFoMk7zRNIjzq1ghQmkGW48IrNMDv0yR3/WpSfq5PJr1D73qxRvfg21//GjJ4YBi7kJZbYQtRVGaQ6nO8YY9VnLdeiJBkNZRGWKru/R7Urk0hUK0zpL/pM1MT+ICO+sAuu0OW7yYEhX7TPKXcDXziC6PF+m+gUfEX36UccZwTnDGX5E8HErIJJERxhOCxJBT9orK/O76H5f342fjuYYhl7IOh19lx0EWocgtm6z2n8PO5kgHlqkh6uI114hW1pjZ2deYQSaNK2sPFrpMUtVzNr9TyY6IqDGksR0qTkTjuqXqYwf8Oz0W7jphNDIMWmcg9GYymqelaUMcvMQ+9Zoie2eR8+ATObxzAg3HbEyeIcL/vs4nuQ993O8mX2KaebihkOuX3dZ2ihz+/YxuYkmVMY7RA+mbIg9undL7JqXcJYqFKoFdnf1GEiZ8cx5nyvlIe8+LNNvWxAJLuX7XC/t8ZXWM4DClTGWzKgUIui0c
ZMx47REkmiyOBsTw9BEYSYMPh3BUdIkM0ySJMOzAmq0SHAYCciESV/WuSbu45dy9LoZxdt3OJftE5ZuME1t3DQkBNrWKov+fTrOGmGW4mcJynNxnYzAy7FTfoErv/wsR84GQ1EhSSXn6npx3t6Gb31Ln0Mvvjg//4xehytegOtmFLM73HUvkQA70zoTx+ZCvs04cckVC+w89Dl6fcSVl8snvy+f180Ch0PYbUVYKtIpuUwhDMGQMhe4D8CibLHgDxhXr9NfhSTOOAh98rGiGu8zMQoERpHAzKMQGGGCSk2kmyM3OaS7t043l+fIXqZ27Tcwz41YK4/JVeyPFT2ZVUCJMKSckxTXfLI7O6hhxGIupJFlqDhgMvLplC4hrl4mCCTb28fvP9Gj/XBLvWXOpejGlMQBovgE5vVjbB56RnDOcIYfAXynCog41qRmPH70faYJy0sZOV8TI7/rcXuvRqZOVXocT8RBoBe0mZhWCKilhxTe+e+osRYn4pYgCBndO+Jw9z3kyy6yrnv8LC/Pq2dmOE1shkP9+YuLelGPY6it5zm8uwj9fVYaE0S5QTE1GISrCCmRozHn3UMsM4LcGkIIoszgW51LuqQ4ynCNPsKIKFkRS6P3uDD9NmmpyhviBd5VzxFJm4IZcYUtFjo2d7LX8H2JEU0ote6QhinnqiMCY5H9YQMxGZE7aNKKnqHXy2HJiIXsiPLdu2zdtDhM18gJE6UiLFfQj4tMEgfLyHCMGEukuDYwTjHS6ERgPPvNs2ozx9Ek63QER0pILRuR88iGI5SRYcuEMj1CNhiqEmEsmCxcwFhdId8bkXW6UKzipQl+AkscsJMuEEiPRDoUkgGx6RHGBs5oQOaZCNMilh5NtcTaszXk4VxQ3Wjofbt5U5tv9vvw+SuPnn+LcUSu5eNuKB6KC/SiHGFqsRdUWXN7pMIiTGLMIOL2bf2Z9VOtoEoluH4NDr45pR3ZEIQoz6Ug5qxcJQkiiamuF1h+tcj4YZssOyCLAvqqjGVlNOJ79Mw6SknKSZuh3UAYgnHoYSYxnqsjiYdNCZQZyzL5DIqxPl9nOqgn4XENjywV4cplsr19ssEQJhNEqUTlmVUan3kOY71OFOnrZzyeR0Zn11M+r8/90+LfHwQ+ib26flA4IzhnOMMnHB9WAeHf26O9bxC9OCcZMDf+y+X0e6e/+222HiYEkQGmgazXkFevsHCljm1rYjSbhEG/b3VVixOj//I10vG8k3EnLNBNiqiKQrW7cGeLCy9W8XKP3g2fJjYz5/TFRb2YB4FOdQwGMJ5Ill5YJv3mPvXebTq58wytCsJIWJreo1gTGJdfJX3jLbKtexwtPMvN4AKD0CMNU4pujOG41PwtVuwJi8Nv4+dqvCs+zS11jViZVMwhl2tjKirk1n6NgAgzb1Mc76CimPWlmFjlOJjWyaSFV8roDov0DgMs10CNJ9TlPuuNAW9MrzOZ6NKoC9Ob3CjvcTv8FCqTuEZInFmUnYAiAxLDIcy0hYfjaPISx3oTQh8neLSBnW3DJDYwz60hb3Ux+l0KBZPQKCCIEUnI1Kri37jIetWg14kYxjZ5y+LZ/ENuj1YJx2PKSUbPWqLlnmNjepPALOMYKWHswDSgVg4YeA2GsUeuB0uLGVm3T7+bkpk2xXKRGzckb70Fze0pv/ONLj8ntjEWqogsQ3W6eKMm5x9+GXnhF1koVNmeLpKmkvuTBVblIa6ZEOHgos+vdluXlJ+419eq1OuScvs2bVWk51foOyX6ssJSvEuh20TWalhf+BmElOREyOXCAVE0YndSJ8m61LMWbjqCVIEU2LRRmY1vecSGRRDMo4u1mo4c9vv6POz19DFYWvogMYcnV0CJfA55bh2xf4hYX8P6pZ/HeP7Zk2jQrMIO5lVOzea8t839+8effUrM/52iO99v75pPaq+uHwTOCM4ZzvAJxuN2Caky2A9qhIl1QjLk7S3cz1dZWZUnuf80hd03juj90XsoXyGKZ
UTRIp+NKLffpD3u0uYzJ8RICD0xz9JXANnRo0aiw9ijG2pFsWfGrCwNkKNdnPFFyGltwWliM1tMZnet06me0H1f629qNf09i4t12saLdN8vojpdctMRq04X2SiAgvS994mnIW9Pr9CbFBk4AsuMcFwDV2Y0goest75ObrdLjwrvNV6mqVYRWcIiPS5UxpSdjPf755kmCkcklO0UxgMWygEZLsPYw09s8laAVIqJKBBGEpGEXFV3ubY25OboCq2wTMH0EZbACKC357O9uIApE0yRIWVCw+0hBiOyxhpTkT+xO5BSR2lmC+5soZ9FEWb9dMZjULUl5PMZ4d0JTr9FTxbBzHBzYK9uMCwt4LogbBvLzCCKGRoFFkWL/TChYQYESY5I2AzMBcpZn7ZcZCW9Q4agbyywviFplYocvNdB7X2davMBoV9mJCuIWg336hVeerHKt780JAsdfs/5Jb7w8L9iDXuoNIFMIdttLtj/g8GznyVvRtwdL5EpOBq5uPUFVhYKKKXPByn1uZHPwwp7pN/4pvYa6/epRy1qpkEzOc9EljhMy8iFn2Lz126QP7ehz1HPRZZLOOUS52/dRg3HCNOgKg4JvSK7uWexABEO8daKyIseUTyvKOp09Hk4s6BoNvXjg8FcW7S+/miE5UMroJ65/h0roITQxGlGnsJQ6+FmhqYzMf+siV+jcYr8HeNp9a75pPbq+vPGGcE5wxk+wXjcrbwf5QkTfevvGgmLixPs8bdwihcRboNOR98pqywj+fY9lB9gL5ZZ9XpMUpdOUGZSff6EGNW+WGVpWT7xLvK0kSiAI2Ma7oCyNUUIUKlN1otRfvAIsZkRmMVFfZc6Gul+KkLoCb7RmHsT2bZ+jMoC5k/XuFDqYMYB2aBP8qdfR40njEprvFn5AhNXMp0qimJKVl8iNz5iKdrj8uoI2bjA7r1V3vcv0o0XyJA07C7r+T5FI+Zm9xKTxMQ2AkoVgS0iClkXaZdApXTDAnnLp2aNeTBZZKBK2EyoZx2KFQEIprFNlFmAzYXCIS/auzzccfGnGdI2MIXCygKMfheRc7GvbEI8vyv2PL24pcfazpnYeEZw0nTepTYMobCyQnLp18lNDzD3BBtVyUGyyGRqnKQkrXoRZ6FE2OxhOzbLxiFxEnPoXWCBDodqhVjY2PGEqVHmyLtAI9pF5PI011/lnH3I/W++y66fcH6xykrDQA4Vg6MWyXCEcf0qL6j3eV9eYToK+P3kC3ze+TI5M0GZJhy1ULt7VOs3Ka5uIKKAbt/gwFjDWGqwty9PUkFJoone6GGH/jffYylqUVpahGqF7OEOtDss+w9gaZHDtc8QP/spDmsNmreO7ThOp1peuIG6eQsVRoich2PbXB6+SWa5tCo/RXjtMtKUeKaORqapJtthqAnmTPBdq+nzdX9fj/tkMu9AvbioSdnTqoByHN0ZG+ZdndvtDzbxm1Xr5QZ7JL/z9HrX/Cj36vpecUZwznCGTzAeJxkF06dsTbGOKyBUKhl2bR7cBnGq9wWjEUvDmxSXTUQuIlOCzkRHXyyZsTIjRsZFhHhyZccH28wnOEYyf0EYkpgeD5sl8PXi0W7rhWHm0TRrOnZ4OO/hYVl60ZmJMeG4oqsIqg1ZlJG+8TbpcMxW5TUOgwr9sIBpppTKIWKsqDXfoeGO2LwoEMLhzmiTu0uL9HqKLIyp02KNI/JHR7xv3mBqhFjZkGI9R67mUSBFWgrimMNsiRWvB0qxPV1kGHt4widFUqPPjYUmu9MlRqlLQU51Yz4BSXWRvcEiliXwsgHjOM+COcZbLmNcu4Ss11G7ekH1PL1FYUY2DcmCDDlOUKqIYegFZuZvJeW8G3EQGqxcXifvQaEMzkMYjedO5zduSO5evQKDt1HtLm23jBRt8kmfXJoxMUsEXpEeKxTiNpHMEZDDtU1QGQ/+x0NW4vscLDzHw6zORXHIciVAOCX6BwHZ1j3MxOeZ+HXupBcJrTyvi5/hFb5BLi/Il
iQcNsl29zCl5LJ9xN65G1Q3K9ztlHBCTR6KxeNIhsowtm6S+AHNhWc4SgUXy4eYN0qoyRS1u4+8sMml/+3nUMLkwQMdFXzwAISQnHvxVWS7QzYaIy5cIGu3Uf0BDEfgONg3nuX8L7+MsV6n29XkeVaSX6vpc2800ukp0FEd19X7l8vp/yeJfr7b1WRjdRWKxadbASXEsZfXcWbZ9/W++r4mY/t7Gcmf7ZD1GpSXz7HgjDBl9n33rvlR6tX1NPBDIzgPHjzgH//jf8yXv/xlDg8PWV1d5a/+1b/K3/t7fw/78TjdKfzcz/0cf/RHf/TIY3/9r/91/tW/+ld/3rt8hjP8wPE4yXCPCUaQWhz4VWI/QVHAsmyE0NGRahWyh0PCZAiubmYiUKzlOuRMnR9RqSTrxx9ZGvpR4sQoldxvVpCLi2ROkfa+/u7VVb3wznxzej29eCwv60n9/Hm94M3ITS6n72qzvT2i3z8On4/GTPa7vF39BWLhkZgGOTNEIbCNjAWvz/KDr1F7ZhUhahz4Fe4MV+hHOaQV0PAfstK7Sc5NuFX6DFNRwIrH5NMxThTiRX3c1QrJQo3WfoJTjkiRHAZVwsQgzgxUqrhSOOScanEwLNHLdBveSeZx3mvyaftdDls5uixgNOoYXh3HV5y7WsJZLJApeWLKKaVeYK1RB/PWLlHLI8sg7d4hcy8hVlYRonQiPp5pdUxzHtUplfS4xrGOMLiuTq30emAs1PFevUF6e4u402Y9uYuK1jmoPMOCHdKcWoROHs+OsNKIfnETLxzg/tmfkoUZ3foFlrwhR36Ze6NlrpQOWPKGUHPod4aoOMUddbjiJeyIiyjgAedZZZ+KFZNVK4jGAvYvfAG5tsqlhTqTqcTePe6o3J6TtgV7RNrSOSGFQCjtb1axp4hCHnVuDdXvQ7eHsdjg0qVH057b4Srixv/E+t6fIg92kaUSFArIeh3zlRcf0cPUanqbTnVp+swLLJebPz7zHgN9rEwjo2qOicYxB20Xch6+r8vcPU+f3zPt1NOE5+ko1Ww/Wnf6HHW7iEKBUewwinOcLzSxZPZj37vmaeKHRnBu3rxJlmX863/9r7l8+TLvvPMOv/3bv81kMuGf/bN/9pHv/e3f/m3+0T/6Ryf/z50u+j/DGX6McJpkZM46R2GVaaKjOQqFGo2pbBRZe7mIcepqfpwYCcEJuQE+Vmnok8SJsZVje1AjG45J3AK92jMsSt1XZjLRC3SxOP93tao/a21N3xmfFlnOBKePi6h31DpbeRcRC9zuPguLLk25jiUTlt0+m+YOImqj5DrTxGZrtMw4MJDRiEawy8r4Fm7c5Wb+s/iJg8OInJXiLpRx4hHOwzu4l16ltX6NuP2A0vAI36sSJAaduISbTajbQ/JX11kPtrm/NWXPqiGVoswA0esgh+/RSZ8jKyZYgx6xUcEp53AbgADb0ikRKY8XxGGf5Na3sKYZ0r6MEhZmzkF1+wjfRxSeQSn3Ea3O7G+aarIzMwVttfSd/kwou7EBu6qO8dkqWX9Er3sZ7733qYcToumAcZrSs5Yxk4R1c59xeYm2U2e5+x5qMiVo1MnUhLo7Isnm0YDF4pSsnzA0ywRtSSE3YZNtRhQwSGirBcIwZrFqIY6tt5UfQLtDfqHO1avyxKzyzh19Xux1oBhbFMomAihaPjkznJ90T+jJYlm6h04Y6kiOqi6wU/k17EtDNiojjPxHp1pyOf3+JOGk7F+X/uuxS1NNFrsPh6j9fcJxHzed4FkmhbrLKLpGx67gOPPzehZ9+fMo/5YSFtwJRbaQ9RX8zGWYeChOfdmPce+ap4kfGsH54he/yBe/+MWT/1+8eJFbt27xL//lv/yOBCeXy7E8a914hjP8GEMh6V7+Kdo7OdR2gCgCVoqXTWiM7+LUPKwvvIRhPjq5P63S0Jk40f+zb3N3W6KSlMSIaJeusvRcg7WlEr6vJ/18Xqdj4njuOzTzdbp/f95v53Q5+WkRdba6z
puD8wwTE2G0qIsuIgppDTapLI6oqB7ns4cwHZMaBoMox1vty3SnDkwnLMb7rLCLq6bcLP80vllicbxF6FWwhMKcTCnkM3K9hwz3rjMUVVZezYjupNw7LDNOJXlGpHaO+nWPV36hxMM3f4rO9j3Sqc9EFDkXbPPc6I/ZtdbZLVzHsQUl/4DOgWLpSoYQWqUdx5qEgK5GC+/t4UwTnIUS5liQZgLLMxGlItlwglBDspyNacqTyM1svT7dPHFWCTQjOcPhXJiqlESUyyTlMhefLXDrd7bo9ELqtEikQ+iUGeQcil5KkAn6uTVK/XdhPKUtS2zk27h2PP+yMGTJGyMvPUP3sMl4qqi6bTAUWZaRhAkjp0HorLLRfJPo974M0nhECHvt2hqtlt7HnR2YRhYZVRJfUfFiRrHHKPG4WDjEEOojibfjaKIyncLDh5LIq3A3rFCwYO1jEA3T1BHEWfl2tzu3jPCCDpf2vkpvZNB31xgZJUSSIA6PYPAeiy9fg3KdZlOTzvFYR3+E0CTpad9jn75ByeXkozcn8GPdu+Zp4hOlwRkMBtRmpRUfgf/wH/4D//7f/3uWl5f5jd/4Df7BP/gHHxnFCcOQMJzfJQyHw6eyv2c4w58Xej2dk1cKsJcwXnkJsXWbpf77uOFY+x1d3PjQCoinVRoax3BvsoZ6bgWxOuKoCY1FOFcrEkaSNNXh9dld8Ew47Hl64t/ZmVexuK4Ow5++652JqIelc7Qni4xiD8uOKTlDgolk5KzTGD5gMepTC/ZJkwSmPk17gzvtNXrFHIQTluMdNuxDiGJuup9iapRZCR9QyrokScjUaVAaH+L6AUNvgVYTVq9BktS5W6qSEKEmghTJlesGK2v6t2XlOs1aDuhS7h9CEFIwfQ5LFQJzAdOCyBRYY5/l4QGCl4C5aFsIYOqTDKYkhTKOACkUAoUpUqQA5bkw9RFOimHIk345lqUJ46z6bNZ1emb+Oau42tvTxKfb1Y/5PjTlGrVXBfHR15nmFylj0UpzYGdEWYIAfLuEZxcojg7xSwV2JgtcKh4ihXqEBK//hRuws0f3bodeXKAebNM1VzAKLolXRB3scde6xpW6RObcDwhhG+trJ+XQk5rLnf1lsuE+vqjQcIZYMuPeaJmaM6TS2Tsh3h9WHp3L6WaBo5H+7eOxblhZqcwtCz4KQsz1YOMx7O5mZLfvMB4LnEaFK3aLYZRnmLgMrVXUcEz63gHFz1QplST5/HzcLWueTiwUtJbso3rrfFz8JPeueZr4xBCcra0t/sW/+BffMXrzV/7KX2Fzc5PV1VXeeust/vbf/tvcunWL//yf//OHvuef/JN/wj/8h//wae/yGc7wVDGZ6BD64z5RKytQul5H/fRnUO3LH7sC4vspDT2te8gyODqS1Otl1mo61J+kmi/NysHz+XlPj4sX9aJ8+/b8N5w//2j57WzxSu4+4E5/EZbqSAFXivuMYo+j4iZp1GN5ssVG702cnKFtAEYjHlhX2baeoT/1sMIDGukh56wDlpIjvqy+wNTwWA7vUTHHYEr8zKWmOpieyWRs0ZR1FuuQJBkH9wLUJKM7cHHzkqWGxLbhuef0orm/DzLvMQkXWA92uFHv0PF+lmZ4ATdKyRsBfuZgOeAN9lHDK3hL5ZMUSJqCTGOcdEosSyjls+AM8FMXc9Yq3zQwVIpQ2UmH41n0B/S/L17UaZXZWIfh/DW+r8e3250/NpnAlXWLTs4mMR0WHJ9c2qEZljHElJo9ojdx6OQ3cd0upe4DRvlV7qoGl60Hj5BgaZqs/dqLqP/yNQY9Rd+7wXJhwlFSh4c7KCRyfY17WY5L4hD5BCGsbUuuXYPtbYn7msfdP8kTD0KOcjlqno+TTmm3Y7q5Z7jy8rNk+wffsTy6WNREp9fTFWUzp+3HXbg/CoUCXK11GI/eZq98kRTBMMpzsXiIQNEMKnSFRbsP0Y6PyOdPdFIzr65eTx+bUkmTLik1eZqlZ78X/CT3rnmae
OoE5+/8nb/DP/2n//QjX/P+++9z/fr1k//v7e3xxS9+kb/8l/8yv/3bv/2R7/1rf+2vnfz7xo0brKys8Iu/+IvcvXuXS5cuPfE9f/fv/l3+5t/8myf/Hw6HbMzq9c5whh8iokjfCZ4KMD5SYXE62vG9VEB8t6Whp4nNLJRfreo74yybl/qCJjGuO09Had+p+ftBT/SPB2VnvT3GO132hmXSbg8jiiitePTUMs2gTDHnU7AU5959GxlPIMujhgNuFV7lsHiFgdnA8zs0Jjucm75HzZvybuEzYDss+9s0kkMSI4cSgnLSAlUkwWJgFPAYI0d9/Fv36B4V6acFcmJIGruUlkw+/ekSR0d6LLpd/bvrhRjRldRXbLbGZaZTF0smlCyfUZCj4owxo5AkinAcveg5jl74ckUDw4oZJ5BaEkukZDLGFBkIIEkxhEKYOvKTz+v3z8bX93UKRMp5J+gwnHtk9fs6ouC6+vvKZR1NO4jrLK2bqPv7dM1N4sykak6IlcEw8SiGTfyFVVqrzyJ77+N0DwjGJg+tIpsXyo+QYGN9jfX/+TPIP7pN78DnaOCyzBb70sNYW8TKO6QKticNLhSOniiEnRHdUb2OENB/d4fdtoPyAyzDY3E5h7xymbu7UHrjD6kFex+rPLpa1VurxUmbhHZbE5CZi/lHQfkBZuxzYbELQjKIc0iUvrnw+izbKcNgSLt6gZGdp9PR7ysUdMSsUNDXanhcMTZzj2829fFZX9fH5rvFT2rvmqeJp05w/tbf+lv81m/91ke+5uLFiyf/3t/f5+d//uf57Gc/y7/5N//mu/6+z3zmM4COAH0YwXEcB+cH3R/7DGf4EKSpri553CuqVJo7Wj9NfBxi9Dix6XT04rC4eOwKLvTEPauGMYy57mB2t7q7qyMHoKMQFy58UIQ5ExTv9AsE+fOwZLIQ7tIZpOynDoOay2JpQN0d00j2SVCItVWUELyfXKOZv8hYlCkyYtHpsBE/oCR6vF/8Av3GNSrCZ3F0SGC5iCThcvguA1EhSFZoJw1Cu0hDtuDNt7iXXSOWNtIyiTO4EG1R3g4pjC5yd7p20hvF96GxILiev0N/lCNWUpMTDPpxAYuEZXlEaOQxbRvfn48DQG4hj1go0D8IiV1JhsQUKYZIQSm9wOarSMsgSeaL4WCg03xBoLU2MK/Imh2PWUpqf18bY965M3/teCy5/LnnOGq+hzM8olS0GKgKk8jFiKZEbglW13QfnfXXMLMRVhSRWjaTy0Vq9UdJsLG+xvr/toJ5q0+vndIajVl/53c5KF8gVRLPDKhYp0JPHyKELRbh2k/VuVOvUuqNuHt/FTyDo6KrI4R/9nW6I5v+wsucd4+wPmZ5dKOhIzcz09eDA72trz/awPJxnNa7iJyu6HoEYUjRiVm4BllN35AMBnNLBik1wZl5vUmpj8HRkSars87V5bK+vr+boMtPYu+ap4mnTnAajQaNxse7y9zb2+Pnf/7nefnll/m3//bfIr+Hg/bGG28AsDKLF57hDJ9AKKXvKrvdR1NQjqMrjD6iM8KfK04TG4BOJ6PIiJqKYGJj1oskqTzRfmTZPNVUKMDqqqLZjvnWm2BIgWubnD8vnnjHqrKM6Z99m/vdBmKhhkCw6nXZW3qWbpYQhSnLg/fZWPTIZyOy3X2EYSLXVujs+gy9JRLhUKHHAm02xT08+ry/8Hm6aYmcEXHO7TBwKggUF0ffhihCuhkDUaGfW2UlNyTtJNw2L2AWbDpBCVskLOVGmFaBZ8Kvc+8PDcIXVhgMtA6nXgeR92icK3DrtqSZL5MzQwSKVlBCZjG5/i5JYw1RLBBFj0XkpMS4egWvd4ekOyLNFbBtMIMxajBEOB5mo4rwJXE8t3Xw/fliOBxqYlMu62PlunONzqyJnpR6IVVqrslpW2uc/zXJ9le22WuauKpPQyqOCucxz9VYvFii1YIgkHirZRoNTW6PWpAvfNA3SUjJyjM1ZBO6D2z25TnOyV121
Tn8xMUzYk64xEcIYQ0Drl+XNJtlzOr82ti9M8E6immUPTIED8ZLrOfbeEb8scqjZynd5eVjUfNUE28hdITxiS7f34XexZTzcu7TqbGZ1Um5rL8jn9c3LGmqn4tjfU4MBqfSzqUP7svsOnmc0MizUvDvCT80Dc7e3h4/93M/x+bmJv/sn/0zWqfMcGYVUnt7e/ziL/4i/+7f/Ttee+017t69y3/8j/+RX/3VX6Ver/PWW2/xN/7G3+Dzn/88L7zwwg/rp5zhJwzfjTfMYKDvKE+TGik1qZmldn4YeJzY9PvghV3K926TdboY6ZTQyJHVa4grV6BaxzDmfVouXYL+OODLXw2ZBBGZgmIpYbVkEqZFXD64qB3d6dN8IBFFj5IV4Boxe9M6h1QoLPaot7fZ7H4Ta3cZVSwgL2yCY9FMF9kTBol02OQ+yxziMcVOp7zvvEivfBG3t8fm4C2G1gVwHS50voGwLOKFFVpLr9K2zrFcGCObU9qijvI8hnEeR8ZIoShZAc+U91BJnfCoz8F9H8gTRTpC9cILknT0Kjx8l2EfXA+KjMlGgrK/Ry64z8DIUXjjjxhufgqYiz/DEAr1Ou6nJOHWDulghOEPkbkBsnYDVtaxjQIi0MdlNsZhOCc4vq8X7STR0aFSSS/eo9E8pdVu60V8e3ve26Xfh+XrK5jJEivtMSqOaI89Co6HEpLRSBO4Xk9HO6TUvV7293Xl29WrT444LC2Byoq06zUeNlPOnztke7pEZ2gTY7DoDVHdHsbFCx8phD3tA1Uuw9abCjd22E3qXHCbhKmDIbL5Gz5mefSM0KSpHo8o0n9Ptyc4ee33qHeZpcaiCB4+1Mel3dbXvG1rkmma+vyZHcO9PU1OZ1Vyj9/gPC1rhjNo/NAIzpe+9CW2trbY2tpifX39kefU8awbxzG3bt1ieny12rbN7/3e7/HP//k/ZzKZsLGxwV/6S3+Jv//3//4PfP/P8JOJjzMBzSay5FTT31nlxvcjPHwaeJzYDIf6jrOUdkjf+BZO0CMoNIjsEiKKyY5aiOFIV3Et1Nnc1BP0ra2Q29sTkiTDcyVLazFKKbojn0kYcXW9TjmvSU6awtYWpEcpJCkblQGelbI9rnMYVGi4A0pWxOqiRD1YxvqFn8O4eB5qVe78x69x+CBkaiqWsz0uGg8wSVFK8X58hU5hHS8HF0SPQfUyjCdcMHcwhCIzLHZXf4oj+xwLoofTO2JiFGi6m6TY5M2AJJMsuUMKls+iO+TOYB0/CRgNFXhz/VCjAXf6q0yf8SjcaRH1JrQmIJmy7PaYnnsJIV3M3XvErYzc9VeIvcqJuSaAt1wlcMqI8RTH88lffwXjZh1SiRHMj88s/ZSm8xTfdKqJwNGRTovMUpmziqqZJmcWPA+CeRSn1YJLlyR3KXF4CI3NufZrlnY0Tb3I9vvz39tq6ZTXtWtPPpeWVyTq5Qu0/2DEgzsh56Ivcz8+RzdJCZM+a/k+8ud/9jumU2bl3/fvw/UrKTu7gjAU3GOZvJyw6kQgwTKN77o82jC0QDtJ9HmfZfqvlPrxmZ7s+9G72LZOD840a52OJoyHh/r5Wk3/xplmZ2bCOZno+WAW7StHTSp/+ruo8RhR06amTKYk798ma7Wxf+2LZyTnu8QPjeD81m/91nfU6pw/f/6E7ABsbGx8oIvxGc7wg8KHuXqn97eJWn3aL/8qYeHRUHK1qheLH3bK/HFiMx7rSbdUAlSGe/9thkFAUF9DIBAiI3MchGOj2l0Wdr5F46d/kclUcvOmYucoIEkyNs5lOG6GVssalPKS4SRirzWilHMYjQT7+4ogShAGXHAeYkVlmukyQWaz4vVY9vqULJ9sEoJl6u9UcGdLclh9nrB5j+XxW1zy30aWi6gs42Z8hY6zilN0uBy/R2/5WczPvMKFchcruUE26HPr9QnDVkR+sIcpfZL6AnflC1itA6JUYMmUshUiBNyoPGSSOKgo4ShbIBMmYaCjGzduzMuz2
3EV52qJ0s3XeZgVka5LvjEhzXTjunHuIpPDFHtnC/HcS5jmvGTcMCDLJMorYC4VMJZA3n60502S6AVzRl5sWy/Cp6uqZovyLE01003FsX7/4uKxv9cxZsRn5pwdBDrys7ysn7MsvQg3m1pXFcd6AZ6Jlnd25h5Kj2PlhSWyzhrtLx1xf7rExfRdHuSew8+vsG2vcvH1N5BLS09cmLMsoz3wCeMExzLZ3HRp5hz8esz04B7b6RVGmByOr3DZ3dLN7/od3CuXvuvyaNPU0ago0kQqyzTptm0tfH4anlOny89n3ZN9nxNRsudpkuo4cxPOJNFEyJAZ4dYDWr0GIn+OpYfvkBs2IUtBGqSdDrHtIP9ff+VMf/Nd4BNTJn6GM3yS8birtxCCTAlacplB+YI2r3z7HuZP1ckXJCsr84Xoh4nHic3MTHAmuiwWYbQ3Yng4RRQKOEaKLSPGcQ4BlGyfxeU+ydGUW998DUplgihBWT6bK5m+qz4FIQQ516Q/CXjvVkIYKA57Y8zcBDsX47lFeg897CUby+Sk/Xw2GJK+fwsMA//3/wf3ucShdxH73ArnPneO8zffIX1DkbXa3Cq+Rju/iZs3uJa+S8dex7h6mQsXDVxXE8zd7Q3i823i4Q71uI1KYbtXws71GRgl7HgKtotnxlwoHGHLlAfjBsNBiO81MHM2xVMi6oMDPZZSQjgIMaYJwnWp5UIWnSEHfg1TpgSZg+96qO4QOZmQWygi5VyobRiP6nMAUBki8FGhQTrJsMouhiFJkrm1wHCoSakQ8344s6Z/vq8Xz047Y++9AevlMc1Bia4qUqlK+n29yF64oEv3w1CTm36fk547k4kmP82mXpzX1vRrms25K/wslfTormcs9O4QVxTD1UtsZxe4nD/kvjpHCtzudLn2BFHwbmvAze024yBGoU9OU0pynoW7aRIexKyN3qDpXsWwDO5PLtLt7HMpP6By43mc73GRt20dLfJ9nVaKIj0mM8uQp+XVNOuenGX63BkMdDTt4EA/PyOchYKeJ9RgxKA9ZShWKRzskMYVbLfMOWNXs6DRiOTb38Z8+dOYLzz/fe/fTwo+AVPwGc7wycfjrt4PJwuEqTalEYBZ9lgevkO5fvETIQh8nNjMWvvPdD/Fon5sNIIsjCBNuVDtYpnQj/KkKmLV6yKF4nCyxMAPMcMIU8LGZspkJ8IwnqyMzlKD5p5FkPM5GkxwywPSTOAPJe3Sq9Tat1H7e2ys+BhZgazVIb2pm+ak159j27nO4biEM2jSeLDN1RefR/7U/07y9ou8+z86dAYOrox5xr5Lu/wsxtXLbH66fiJs7na1Y3Xr9R2Wk13ydY8jtcB0VCboZ9hyiiSj4h/imbDhNmkNHLJ2l5G5RlatEYeSShWefVZ/5mCgoyKOA+k0ph1XwTNYcvrYx/5gi+6A/WmN1LSZ+A7WJKV6nH2PIr24WpaOisyghgPS7QPUcIwKV4gHA2Q/xrYvE6g8vq+P2XCot3p97p+Uz2uC4vvgJkOi/X0G/j0aYgtbruNXVnE/uwms0Grp9xa13yqjkT4fZmJc0OStUNB/m0392Pnz2h7h8FCTqMdFx7PrYnmxiCEzBlGenrXMFfeQe+MlkkKB2/d9nm11MJb0dXFnt8M7D5rESYaUAgGkCqIkJUoSvHIF56UIcavF4nCLNPboGmv0S+f5av1FrsUuLyn1iBj4u4XnaQKiG/3p8bx1S+uAZr5pTwMzvd3amj5+BwfzTtSgj0exCJ4IiLMhXnBEGgf0vRUcERGrI2wVk7ku9PrE3/jWI35bZ/honBGcM5zhY+BxV+9MCRCw7PYoWgEqTckOJj90b5jHic1MqDqrHsnn9XOzEnUtuhTwxi4iKoLpUbEnVOwJ08Rmb1pHhSEYCRvnBMVNGPsSKQVpmiEfi+D0OyajEaRZTD/s4VV8LFMyGhiMhyaZbcOlZUr7b9I/6JG02uQGPaTjEF67wZ68yGFQoZSb0qilrHRukXw9x
PrNX+e2+TydZzLyyZTnNwYcjl/ALBVZWZUnxM334aiZsf9Wi8V4l+qqTS8qsj1YxjBShGdihQMcTyK9Es8mf0q6P6KrLjOqbTBwL+GWcydkaeaKrpT+bMeBxqKgc8tEZCm2mWDLlPOFJpnSi46T+oSGSSbnrowzk8zTRo3p7h7pzT1UoLBzFigbZdoYnV3yhmSYu8p47JxE2yYTvT/ttl6Yczn9eUY0wdi+SRalhKUSYW2TlWjMVrPF7u+OKP+swcRbpN/XAuJbt/T7HEcvuOXy3MW6VNKPVav6sQcPdNXQ9rZO7Vy79lgn6lPXxaIc4hoxOUOHqC4WmjxUVfxxyq3bcL0Bw4nPe9st4kRhm7o6L0mzk/M1SRVjPyKq1slerSI7I4JBHsN16EXLOC6885bCSBI+dcP6volIoaCbBfb78/LyweC7axb4cVEq6W3miTWd6sjaaARGkKeUGZjjMUUnwRB9iBO2p1Uujbb1wUlT0nfeIXnnXawXbjzdnfsxxRnBOcMZPgYeN688X2g9+oIfsjfM48QmiuZVGqAXw9MiUpj3B1FZjehUmWyG5N54GZQ29CxMDli9XMDe0IrbvGtRzrl0Rz6lvEQIQZpCc89GAUEYU6xMCdIIy5C0DhziRO9brjIhMfNsLf80hcmQRX/E+jvfRq1cpGdscOhXqDojVtwBDXeIokq6u8edr49oTss4ruSFFwrs7xcQZR3qnzVzm1XMNB9MqfsPKVYtBnGRrfEKOSNgnLpYUiFcBzvus/a5S1Qv/TrbDzLMzCX2i4iWJAjmXXJBL/izRnppCoOsgJnrUZ4esOT0ALBkxmFQJM4Eeb8N1Q2SnHdimjnrbWRZxwRB6ZQnYQ1RrmPJEDFNtQanYJEf9lHjMYO+xblz8uSYzj5ndlxXVzLGb+0RhAKrXKKHYD8QXC4myEaNrNWlsvUNJs//RQ4PJZXKXEDc7er0W5pq8jSZ6P1bWdG/eRaxabXm77l9+1HR8ePXRel0Hxxgw9hj3/YIbZvbtxX9tEMUp5jHabvoFLmZQSn0awxJVC2gyjDs2QirR5x4pJHN3h5Mx/DSS99bE73HMdPEzJoEzrbTvmlPCzNPLNAEp9WC8ShHy90gUyZlIyIfD8n3d/DiARjHrqs5DzX1Sb70B8ha7Uxw/DFwFuc6wxk+Bma9MrJO9xHhO8x7Zci1tR+4N0wcw82bGVvfGpAetYg6Q+I4OxGnOo5enKbTOblZWtKL9ywyMCuTlcUCze2Au+0qKk0hCjnf/zYrleCRMlkhBGuNIrYl6Q4DjtoJOw9NkjQjySKWNwJMW4GSNPedkyqiQn2EYR6X/ArBuFBm4uZoqTr3/DoKxbOlh1yQ29TDHdRkQmbZ3PLPcXCgf8uNG3MdQ6Wi0y4z3LmjiUjeDLHSEN8sME4cqtaQaWRCHGNmPgU7xFYB11bGZLUGYWmJTlym1ZJ43qMO6HE8r46Z6V/abUlabbCYn+Ad3kdNp6g0ZTQ2mHQCrJxJ4Yq2J5hV0s0yCqZ5LAoejsj29hH5HCoMMVoHqMAnG40Quzt4fhtCn+kgPulv4/tzLc8s1cRwiBwNGVoLKCQgUEqHNc7l2ohigYcPFW57h6zVon+vQ62qj0G5rPer39cLeRzrf0eR1vdYlo5qTKeaHM+sOfb25mP+ca6LtU2b2maRIEp4uG1qqw+lCJMPkpsZMgVRkpEd/95CbYyTD1CGj1WcEIQK34evflWT2qeFhQV9bcxIzeEh3Lz5waacTwv1uv6+Gy9INp8vsUiTcWCyFy2wbV3j0LvItnmF2PKgWEAUCmRBSPL111GZPo4qy8iOWqTbO2RHrZPHz3AWwTnDGT4WfhjeMB/Vb2cWsUnbHbLbd4g6Q1SSYpkKUa8hn7nM8rML7O/P8/3VqiY3T0JUX+P+c79JdvsOqtNl2d+n6MTIi+c+tEzWkJKDXZsoyUD5VOohi3WbRiXP1vaEfssmSTNsL8LJP66s1Tgcl1GiRjLx2cjuU
Bq0kdMpaZaipMFW7tM07Rq5nMGNG/OeQvn8o8aKW1t6TKIIqiWTvD2hG5YYBhLGfdJY4KYTbCKUCc/kbiNz17hzd97AEI6tFXLzKMXOztyeIk01sZASzEIO5+qnMQ7HJ2XFWVYgKF/EubqEvVAll8yJzQdOizhCRTGKFHoDSsEuMncNw5QIaWIGQ8hCwmFAljnkcppoBIFefHs9CPwMu9lDRCGGm+IaAZbI8Ew91o6RoJKEuNmh/pX/xFBe5aFpcO1izMrzr3HACr2ePidGI01ilNK/N5/XmqNGY+5mfuWKJpGj0dxY9eNeF0vLEj+JiR+kTCcF8pUJ0vz4C7EQYDgxrpUQD8t4pTGWWSEIJFtbWjP06U8/vYaZy8t6XGZmnnt7H90s8PuF48C1X9ok2JUcvv0+fd9mYtZo2usIxySwFjHjgEbRp7ZaOGl2mEXRWd+cj8AZwTnDGT4mfpDeMB/Wb0e99Crb4ao2wex0iL75BqkfYRddsHOIOGap/RbNr4/YTV9C1uvk8zod9SS9wqwvSJKArNcp/3KVFfujy2QHk4C3t3q0Di3KOUmcZtilPmGS0B4mHB4YjCeSJE3JlacYVvrE3zjuFlAmlPMFrrX+BKfpE6YpTimPcHLcSS9zOC3jqBHPnRvSbhdQSkcWTpctHx1pYnN0pKMuK8tFdt47z+HDlFJwlwM2EJbAkCZmFlIbbVNKdhj3o5PozNHRvKIF9HgppT+33dapodlrpdQRjrXn69g/9RvaNHQcYLVK2EGR0JBkx2muWYTidEWdaYKYCXLaPVSiHbJzIkCiwDBQjoVQimQSEscZuZxkMtHEol6HzlaH4TsHVLp3WenuEbvPEAWKuFjGt491YsMRCzu3aSYLtPPncYtFQl8xvLtHsf07qOd/k2qtftIFebagDwbzkvKZU3mSaBI5IzmzFJZtf/zrwvZCDHcCocOkn//Ic+MD5+nxOEqp8Kp9Bn4MAmrVAqO+zWgEX/mKNkn9OI7iHwdC6PMgy3SUKAznzQIfN499Kt8nJfZf+AWWev+JhfffZ5pf4dC5SCxMiFOEZdGtbdCNHSz/iM27DxFvv/nEthVP8uz6ScQZwTnDGb4L/CC8YZ7UbyeeJty/IxE772K84kClSnzrLtKfYizoyq41r82+X+fQfg7V7mJs3ebya5/BtJ68bzOdAehJ+9IlME0JfHgVmFKKN9/16fRMPNfEy4cEakySKhzTpHOYwzQgTlJytdETSZVSMOkWdbpFgrrkYO/42NMxUaFElir2kss0swaOk/KMfZf+6x7xK8sIKTltOTcez8tv19Z0xOHgUNJfvEZt60u0ohJhLoeLj6N8jCTkGece5PM8/OouvLLCLFMfhnrRunJFf/Ys8jUrD280tAbl/9/en4fHdZd3//jrc5bZF0mjfbEkW7azOYmTYJNQKJAUQ2golB8tLRD2XqUsD/vyPGzhKQRKaQt5aHloKZSLAoXnCxQokKYpe0I24myObdmWLGuXZqTZ13M+vz+Ojka7ZUeyZOnzuq65RhqdM/OZOaM599z3+37fUjo/O1b7TltxSgNRgpDplHkqlcWDHufpcSJhtNoonM6CvwZd2BhUEEiklFhliebTsctl7Ok03lrnzlIpaKgMYT14BE/OqZnVFIcZyleYLMaoKSexTQs7KLEGhwinR5lo3UUm2Myu4Bin7GbGvJcRTD5E29B9DNc+n2LREWlPTs4ETzNGda6nSzbrvM4NDU4GbccOp8X61Kmq6Phs/xdSSgbGpsEs4Y9Y5FMBcskA/kgew1PhXLAlaL4stmkzkZK0NoSZmnRSN0884bwXrrxy7Wa6aZrTYl+pOEJry3Kuq/8za/M44LyOnt+7kcLoGMFyml35R7A1AxEJY7Q2k/DZJFIVSpqfE4czyFQDzW1NRD0zzQ2rmNm1nVABjkJxjqyVV8ZSLPTbqUiD/mwjSBAxiTU5heg9gXbJHkRiEi0SoiM4iUezOJl2vroKAZ2NGczUI
NpUDyxYa6HgdMe4tLYuPxdnLqUSHDlaYSpZwevRaG4tkcjkqRRthPSQTvjQNZuKKBKoyy55H9IWZBKO+Ec3nQxPJWVS9AdB6BjlEqfoYULUY3phd1OSireD7FgaTzrNnqdVo4ZyGQbP2MTP5Gjy5DFzJol4hFJJw/AZJLwdJPUwgUoSr5UFXWevvx+ztYupgh97eJjp3knGK/VEo9psGWnHDuc6kagOU3THVHi9zuuw8KSWSDjr8fudQMkdlTAX19DPMJxAUt+7F/HIAFQshF3Eb2Qw7BIUC1ieOnSviV2xKY1MEDIlyAhWBSr3P4gsaATq69CEIBXbjRjT0WyLQGGS0qRGQlaIjo2hhULUNnpI4fg2GZpFxdYp1jThGz2N6EoTDEcpFJwSppRVF+VSqSrgdad1A0QjNnVimslxi2MpD3uvCzv/Eyv8X2TzJSaTzotieCwCNVly00HyKT++UAHTVz77G3Du+wgoWAUi0WmmMwbt7Sa5nGBqyjkWv/gFXHXV4kn2TwXDcIJf1yxQSierZZpORmetAir9isswnn4A62gvWkMdhmlCIIAQgphMUVsYplTfyplJiQiFGC94GS9AT3gEIVjVzK7tggpwFIpNxEK/nUQhNNPNBLqQEA5iT8bRpqZp4wzBujqEpmFLCJkFaswsfqOEtHTs6fK8tnUpnQ/mUsn5PRRysh6rabV1LegtW2J6KzQ22eRKNvlimWLWT6VoYlk2RjCLEEvrbeyKRnba6eeeq8sxyiVsXWeyrYOEtQNZsZBGmMbgGaakgV0Jo1spehpTCBGdfS6998fJHunHMz2FbeUJ+JJMRnsYr72MiL9MToTQImHaPFNo0gTNoMHnwRoeYSzbRqlYIG/1YodKlOwogYbwbHbInQbu6k9Ms+oN09i4WMtk284+NTWOfqVQqA6+dK/nlqsMA/SuHYiaFKIk0HMFTDuPnywiFEDzhfFkp7EqXrKPnaKu/0Esz9Xk80HsoWEiNZ1khKBk64SCJo0NNtZ0GVkxmM4YeDSbqD+Atnc3jVGLoJXA1Gw6ApP0ZZoYslvZWR6jO5biVCk6G8AlEk4XlXu8W1ud5zQ05JR+ciMJ+u89Tnf6MJP5Jkq6l4HeAO2/u3vFckgqX6Jk2Qhm3suGTbA2Q3YqRCHjQ9oCT6B09jfiAvLFMo21SUJRP+DB53OOk23Dww87WacrrlhbJ3HXLLBQcEpW5bJTtvP7neD4qbauC03DPPg0iCew0xlErBZsGzlH1xS8rJtdv/w1oq6FiXItRctE4nhyAaue2bXV2b65K4ViE+L6ijDjtxMy8phaGQGOz4pp0MIQe5pTBLy2054LaAJa/FP4jZmTxIK29UTC8T8plaqp9eV0OXOxbWc/124+1lhE82UYm8oymcwxPuwlkxYUyxXMcBKpLX2SqpT02eDGFyrMEx1XTA+WbjBeascyPegBg321x/B7BIlSmHwuz07/EFqg2g989N44xQcOU5xI4Qvo7OioMGm2MTwsiPU9QGa6TEH4qNWS+AM63qCXHu9p5ImTDKciCE2j7I8Q15sIZ0cwz5zETqVm23cHZwxk3aCkttZ5DQsFp9NmbvnJ3aZQcLI3pukEOe5rOzvvSHdeTzfAEfUxtIZ68Hox9jhDKfWWZkRtHSI5TSAXRxiCbE0bZsiHmZwg89gpitM5asPO6xcy83QE4/Q0JjHqahivv5xKpA7tskuhsxPh8ThdSDPCY0Oz0TSJLFUo6UGMkG/WTyefdzIebuZGSuc5G4YT9EwNTFM+/ASV0Qn6jL3s7Kwg/H6SZ9IkfvBLrIEzK3bziHlnYNB0SbAuA0Ax56WQOTdRixCCUsUiV6wQCtv09DiBTEtLVf80MQH33LM+XVA+nxPouHqwfN75XxkeZtnusNWit7dh3nwIvbsTmc5gj4wi0xn07i7n9q4dCI+JKBZp8iXZEZxEm/u/vMG2FZsFlcFRKDYRrq+IzBcZ01pIl6stG83+KULlBNJbQbQ0o
83xrpnr6uq25+rdXZTCMU4fq37gtrQsODmv0KmVSjkf1uCcAGNNBY4PJrEllMuC3HQIZIWKXSZQm6WyzId6KW9SzDoftP5oDmOBsDQfjnIyvB9/ahrpEbT4J6gQJlGuQ9dsWpOPIS/vmW3BHxy0qRztZTIboLnJpj2QYDDXRFb3Ut9QYWwizPRQhUgUunLHKNFK0ChinB6gUrTIRerJFwTJYDOGz0QaEcgk2ZF+DGlfj2Vrsyd3IZyylG073VWuF83cwDCVcq6DQac0JUTVd6hQcAIEV8djWXMCHE1D72xHS/ehJ6cRuo6le5BjE8icTSgQYMrrI1PRsH0BgnVepsbLpDKChmIGaKJke9CExKtbCAQSnVp/EbulnanyXmIjjy96f3T4x+gbNxlsvoLL62N0COfEPFc0W1PjlOcKBeeiazbGYB9GIUM83E2TJ8VEqYZd9VOc9DYxMlTE/Mq/4akJQLmyqJsn7PfgMQ1K1vwAWNMkoViaTDxMueBB2hr+yHwvneWw5gRQhq5hGE7L9eBg9XgkEk7Qef/9TkC/Z89MRm2F9/25Egw6j5tMOvof13W6rq46Sfx8WEnXJG17Vf//F9q2YrOhAhyFYhMh6mNM1l3C1JkMot6HABr9SaJmzvngGnU+uLTGhhXbc0UoxFDH9ZROOx/aS6XPl+vU0p92HQPlttlSVmsrhMOSJ0+nKZVt/CLM9LRNuWJj+POY/uX1E4WMl3LBEYAGazNo+uIoKJOIUG6CQCFFx/QjYLYxbtchKiW6ckew/F6sq69CaBrJpDM7a2TCoKUmTa2nQtE2saSgYutYUifnCaLlM3Re6qd0vAY5maAlMkIlmaLfdznkC2CESOr11FpTCAT4vexIPoSc3MNQfr5mwe+3GTqRx87b1NdCfSzI3OR3IuFcB4NOeUTTqh4zUL0WYv4ATQCtphZtj45RGIFTRaxMFjuTxRuKIsImlKFUEWTLXvxGhUQoRDoXJTY8Ck07QVYPaMjMU05mEU01JO0IRudVNGT6F70/9PgUWuBStN09lCsaHk918nil4rxXpqedMlwy6TynHdEUgeQIo742Wrwp0hUnYG3wpmi1BjiTK9FXjLA3hjOqZEE3T6itlebaEKdGEizsmxLC8UjKJsJUSga56SCBmqU1XHOxJWhCEPZ7CPqqFtHt7U6gOTDglKimppzfBwedn69oGMZ85IE1b62ORp2La96XSDiXpqaqt9K5spyuaSNsKy5GtvezVyg2EZOTcOy4RrrrSoTfR93UCXqMk0S0NDKXcz7I5nxwLZfGTrddSt8Vf0Ap3IAQsHOnY7e/MLgp/+hOrL7TiHAYra0FEQ6TOzXCk985QmEkjhCOqDISgWyhzHS2QDoeJJs20QT4IukVg5vcdGA2uAnVpZcObuJON1UpGiK3r5F4rI3RTA1GKkVr+RRWayNT1z8Do6OdYtH5hjydsGiwR9G8Bg2+FFEzR7biI2QUKNkaRTxESGGEA+jXXcOOnToylaaQs5HSJuupo6z7MXNJRGISGY/TkXwcO5lC5gvk8072olwGb2ka76P3M31kiPSJEcKP/Yrgz3+ANVh1uysWnSyHpjmZG3fyt7VEB7RlOcdhrkhZi0Tw/v4L8Fx5KWJnD1o4hK7ZmIlRPLk42vQkhTNjkM8hdAMrGEXzefElBpHFIsWyQOZy7Ew9jOHVGQ3voVzR0GIxtENLlzl23XI5WixGX5+zBjfTYBhOtioSmS+ajU/Y+OwMzcE08VKYgF4k6skAEt9oH9HyBML0cNLudk7KAT9aeyt2OkPl/odASna11eLzzJlVMQchIDjTdWdVNDLx8JLbLcQ0NPa0xxbNpXK9jIRwgovGRqeklB5J8evvjNH/RNbRR9VEIRTC6jvt/D/MOa7ni2ve5wY1Y2OOWaCb6VsrzlrG2uYt4qAyOArFhpNIOD4sLg17YtR1Xkbl/txZ/XbmprGLqSKnE870Pk1oy35zXGoyOsCo1kyqxo+cTBA5/
Sht1//u7DfAfMFmbMiHoUuy+RKBujTlilMiKC84kc9rA59z4lqIG9xouk2wNktGNjDa2UXELtHTWCQfuYSEJ0BdJIjfY9Lb6wQSwbABpsZO/TTgd2ZXmXnG80HGcmEidoK92lHyxm68DbVEr38+1rFdnPjOE2iaji+dYdxqpNs4wbjRhpA2OxIPIYVgcjAPMacUZadS0Pck+WKSoDdMUfgxfD7s/l7K8fk+I7mccwINBqsC5VJVDgVUMzhQDXBmXxehoYdDlFIZZDaLRGD6feiaji092Jks/kISGa3F9gUxfu851B89w8BAnniuSLMvQ21PG5rYRUWGZr1rJow2Wl+8dJlDzJQuy2WnBNnaWi1Jalp1uGc8DpmsF58WwaiUCZt5KrZOshSiwRpDptI06BoZLQaaTqrsjGxY2M1jhCJ4PTrFcoWKvTjYdTM5mUTI6baLh5d974Aj59ndVkdNeGnnPSGcIMfNqMTqbHjwCOlElpM0MZn2cKn1GN6IF9Hagp1Kr2lrdVOTE1gNDzsaoOFh57JjhxOAuTyVctmFsK24mFEBjkKxQbg1e5eaGudD0flAP4cPLqExWGggVwEiTqa6q2t5AfHCTi0p4US6xbkrZlrM44PIySsQjQ1MTcHQGd0ROus5wg0FdKFRsQRCaPNSFXODG02rikgXMhvcaJJInXNCT8VDSAFag065pYZcReL16LQ1hDlxQrjzBvHFQrR3WcjTCbKeLrJlH/G0QTwFZnGCjvQDJIMRjCeP0dnWgdDaSDXsgroRsifHKOMjFCiSEXVoCFoZAU1Hahrjj4zApTXk0wHkyDCBYoLhQA+W1Gn0pKgLl9BiVZ+RQo3jo6PrVcdjn6/aOQXzMznuz+YSiQxds7Hi01i6F11WkJqGJmxsYYDfTzB9CkNOUNrVhHbZ5USvuBzzgTSFcgnvHqe86b9HQ+aqIxgMA0Tb0mWOzk7HLqC/v5qpGxlx9nWzOKWSO9LDz7i/i+74g9iRJqZkLWZQMpStobFSQQhBd+0I48EdBI05nTsz3TzpRJLeqRKpXNHpopp5jKVkW6G6DLnpAFZFJ5tYPshprgtxedfZRS6xmPNcjt89QHDgCLrwkvPVkzYbedh+BjuTj9OYO4lob1/z1mohnE5F23ZKZoWCc+2aBRoTS5eJz6Vctp62FRc7KsBRKC4w6fT8eT7hsPPteeGH+Go+uBYGSd3dZ3dYXTgZ3cVnlOgIxOe1mPf1ORkIn8egtdVmJJnD0A0ETheLRM62/tq2IDvjcWN4KsuKRecGN8G6DLaE3JTT9uILFjF8FoWSTkM0SFtDmKkJH1I6+on6emht1fCHr6EUn2DwjKCkZ7ESJcqilTprGhEKodXXUz95hMqPj2BfczXDj5Wwx8fxpseZ8nbRXBpiLLIbISy60w8jNEHBCFHp7SU9WaGsBQllhjHq/RQtLxaCGjNHnSczLzMRP5UETy3BoFOaCoWcb+duwDNv8rZcPoMjJWj5LDKdgcYmRGIM8nkMX4my8FCyDYKVKfxGmmLNVWSymqPzqYk6+86c53t6HLO70dHq+6BUWnqEgRuIWVY1ONuxY/FsJ58PEgMZZLHERMqgcfQhLH8TY8EuGn02kbJOwC/Q21poC0zP37lYpCJ0+qbzTJsaSDB0JzAuly2sZYTpgZoc+ZSfSskgEw8Tqksj5sT2HlMj7F9915Wh2+wc/gUDxWmoa8UnpslRoqh7SYVaSeVr6ZwcxRMJrktrtaY5AY1rElipwMkH49gPHaGjNIS3PqKciNcBlcdSKC4Q2axTi3eDG1cnsFovmrmUy07nixvcNDQ4df/V2MfPmwDNTNt4eISOwEwveLGIZXg5PhqhWHT+vmePoLXJh6YJyjM1KV0T2Jac1U24wY03UFx1cAOQjocdJ+RAhfYWg7qwn0t3NHBpZz2Vgo9CwdEx1Nc738QjESc137/vD6CxgVwiw4jdTEROsy9wHNnahlYTpWZHBHtsnDPfugdr4AxlM
0wy3E6NlkKWKwSmR9iRfRLh9wOCIdk2I6Txg26gFfLk4xkCVpIGb4o6X6Z6nLxeZLlMNlVBSic4KBarLdbuIFN3uKa7n9v4M1d47KJZJaRlo0eDaC3NtIaS1JeGMcs5ShUNu64Jf0inaARm9RzuXCR3qKfrz5NOO8Jhy5ofAC/EbXEeGKjenxDO+jTNCYyyQwnqT91HsDBBqm43mYZuNAG1U6fIxbMMx65C1tTMmQDq4HTzJEhG6sgEwoQDJpomsG3pZLfO8qb3R/J4/E6dL5MIY9vV7SN+L8VKhWxhdQaBcjKOjMdpM0ZprQwggCA5ruV+vKIEXi992SamKuF1ba3WdScI3dltY/f2YucKDNRcRZ/dhS30RdolNTjzqaEyOArFU+RsNfR83jmBuCULn8/5pnw+ZXJ3mnNmpvJjmk7W5lzuy50APbfFVJvNJEhSE0XG66/GiISJRJzsEkBNyE806KVYtqhYNkIIhCawCwb5pHNS8IXzmN6lrfcz8dDMeIZqcOMGPLppEa4pgfBRHwnSUBMgnxdMTDgna7d0564lkQBRFyPR5GF8KEJAt9ldWyLt3Ydmm+wMjSJTaUpDo6TsndjlNLqeJm83E/Wm0IJRIrkcO3xjCI+JVSgivV4Klpe8FiIYKOD3VBiwO/DmUuxqzFPvnWOmMuMzgumhUHACA5/PKUF4vdVAMxComgVCNYPjtirbqSwyY2FNVNC9BkLX0EpFhD+Af88O6hIhktkgFc8OCgGBkfKCbsw6JdfVOfqfRMLRewjhnERDIec9MTW1ssOuqwUplZy16brjkXTixEwwJm3MgV5kvoBR14LP8jApr2B38ymSRS/FpIRaD/2yjp2DxxZ181iBIBM9ewj4PUgJHlOnVKpQrqxuBpU3WERokmLWSzYRIlibRddtCqUK5YrNdCZPyH/2CZsyX0AKDVETIZCcpNsokRNhdKCDAbJ4GSpFSAT3kE7E2FO/tuaACzGScXalH6bUWMcZGcOyBWey9XSFJpQT8RqiAhyF4imwXKu1ceA6Kg1t9PdXA5unaum+sLTV2Xl+k41XajEdGveQ83ai7+mhvUObzUQABH0m9ZEgiXQOn8dAAslpjURaYOglzGAG3Vz6G2cmEUJKgdAkoZngJjsVnBEZS4LRHLatUa7YtNWHsSzBwIBz4nWf4549znW57Iiy83nQZZkKBnWRMl2xDGU7R8k2EekU1pPHGJCdoGvYviBpvYa65DB6Po1dW0ej3xH0koXR4G4olZGhGjANfCZoQT+VnIGnmKfBGgUco0LXZ8Tq3IWIhKnMBJuBgCNmjUarmRo3wPF4nAzPrC5n5n1jnajBqngo9h+D4BXOEM5MGun1UJYmwZDAaxkUMMhOV9AaWxEB/+z9uMfHnR3lvi9OnXLW4r7XCoXq2IWFtLc7LdRnzszoQozZtwO+UpJSPE4lXEvIKFGwPFgli6TlJ+bPgBFhNBOi/apG0tOCyMTJeaL40hVXkC8ZhHQNAQS8JpYlqdhOEOyWN1fC4y8hhKSQ8ZGfDlLbUETXQIvHGctOEd7ZSmRH64rCWuH3oXk94K3HzhfQ0ilC/jJS18GyCKQT9IhxhvYcAjSOH1/9CJPzwS0Texs0dmsjFCyTkj3ndKyciNcEFeAoFOfJUkMxKRTJnxpicDCAdq3PadXVnFbt8x3KV6nAyZPVk2Ms5pSkngoLJ0Bb8Wn62IVoiKHv6WHPwdii9QohaGsIky2WKJQsSjk/lYJByGcTrc+RLYLXMClWLEqVaqCTnemKEcIJbgRQSPuxLc3psqp1IgSvaeDz6GiaNptBcH1jururFY2TJ6sBxHAiQK2R5mDgGODF1GwMUcAaGqZQAkvzYOkmhqggsPH4wE4JSCaJhONQKSMtSdY00Dw6IhrFp1cwdJt8tAV/qUggF4dMBun3zfMZmd59HQht1ghwdpAm1VZxV0zsBgyG4UyBL//Ced/ga0JoQfCHEBPj2HY9ui6xJhOU6sv4zDxUy
pRzFqVIDdGrupmY1iiX55e+5jrn7tjhBDhTU4542LKcwHjukNK5uEFSoVB9Hp2dzmDRXKoCFQuPF8q5AtGpIYLpcXIiyKQmwFekQZsip+9Eu+omamv2o5eqmcxysYLWN4Zl2ZiGTiTgpVCsUKoI9Jm0oW07T0RKicd0xrYXy5YT/IqZrJS/jGlIipkg9pBFbOy3NKSG0S2L/H0+fHu6MQ4uL8ydl7Xs2YU9PIJMpcG2QNPB9GDuu4K9N+0glXbKesPDTpC4c+dTH7+waD1zy8QBPz69jE+fU25TTsRrggpwFIrzYKlW64qt0W+1YteAnExA7wl2XVuL13f+ue7h4ap/hls+WKvUudtimhtMMHBaYng8+BrDdHcv/wDRoI897TF++0iBdLaMLSs0thYI+PykcxohnweJMyMoUygxOepDSHumBdgJbso5H1bZQJtpC5Y4TrSxiB/Llpw4AX6P87zdzjK35OO61E5PO69NqM7DHpFFH59EBmda3nN5ZCrNYPBSSKYJyCzFnOCyzMMk9RhZLUIsNwhWFmybpFmP8PupaQ2gmxq2nUETcEa04Akl6So/jCwK5MjovHb9dLYJpBOA5XJOtsbVsLit4q42pjquwcY+1jv7vtGmTKhoCJ8fMxaFcdAiAWxfmOL0aYL2JJQbqYRjaNfuoKYnRuCIE0Dl885jusGTW2Jygyy/3/k5HreJmWksI7VsN57bIj405OhyXLPC6aSHgKdELpXGjifRywUKvhq8mk3J1olOnWLabEQUctgyRl+qgb17q/cb9JlEAz4S6TyRoIbXY1Ab9lGsWNi2jS2dY+8a9Vm2xLJsvKYT8BbKFqWyha6BJyCJ5YfRToySL5vko80E/GXyhSLBk33I+PLC3HlZy1QabUc7WDbkctjpLFp9HebvPQehOeLtUMgp07lat4Wt3U+VpcrELsqJeO1QAY5CcR4sbLVOlgOM550ZCELAjsYsnvRvMVM7wXfu6ZZs1ikZuKz1B6zLxKRGIleP1uAMU6ypWXl7KWF00EdLnZe6cIUdXRamESXgNTg6EJ85kXnwmD4KyQhhX4WUVcRbMw0SinkPpbyJEBCun6ntSMewTQjIpjwEQ4J43MlU+f1VL59cztEeZTLOydy2IRTW6Lp6F+UfnZgtt9nFEqmyH4SNR7NoTR8l7WvArxUY9sXAtqmtjIPuQ9t3GVPm5WjJFL6QTroIOM0+CGmDbdF0YBfm7zwdiqX5dvlHqxkPy3JeGzeocLU2hZkKgxtPGIUsdjyBVROlXLaoWBayXMJKpRF2APwhyOXR918Hgb1Ewhn0x2qxMxrYWfzZCfz+GFNTjqtzIOC8TsPD1S4zcEZyjIxAon8aOXCGcvoMCTlEyFdZsg05EnHuI5utZoZaWiA5HaZQ24Z88kk0G2QgiBSSMJNMajGmvc002GMkJodI1LRR36AxMuLsC/OzfqlsiYDPwO8zCflNMvkyXkMjFgnMBMaSUsUinS1RG/Kzb2cDfaNJnhyYcKZkS2jqf4JAKc5Q3X4SuobNND7vNHa0Fnt8fEUfm4VZS1kuI0wT49K9i/2l9PkeOgMDzmvtTpp/qign4guDCnAUivNgcau1BAEdgUl8ehlpaYumea8Gy3K+Oc4d8rhwcvVaIKVTgnAfZ+fOpVuJF66tt9f52esVXHKJCVQNXeaeyFx9DQJC9WlAp1I0Keecqceh2rTzrVWC0ARej046JRAVP1bZIBZzTrKdndX1DgxUS1anTjkC22c8A3Rj/omLdIYxvRXh99HBcUROUlOeYCLQBUBd2RkLLgydSrgOfc/l8PBvGRsuIUJFMA0yeQMzO00kUMF8+nXozfMPgmUB0qYUz+ChiG57KZkhvD6NcNgJNsAmP5LETlWcCEfWUi5kkbkC494KWnyc0rQHs1yhUunF8haRPoOKUUQvVygHG9DMHPrQaSpTJqXTxygfHkQL7adcexmpVGR2sCQ4QmM3wNm1C4aOpRjrHWOndRo7EmbMe
ylho3/ZNuTmZqe9fGSkKuZuatYYaWlBf+xBisKPtB1/npTlpznfy5hvJ4nYleiJSWKeNOm0E+TX1lY1P27Wb2giTTJXwLYlXtNASoFpaHhNHRsnc1MoWoQCHrpba9B1nea6EOPTGXRN4E1OU5eeRjZE2OEdZajYSqIUoc2TRNO0VQlzz9UYLxZzMlknTzoB9tGjq7NiWA2LAq4VDD0V54cKcBSK82BhDT1q5omac1qjz6OGPjrqlF7AObn39Jy/IHklikVmLfrdttWzaQzKZedDHpwPfPcb+lzmlq8KxTJS2sRacmTzBobwMjFlYJo2vkgaoQk0TcwEWAIsg3zGT1ONH7/fWUxPT/W+jx93rgsFZ+3hMFx66ZzSz5wT18hgGeO+XkKTp9ByQH0MmcuTFDVQKVOXG4RIFO2S3fRPRdBNDx0v2Ef/LweoxKfRMhkmZSfeGi+7nx1Fb29e9FzjR8eo3N+HFU9TqRTQdZ1MpA3zinbCO2LET8Sxe3tJx+PYZZucN0cu1EHSEyEiwZtK4p0Yo7MyxYS/k7zpw9Z1ZD5PpZKkMDlFvmyRO/pjzOmdeMx2KuEGSp6sM7F78iRWww7YG1vUgg7gMW3k8DBmKYveXMtUOUQdWacN2V81Kpyb7aipcd6DqZRzfN0xByPBAKWaZoJ2kmy+gt9K0yRHMaN+4rFGbI+P2mQvU6US/lon+Ovvr45KcN8bkYCXbKFMxbIxdA3LshmanAl6ihU0TVAX9tPWECYadP5vgj6TmqDfyQxaFQyrQtnjxRA27d5BUkUTr2lgGjpoqxPmnqsxnmE4z8X9/+zrc16r5sVvi3NGORGvLyrAUSjOg7WsobuDAV06OhzL//Vg7liI+vrqN/6VyOer5m+NjU7mBJZuj4+PVctXnd0Wuhbh5GCKvj4IBQTeSJpSRaAlUmjFAhXTg4zFEIVaWmMBDOFBEzYdgQQM5rH9Pqa0GFJqpNPMtkeHQo5/0FyEpiHrG0gnQL/SS/NvHqEymEHU1ULMj1H2UZsdRNTH0C/dC8Eg9hkbvVxi2teK8fRmzEwKK1/GjAfQI37qL1t8orEGh5i4+wnsXBEzFAJPLVq5TCGexXrwt2DUYz04ib84RSHUAGGTfEWjMDSMT5tE+P34Tw+BbSMijQjNwCpBqVymIqFowdTxfjQNGtLjVGr3IqSkhJe8pwajPogcLWMdO4G8vhahaZimE4S6JTM5GSeSGyEZiJGtSMyZEZcVW8PQ7GWzHQ0NTklmbKx6Au/YIegLBCgGatGloGhZeEIRtKCfnaQ4MWkwIZrZ0SoZmUySnLKI1uqcOhlmV0/19RNCLGrpjgTnBz1Bnznvf2leicvWiOgGlIpYHi/lik3YtIkE/E7b/ToLc5ubnYCvr88JdJJJJ1N2vs0DLsqJeP1QAY5CcR6sRQ3dtp2siKvXWC4zslacOlWdj9TVtXzb8FzmOiW3t1c7bpZqjx+IXom16xL0+hhX7XPKV+Uy2PkohpEmFM1TX5gicOQJzLEx7JKjgRhvOkjw8nqyZQ9mKU7k9GG0+HGKpTIV08dI+ErsXbvx1jjDIWMxuP76pdfb3z+z1n0xjPBzsEZHkbk8QhN0ab1o9SG0tkvQImFGpnygC5pbNcZm5EC2LUjndAwdopHFmS1XXG7lNER9HQJB0TbwejV8dX7k1ATTP/kVZaOdUKOfXEmjmCuCrZP1BvFn05SpYFcqgESWytimjVUpY1gZ7IBGPlpDIDUFUpIJhbEqBUpWgZyhkbO8RMwC/pCfQnyK4kgcX1sDdXVOUJJMOidhmS/QzUkOG02MFQPsDIzTFohjaDNpnmXakGMxJ8CZnq4GOKGOOrT6OuzRCWKtBlOlKAN00S3GQUrqc/1MBroY+eUJtMQ4gZJOQosSbfAwZXVSu3f5N/VSQc9CZktcpkEuVo9veAirsQm/1yQS8OD1GBdMmOv1OoaaAwNOsH3ihBMUxpQWeFOiAhyF4jw5Ww1da23BHp9YMvU8M
eEIGGHGSXgNvgkuR6XifBC7j7V79+o6seauca7uYKn2+NNTNRSHE2jTv2XvH14GtGFZTgAX8Hq45oog2f4RPL/+FXo2SyVag9kQIGPV452aJvXAMcJ7knDqBDWlYwj3fuP1yLEJyskCJ2oPEG0KcPnlS79WpZJzEcIRzcorLsd++gGso8ehvh7NY0LAPzN/S5KastGaGjHqa7Efi6OfOEppIslksRlds+nIJrBaLp+nhZCTcazBYUT4ErSZeVO6cIIGU+bJoTE9lmO6USKnJ4hXDISYaYkWUPL68WUSZL1+JAKRTyKtOjylDKlgHYVgEK9RIpiMI6WgoGkIO48uS4hKmVzZSyyYJuCNUMh5SE5W8LU5wfHYmHO8amudEmrEV4ZyBVM3EAKS5SABozoBdLlsR12dk+mbmHBO3kLT2PW7Ozjx/TTxkQJauEjZMChkyphT49TogslshnI6Q2t9hRFZT6hUoTSaYPAnaYJ+G8+Op6YniQZ9RLq9ZG56Fvadd6FlUxjBGEJ3JqlfaGHujh3VzOvEBExOrl9JWXH+qABHoXgKLFdDt4dHKH3vB4sMAMtXPY3Bcuvs/m1tixzu15RUqjoh+lwyRGfOVFude3qqAcVS7fGnsw2UTQOtPsDO5ENU7s9Dcwu9J5wTTVMT1EQ9lH51kpKwsXd3o2kaWTtMpRilZBgEEqNUHhxhV2AIbUcbQgiGcnUIr5eCEWRq0kZngtCuDpqblz6Bubqiri7nWmgaxsGnIeMJx3MmVgu2jSwWSU2UEP4dRK/uZvDxKawHf0sln4dwBN3jR6uUCQ0dpfyjwXliXJkvkC56ocakzpMhbOY4lWmmVLbI54tolTJ56UcKgU6JAClMUSZt12NbEltoBGWFisdLNlIDQMhKkdEbyBgxLMvErthYumOkqJfLCCNPkDiW5aViG/j0Ml4rx5QIka74aaIasLpt6W4J1XssSTFkULR0wAf+s5dQGxudACcer/ot+btb8R70UHryBHXJE0ymowwYEXZ3+SGVoit+mv66/Ywi6AhOMijqKdYGMKbHOP7fg1x+61Of0C2EINzTheW7efZLhT21ccJcd9TKyZPO697b6/x/RaMXbAmKs6ACHIXiKbKwhr5UhsPOlzjRK7HPPIF+nZdoV2zJAZvnw3KjIuYGKeei6+ntdcpmS2V7FrbHp8p+SpYBSHr0k0jToHLiJCcfTKLV1FJX52QU7PE49vAwZmM9wmtStnUm81EsKfDoFfB62DH4S/TLdyOEIFfxkKt4Kds6OjZxo4na/ARP7wkAi/UKmQyzM6Hmdrgsl2Ubb7gGfXcPzZfXcuxf70cWCoj6GMlyAM2GmlAJPbpYjCv8Pqb1GJTKRCNZdCERSFIFkLYkKNNYugdtxp83akwBkLJiWAhMq4I0NTL+GP50kkxtbPZNoDmzETCKRXLRGmwpaJs+gaemhpwdpljRnAncUkI6A+EWymWJdfqMc9yFo1VyxmE4JdSekd/w+LSXcX+Q9tAU5WwBLRE/a7ajpsYpUyUSVc3VrqfV0xutI5m6lBBFinjJ1JYI3vn/MOtriOkZEsUwFhpBswD4SAUaCE3GGTs+TfMldat7A56FzSTMdZsB3C8SIyNORmfXrrU3B1ScOyrAUSjWkKUyHIliiLgVRsYkMp5gx8i9BK+/GSGe+gfyUloY2drG6ZZnIOpi59SNNbd1XNOqoxHmbbOgPT5oFAiXJqifeIJKKg1WhRPsRn/gYcK/cwWNM/MD3P3weiCbpVIS2OUIlieAR0gavQmMSmF2oUXbnHGytXky1U7Ul+Gy0hFEcemeedcAsLt78d8WnhArpg8j6ZwQJ04ksOMJglGDPIKS7bS9dwfHl5wJJOpjlGubkGMTaPU2IKhYFtmKQVCDYCFOPNSEVipj++yZoVNgWRKJjlnMU6gJM7Wri67HHiI0FacQCmMZJp5yHrOUp+TxkWltpmIZlIqPUzs9TDpQS17TsAsF7MFhhNYNxRLlX91LU
ZxAeEwi9buZ2rGfVCpGNOo87+YXPZ0j309TShWQ5WnGPV7aVpHtaGpyApzx8WqAo+uO71BGRInUO2WZ8bEJOotl9AYvMS2DR6sQ1IuEjCK95RZCvjLlnCAxYVG3CiuC1bLZhLmRiKNP6+11sjnHjq1vs4BidagAR6FYQxZmOHpT1ZpQsz9JuGkaOZJZkyF6S2WKclkY7BWIwd8S/p2r6bp2dY8x1+NmJUOzhe3xWjpJ/emTyJmhUafEZchiBU9yjIYH+rEanPKO8PugUsZ+4klncKNtgVlBDwYJN3gJiwwVw5hVXEfNHIlSkJFsPV69RFikafBll9SMTEw415HI8oHc3BPi6VPAjMfOqQcsqFjkjVpsoMGToiA9RNyW/wViXKFpaHt2QzqNPdhHpbYBadlQruApJhF+QaJ5N4HhIUJTcYrBMBXTRK+U0YslKh4P6fY2KnVB+vddR1PfcUJTcTQrja1DMRAi29CIqDUoF0xOXX6A1tNHMaZLeGQWSythtdTjmSyhFzLISAAZa0UrFggNHWVyXDJhXk30gPNcjY42vFfYaMk8ItJG0e/B87TwWbMdQjil03TaCXRcA8i2NufkPTnpdOBNJD2MaO20F3IQ8BM2q6LlHcEJTiecgyI8Hk6dgr17Vh5MezGjaU7Jyu1UPHPGMX50vZwUFx4V4CgUa8jCDIepVTA0izZ/wvkyv0qvjrM+zhKZorFClJQMIOolDVPHqD2dR+7//bOeQOb64tTVVYc2LsXc9njha8EeGnGCm3CYQXZg5y30cIAdPRr2UGa2vCMLBeR0EjsxhairJW52YNteRCZLfe5R7FAIrbkJO5dHSEne8pIve0kUQ9R601xT/A3azsWaESmrQujV6IukrHaSSQnC48FrWpSKZVJ6LTVGju7A2LwXZ64YN5cDLRaj5nevRD+ZIzGQQ+Sn8VVMinUxhntamCo1k/d7qBmbIJiI48mmMYxGpsNN5Jtroc7EpEKmrp5MrVOqMsol8gSZTrVieC2CulNbTEcaOXlNLcUzOrIE0fYIueknCIlRgnVeisJD1s4RDQjMDh+yr0DhiVPI66qBQ3e3Ru9xP5M5SZOepzIWx2g6e2DR2uoEM6Oj1QBHCCewmZycEXRHwhRqWylNPoSnwzevxdujlQllR8g27KLgCeONxzn5r0dom3580WDarWRqV1fnBNsnTzoWC2tpDqg4NzY0dO7q6kIIMe/yyU9+csV9CoUCb37zm4nFYoRCIV760pcyNja24j4KxYViXoYD6ApN0B5IVOvxa+TVsTBTNFGIkCo5sxy6Q+PUNJizpZWVyGarwU1Ly8rBDVS1HVo4hH2yHzk15XxyVywKeRvN1Nndlp/nLGuPT1B58LeIQAARjZAvCKbtKELT2Gn0Y08nIZfDfMHvoUfCWGeGGZoK0ptuIqpNc0X+fvTI0poR1z+ouXl1mofxMRuZTBIrDzPweBLCIXxNUWQ6g4kz7NDNQrhiXK2tbTawSiSc+6m/tAnPi2+h8MwX4P2dG+DaK5m+5FIykWbKFUmpJsiJ/ddz/MAzOXHdMxjafTmTnTsphCIIbc5kTCHIR2pIxxopRsIIjZnBpE5nlm1pIASlQJByNITPZ5IdTROJeQgYRQqWh7zlmbkrgRYJYcfj2BPV497CENax4+Qe6aXy698w+M2fUfreD7AG54ymX+pYi+p4kHS6ervrnZRKQccOJ6M14NmDPTiMzOWQluV0Ng0O01JTQNvTgzc3ReGBR8kNTZMLNKC1tSDCYay+05R/dOdZ13Kx4ZoDumNG+vqqdguKC8eG5wY/9rGPMTIyMnt561vfuuL273jHO/jBD37At7/9bX7+858zPDzMH/7hH16g1SoUK+NmOOx4whGDzmGpE+b5MqtpmckU+Y0iAbNIT3gEU7Od0kp55VERU1PVeVc7dqy++0Nvb8O8+RBaSxOyWEIWClAs0hpK0dNtoUVm2sJm1mCPjDnBWEc7+p7dDNdcjixX2JF+D
FEqIerrETU1GDt2YN58iNP1T+NMKoqvME3USlDfU4e5xBDFcrk6tftsM7RgxqDvx/dT/uWvCdz5/yj+/NdY9z1ApnYH0u/HPz2KKOfnnaAXinFd0bbH4wR7VqgGUd9AbXsjuqE5s5wAYVZmg5dUbSOlQHA2ANNcL5oFCE0iNOloj/Q5AQ5OsOHzaoR1Qa5kYgYMNEDXLOo8mdn7iAacqZuZRGn2OVs/uRM5ncTw6si6WjKe+lUHFu3tzvXQgs1ck8XRERvNNBDd3eRjbdipDPbIKDKdQe/uwrz5ED3X1WIf70XkcxCrY0S2IoXmuCq3t2KnnUyftJd+XS5mmpqcMSjgeBQdO+a8bxUXhg0vUYXDYZpX6XmdTCb50pe+xNe//nWe+9znAvDlL3+ZSy+9lN/85jc8/elPX8+lKhRn5UIN0VuohQkZjrBzlrNkiuaOhVjNHKqF6O1teG4+hIwnwOtFC4dmHGXnpFHcNcwY2gmfF03z0+E3IQ8+uwdhGkivBzk6hswXyNR0ULyyhYxeojZY5OAzSmgNS5dTTp1yrt228JWwBodI/uDn2MkwgRoP0/49iIyX8EQvqVSaTNslRNNDNKeexB5JL9t67A6inPt7uQwBr0kkAtkJE5DopjW7jW27a3cC3nkZnDkITSKERNoCd4tqgCPxmQYFNEzDhIKTUokaeTxa9bFqZZwpvZapYpDInDJmS1OF0bxJ0gpT580iY23YQ0MrDqcER1fi8zkjMrLZqmg2HAaZiJM/1ktb6gnO5BsZMnzsaYyiX7IHvatzVl8jxieoSfYxFWmmjIEHi/FCDc3+6SWF3FsNj8cxBxwcdLr9Tp5cvYu44qmx4RmcT37yk8RiMfbv38+nP/1pKq6RwxI89NBDlMtlbrrpptnbLrnkEnbs2MG999677H7FYpFUKjXvolCsF26GQ+/uRKYXf6NdC73BU8kU9fdXg5vdu8+/s0VrbEDf1T1zhp8f3MxbQ0vzvLJd0CwTjOhoNRFEMOBkcUwT2+NjeBh6T2jUtfh42nOj6E0NS5583anX7gl4JVy90mAqjKivo7U2y1Q5gvB6KTe0I/MF/NNj6AefRu2f/D7el74E7x////C8+PfnHSv3m7dbtnEHblamU+SHx/BkJggYXmfy9ZwgyA1SmLltuVKas59ESgHS2UjO7FsX1TF0nQlLR9TVYccTwOLjriXiaLEYBV9kXhmzO+QosQsVZ2r3ZNILpol18hT2+MSKr19Hh3M9d7q9NThE66M/wh6bYMjcSaTZh/AHGB+1sQ4/iiyVZo+bzBeosyYQXhOPsPDpRWLeOZ/Bq8g2bgXa26uC48lJJ5vjuphL23ZmjJ0+45iDbsFs1kawoRmct73tbVxzzTXU1dVxzz338IEPfICRkRH+5m/+ZsntR0dH8Xg81CzIRzc1NTE6Orrs49x+++3cdttta7l0hWJF1tur43wyRXPbwIVw2sCfilfHategNTasam7XyWSM06eddttYbOWyk3uy3bXr7OuUk3HKg6OI0B6EENgzwYOhWRQsL1ZIw5gex8yn0VbIILj6G7dtOtk7SuU3/VQmsxilDG1WL4XwfjK1TYhYNfyQswHO0pmbeQinRCVnsj5yZq3BoEaloJHO2ZS7utHKR/AlBskHGymVwSxX3Xz1vT2ANk/w7tEsAkYRo5jGHhphKpenruDM7ij96E48z7tx2cBb15mddZXPg8/rBIxmdgqtficgqPFkyFRiJL3d1E3/FrHAO0h4TLr1AfqsLgqWMyxzlnWeIbWZ8PsdbU5fnyPS7u2FRkYJHrlvkSnoVhNfbwRrnsF5//vfv0g4vPBy9OhRAN75znfy7Gc/myuvvJI///M/5zOf+Qx33HEHxWLxLI9ybnzgAx8gmUzOXs7M/SqiUKwTQtOcLEdnB1rj0pmIp8K5ZIpsuxrcmOb8Sc/rvYZ5wuQlhKhaOERy70FyOY1MxomRrr56+cd0u6ZCoVX6++QLDBVi4DFp98edj
FcxT21xCFkskNcCYFk0hzMr3o+b9QoGnQxG4r8exB6bwBcwELFa7GAQz1SCxtO9RJNVka9taSAl3nwWf3oaf2oa5PJlKqQ2G9i45Mo5LFsikdi1tYhDh6jprEHm86RGMvNe89qdTtYuL/zzMmf11hj+8dPY6byTMQsEEV4v9sjYWfU4buZhYIAFmSFncut4MUqTfxqB4Ezw0nkCdzfbKBKT1HuTAJzKOn5Ga6lLu1gQwikLt7WBHY8zdOdjHO8VEApvefH1hWbNMzjvete7eM1rXrPiNjtd1dUCDh48SKVSob+/n7179y76e3NzM6VSienp6XlZnLGxsRV1PF6vF6/q0VNsQVaTKZo7iyocXjyF+0KsYaW5XVxzLfF8EydOOJmbpz1t+eBLyqrvzWqfh/D7KGoBRKmMT0xiDY3QmUrTr+3C0kr4PF7weQjUrPwZ4cYkbskrl60O3NQAIxSiaEfxTU8SOHWc9DXXY0nwTqeJDY3hz6YRtkXryT4ytTHGuveQqZsvxBAzWR7bEvMesyxLFAolTM2LrgmKtW3UvrSFiYfSjqdwt5h9zetKTjA2RR2Nc1r6Y/EjDJVr8QQMymjE81Gaaj1ou7qwh0ZW1OMYhlMOtG3IJ4uImcyQodk0+qcJ6kUMzWasUENF91LICTxzvIPcTF9kvJdJ/z4swyCZglBysZB7uxAK2nSP/oregoaIxThp1dNqJwgGNDT/Yhdtxbmz5gFOQ0MDDQ3nJxQ7fPgwmqbNup8u5Nprr8U0Te6++25e+tKXAnDs2DEGBga4frnxwgrFFmclV9dCoTphez2Fjatxll0uEDreq3HqlOMd0tDgXC+H61jc2Lj6DFRCxNBidUQGH8fK9Dv6EJ8PS/gpV8CYSmBGvE432DK4Wgmvt5rBEKFLEDiCYIMCmuFF0wRaQBBMxJ1MTalM4NgoWsmi7PdR9IYo4ScyMYY/k6J/33Xzghw3wJG25vgmzQQ4ti0pFCuEIj58HoNsFiIRDRGNUhagzfnIdDVVufycEuLJPnyJCQg2UraBYp60t4HWtjJC05CrEPp2dzsC2TNTYTrnCNyjriki0Bkcpz8eZZAdXD6n5DQ3wO0ePMbJdBujup/d3d2YB6/ZlqUYORmH4WH2NIVJGiYThQjDuTp2R0a2hfj6QrBhGpx7772X++67j+c85zmEw2Huvfde3vGOd/DKV76S2hnzgKGhIW688Ua++tWvcuDAAaLRKK9//et55zvfSV1dHZFIhLe+9a1cf/31qoNKoVhAOl1t713voZ6rZWEgNDrqrLNUcgKbffuW37dSqbZpuzqY1TAZ19B276Lu2I8cz51gkFRKQ8okFenDwKIp00v5v020W1+x5LflZLL6uHK6gF0sQ9TEtssE7RGKZYtx24OUEp/XxshahCfHaX/st0wWW7F0A7NYwOPxoIUsMrUxQlNxGvt6582jcoXI0p4fvUnAsiUBnwGI2ddhbhA0F/d2N7Ao33k31uAQwlNCajo1oTKhZj8i4nd28J7dgNI0Z+43HKbS0oF++tQiTZUpKpiZKSqNbSSNGLVz9ncDXHMyTsdkmdF0kNPRMJe0b8/sxFyNVI2WJWzmyFXmZBFXcUwUK7Nh7yyv18s3v/lNfvd3f5fLL7+cj3/847zjHe/gi1/84uw25XKZY8eOkcvlZm/727/9W37/93+fl770pTzrWc+iubmZ73znOxvxFBSKTUs8Xg1uOjs3R3CzkGLR8eJxszcHD66clTl50rlebozEUrg+Ob6giVZbg4hEIDHFdDkIArxeGxEJY9olKg8dpvL4E0vejyswjkScklfBDFPJF7AKU9jlHAGjjKUFMHSNsJ3Gi82uU08SSSawPF4qHi9S1zGLBWrGh/EU8hRCYcJTk/jTydnHcZ+/lGI2m+Oi6ZDKFckVS7MZJTcWWxjkBIOOoWGudxDh8WC84PfQd3bR1GqgdbRhNDcQ1HLIbHZGk7Q6oW93NyA0BluvX1ZT1Vkzjbanh
7FxbdG6XF1a7WWtGHVRQGNqasWH3LIsNAXVhZw36mI7ia/Xiw3L4FxzzTX85je/WXGbrq6uRS2wPp+Pz3/+83z+859fz+UpFBctw8OOyyw4XUamubHrWY6+PkcbVFPjOBGHQstvm89Xu7/cNu3V4Dodt0XSWIbutLGEQrQGsyT0EBkjhF8UEGYEOTFJ5cGHMa64fFEWx3WvEAKoj5GJdVM8GSdSk8Xr1REIirYfTUjCuWkq5QpIifR4wONBBxqsftBstEKZ4HSC6cYWdKuCUS7Neyy3i0o3Kti2MTN0FAIeE9uWxJN5Al4TEITDjt6mUHCeGjgC6PC9h0mcFozaSVr802itLWj19YQGhxmzvMTzeWpKR0HTEeEQGAbGZZeeVejr8cy8BnUxeN4h9N8u1lQZB66l0RdjctIJsl2zwIX09Dit0mNjK88R26rMHXuyUnfhdhFfrwcbbvSnUCjWjpMnq34te/ZUv+FvNgYGnCDMtp0T2+WXr7z96dPO9Wrawl1suxoUmWGf04WUTKJFQnhMgxJhQNDIKNg2wu9DxuOLNA/udyz3tRSaxlT7pZRO30/NVC92bR3S40GUS2jZLJZuYOk6Ra8fbyGHAUhNENLLSCkRfud2Xz6LNE3sOUZEczNYQreh7LSZe/wQiBhYFZ1soUShVAEcc8Hpaee19PurA1jNdAbh30fOG0EYFlb/gBMFjY0itRonSgwGoVjCHhlFC/jROttXJWbt7HR0XWfKbfS8eGlxeT2O10sm4wSHxhJnGiGc4Gdw0Al0l+gr2dJcKFPQ7Yx65RSKLYCUVRt4IZyTxWb9XMzlnBNff7+TtTmbfM4tYfj9S58ol2N42Llua5v5tlxXB4UCciZVECaJho0hHYMXEY0iNW2R5iEz0z0+V/dj1dSS2nMJ5fZ29FwWc3ICUShi1cUY23Mptq5TDoWwgkFq84PUy3EMXUMTAt1jYiAJ5jIUYg3ojfWY5uKDpek2QoAhTJpqQoTDGpqmgZBYtsS2q1mbdHqJAaw+L+CMRBBtLTPmgBrRWglSks7pTmDX0uyYB54eXJXBnM/nvMcsC2y5vBWCW0p0S4tLEQrNCLfndMdtJy6EKeh2RmVwFIqLHMtyDMPAOfmsZnTBRiGlk73p7XUGEba3V+3/l9venaV7LtobqAYmTulLw3jaNVQeeQSmk8hwiBo9TsSahHwW4fGizZQCFmoeXN+d2jmKWV0TWLU1xK98Nv5MGlEskp2KUfL5Kdl91BgmhmVRbmikdvAMIl3C9vnQNA1RKGJUKuTCYTKXXYHQdHyGwBQahZn4QAOCAQ2P9NAQ9RIOCgp5G9sWaJrz+LlctaxnWYsHsIaNPOmyH1uCyBeQloUwdBp3BMgUOsjJOmqDUxDwQz5/Th07HR3OcTx9ujpraSGBQFXsPHfMw0K6upzgPB53ypWbtaS6Xqy3Keh2Rr2CCsVFjOuGCs7JYTMHN+AIipNJJ7ukaUuXJeba1g89MQXSpr5+dW3h7r7xx4eRySTRSDUjoV9xGcb+q50zaLEEmQxasYCoqUHr2YkslZc0nHN9R12NSKkEPo9BrMYgV7Ip19ZSaGzBCoUwfDb5UIRMbQx/JgWhEMUdO7AiEbRyGSOXRc/nkK3NpJ57I7KlmdqQj4aaII21QSIBL16PTsDvob0xQE3Ihy6cM77psSmWbII+z2yruIONPZ3EOtWPTGeQXqfkFfOm6AhOogmQ5eoIHM2u0BAt0tJQckZlCHHO4xJcHVSpVG2hX4qeHud6JW9VIapGgitle7Yy620Kul1RGRyF4iIll6uKaJua5mcYNhPStpGTcZKTZQrJIKfPhInFNG64YfG21uDQrBlgpWiRYCdarI7a3+sAVk7Xz913KNsOhk5tdxnr4HWzjsrm7z0XWSphTybQwkHnTK1ryMT0spqHhQM2HQG3YOcOP0NTOVLZEsL2IAGPp0K+JJnacym1pRzG2BhWTQ3FHZ3o6TTa1BSlQJDaP/lDulpbG
ZpIk8wVKFdsNE1QE/ahV/xovgylShbbtimXoVyxyBUqGEaQ2lAAt1XcGhxCu7+fwniSQuEIjAwjcjno6sSMhDFxAjxhVj/qhWFQ68kyj/Po2HH1M2fOLB9Y67rTwZdOOyWo5SzS/H4nw5PNOtYBq5y/rFCsiApwFIqLkGQSRkacn9vbV+5A2kjcoKMyOMKZXBvH7V1EojrtHTH8/vpF25Z/dKejIYnVMeDvRBQrNE0+RvlHR2AFTcLcfcu1jQhfLXqliN1/gnI8Pruv3t6G54XPrzoqp1LLTg6Hapv5XPNB1xOnpcFHMBhjaCLNmSGLfLFCIGoRMbzIYAvZ2mcRfOJxzIlxRKWCbRikOrvhmmsI9XQjhCAS8JItlKlYNoauMe4z6SsIausFvohNYsymnJEEohZ1YT8BO0zQ50FKKI7GKT9+J4GkQTHQQaFpD4FCFntiEqtUgj270SKOP4D0+xAzKSjp9zM3GXa+HTvue65QcATdyyUdWlurJahYbPntOjrg6FFHNF07o7dVKJ4KKsBRKC4yJiaqupDu7s17IpgbdPT59zEdjuLJC/TkFJ2P/RqroxqwLBTIlmwTWdERPp1wfe2KtvUL9x3KNiOERkdtGq1useX9uWgeFg7YhPkt49Ggj0jAi52rUAxK9uyJYts2x4fixPVa8s/8XbzpFDKfJ4uBVl/Hnh0Nsy3BQghC/moXVWLmEzng9bC3s55SqoItJXu6IegzOX5cOF1d0sY61oudzhBp72A66yVrS8JdO6BcdkpWfafh8ksQpRIyPoXe0QYS5NAIco06dlpbHTH30FB16vhChICWFicg7+9fXrMDzvu5r8+5XHLJOS1FoViECnAUiouIM2eqbr49PefWVXQhmRt0JBt3Yxd8DKfqqfNmOdA6gj2SmRd0LBTInsk52Z3O4PhZbesX7mvPTOL2aBaw9L6rGS0BVaHy3CByYclKCIGpmxg+CM9oU/a0x2bLT3lfCC0QJhrw0dYQJhpcvgxkGM5927Zzvz6PiRAQ8i/YMJ3GjifQ6uvw6jNlKCQiEkHfvQv6B7CnppH9AxAOzWaogCXngS2VvVoNkYgT4GSzi1+XuUSjTumpVHI0TcsF5V6vs20y6ZS/lvPQUShWwyb9eFQoFAs5ftw98TkeN2sxDXy9cIMOu66eeDHKiUwztZ4sXaFxfIa9aPbRXNt6gCbfNEXbnAlSWNG2fuG+naFx7LnTuJ+C5f3Ck7bbRb3wBL3QsdfN7MwtPwV95jwzt6XQdefiCncXjmLweJwAwbSLlCoW0utFAO3BSXyaY4AkIhG0yy6B/gHM5z4bfWfXvAzVWnfsNDc7wcvIiJPRWY7ubkdk3t+/sudNS4sT4GQyTonQvzC4UyhWiZJqKxSbHNfjxtU57N27uYMbqAYd/dYO0mUfUTOLqdl0BiedDRZ07Sy0rQ+bBeq96eodriCCXbivR7Pw6ZVV7bsSrmHi3BNsemZJ0eji7RceE7f8VBPyEfJ7zhrcQDXAcU0KF+KuxRM0wdDJz0yx8evl+VmlUgktHELf2bWoK2etO3ZqapzrVGrpNbvMmDojZVXHtByuoePp0yvfp0KxEirAUSg2MZWKE9xI6XSZ7Nmz0StaHcLvY1JrolKwKNgeGnwpnh47Wt1gQdDh2tbb8cSi8SyuCHapFu6nuu9KuAaDsTm7uSfmubO93IdcCy2UYThBrG1XtT5zcQMcrTaKFqsjkyiu6XM+X9zuKNezaDncbquRkZUDF9Os6p5cF2uF4lxRAY5CsUkpFh0Le3A+7JcTcW5GypEYqZpOxpJ+Yp4kHYE4+uxgyMUnYNe2frkBjiuJYJ/KvivhBjhzDercrqq52id3FvBKhoWrZW6JqlJZbHo3m00SGtqe3RR8dWv6nM8XNwicnl55O02rbus6TS9HY6OTFSsUmOP5o1CsHhXgKBSbkGzW6SQBR5PQ2Lix6zlX+vo10q2X0BDM4p0awV+aPusJ+KnY1q+H5f1SGYalb
nMDnHMZArocc0tUlYpT0oGqJsf9vVAALRbDuvbgprH5dzMuZxu54GZ70umVTQKhWqo6c0aVqhTnjhIZKxSbjKmp+eMJ1uLEeSEZHHROzjJcg+9pV9I5+ivk8MSqunaeim39Wlreu2LiOXMwZ0+wC6deu9mFtRDDuiWqSmV+gFMqzb//SgWQNugG+jVXo196Cfh8aMHAhtn8NzY6bfXx+PKGfi47djgmlWcbsmkYzv2Ojzsux64zskKxGlSAo1BsIkZHq2n+nTvnn2AvBvJ5p/tlbMwZctnZGcPrveWcgo7VtnCv9b5zcbU2c/1v3PLUQoGxO8phLWKKhSUqj2kjk2nyp7J4G8yZkp6GNRnH7u3FnkxQEicQHhOtrRVx4DpnIOcGUVPjvH8Tifmv3ULmzqnK5VYO4uvqnKxQpeIImeeaLioUK6ECHIVik9Df75QeAHbvXpwp2OxI6QhCp6ac1uFg0M06rE3QcSFxDf6WcjBeeIJdy9KJplVLVMWhcczjD1IegIxM4PelHTG17wqs3lPohQwyVIOoa0EUi1h9p5GT8RUdn9ebpiYnwBkfXznAAaf8dOKEk8k5m6lfT48zc2142HFQVqOaFKtBvU0Uig3GbQMvFJxvtXv3XnzBDTgBWrlczUJcTKLohbgt4nNPpM4MKmdi+0LWqm1fiJkS1VSK4q/uQx84hfD7qdS3IMJhKqf6sX51D3I6Sag5hPB6KeFFBPxo7a3YacdAUdr22R9sHRCi2mF2NsGxYVTHPUxOrrytrjtaNKgOl1UozoYKcBSKDcS2q23gpnlxeNwsRSbjlGrGx50MR3f3Rq/o/HEzMguPw0qZmrV0lNaETfnMMKVsGbOjGeH1UsZ0gpi6GryZCexKGc+MsV/B8sysd75r80bhmv2Njp5927aZRNPk5NkzYdGoE/xJWe1wUyhWQgU4CsUGUS477sTgfOt1O0YuNmzbERbH48637Ehk887HWg2uaNgtsUjbxh6fwJ6YgFRyXnbE1d+spRBcptJOV1SkZrE5oGXhFwVksYReyBHzpgka+erfFxgobgRCVF+PdPrs27qTw/v7z37fu3c712NjZ+/AUiiUBkeh2AAKheoHekPDfDO5i43eXqfLx+dzvmGvZNd/MeAOMq2trU5DLwyOU8k1E/KWKA0aGAeuQ29vW1MPHBdZKiEtG2k6mZnWQAJDOGdzYRj4jDJYFvmSTlM0M3/n83RtXmva253gfWjo7PqamhonYCkWnffRSsJ6IZz7Hhw8eweWQqEyOArFBSadrgY3bW0Xd3AzPe2UDCYmnJP8SpOiLxZcobcYdaahW32nyfibELFaImGw+k47tw8OzWZ71jKDIzwehK5BySlBBY0iXnf0RCCA3w/CqlAQ8/vSN8LBeDk0rapVWo1Jn+tw7Ho/rUQoVB35cDbtjmJ7owIcheICEo8732rB+VCfa/l/sWFZjs5ictLJ2tTUXHxt7UvhaEGq09C19lYyRi1CaITCzBPzZjJOuWotNTgiEkaEQ9jpzKIxDADC50MEgxRSpQ13MF4JV2R+5szZt/V6HQ2alFUx90q4Gq/JyaVHWigUoAIcheKCMTRUdXndtWvpbpyLiRMnnLKC27braikuZtzsTdhKYg8No8XqEEJQtp22NiHmi3lJnUVkcj4IDdHaiub3LTmGQW9swLjh6WjNjZvCwXg5dL06aiKfX3lbqAYtZxvhAM5x2LHD+fnkyfNbn2LrozQ4CsUF4OTJauvxnj0Xv4/HxIQjLp6edrxPtkJpCqr+NzWeLLJURvgctXTMk6Jgz0lPeb3IxBR2sYS+xl1vQoAWiaBdew36WA57aHiRC7SRbUPu7MYb2/WUXZvXk87OqtfN2fQymuYIuxMJGBqyaTFXNocMBJxLLudkErdCgK1YW1SAo1CsI1I6YkspnRPXnj0XZxv4XMplp9Q2MeEEN7HY4qGQFytu148/6qXoMaFQhICfOm8WmCMmccW861iT02IxPNcv4wJ9D
Gfg5iY3UJw7Hb1QOHvWsrERJnvjTN3bSzT9CFq5OOvS7Aq757JjBxw96gTadXVbo0SqWDs2V7ivUGwhLKvqcePzXbweNws5dco5WdXUOM/nbHOHLibcQFTUxxzX4HhikQ7GFfPaLR2ISHhNO6gWIjQniNE7O9AaG2azGG6gcDEMoHRLT/39Tru9dfoM9vjEkmaE1uAQTY/eiT02Qb+xB63NMTicK+xeiCtQPnVqHZ+E4qJEZXAUinWgVKp+4NbUbJ30+fCw8208m3UyNxerd89SuGJVv98JLIwD1yEn4454N1brKGGLRadTKRwiv+9akNqGDEP1+x1dS6GwNkM+1xPTBJmIYx3rJZ1+DLOcXzIrI21H2O3PTaLVdwKCgvTiD2ho/lbswWEq9z+E1toyr1zl8zneS6mUo3Nr2xwSJMUmQGVwFIo1JperBjdNTVsnuCkUnJPI2JgT3DQ0rG330EbjuuO6Bn96e5sj2u3uXFLMW4w6swPWOoPjzqNaCTeocX14NjPW4BBtj/4H9tgEZ4yeZbMycjI+K+zuDo0DMJitB87u0ux6L6XTVaG4QrGFPp4UiguLtO1F+ohUWmNkxPl7R8fan/w2Cikd7558HurrnTLOxezfsxSuwHjuMdPb29BaW5bUwWRnunfWWveh61URulsyW4gb4KymO2kjcbMyRnYarX4XIKhgYC6RlZH5wqyw29BsYt40mphTxpoRdi/n0rxzp/PFor9/65SDFU8NFeAoFOeB63BrDw07H8oek3hsL6nOq9BiMbq7L+5xBQsZGHBOtsWic3Lt6dnoFa09y82gEtrS09DdktZan0gNo5rBse2lszlu5mzTBzhzsjId3knOZBvIWT6iWm5RVkb4fYh5wu5zc2n2eKpdWAMDTgeXYnujSlQKxTliDVYdbkU4jNbWwpDZTWIgi/Xgb+n2D22p4CabdU6ko6OOnqip6eKcdr4Srt71XLrB1kvgqwkbLZemMjZBaWRyxcngGzQ0fNW4WRl8Xnx6hbZAnIgxp642Z3bWaoTdZ3Npbmx0As58/uIo3ynWF5XBUSjOATfl7jrcCiE4kW5GGgKtXtI9/RA8lEd2tGw6T5LzQUrHiTaXcwIbIZwZTVsN1z3X1d+slrXO3liDQ9i/OI49pFG0smQPj6N11C7ZIg2bv4tqYVYmYJTmbzAnK7MaYfdqXJp37ZrvvaNKVduXDfsE/tnPfoYQYsnLAw88sOx+z372sxdt/+d//ucXcOWK7czclDsIetMtSCkQQrI7Mopev7wQ8mLkxAknS2Dbji7Enea81XAHbEajq9veDSzWMlPnZgbF8CCGz0TW1mEHI8u2SF8MJ+5zzcqcTdi9Gpdmw6haF6jW8e3NhmVwbrjhBkZcNeYMH/rQh7j77ru57rrrVtz3jW98Ix/72Mdmfw9sRJ+mYlsyVwjp3AABo0hbYEahehYh5MVEMlmdN9XaCi0tF78D83K4LtOrfX5rPUV8bmbQbN6NnjORgOUNOrOvlmiRNozqujcr55OVWUnYvVpiMWdOVbnsdFZdzDPfFOfPhgU4Ho+H5jn9s+VymX//93/nrW99K+IsX00CgcC8fRWKC8XclLsI+NkVHkUTc76ZnkUIuZmZ2xVme3yMTMfI5rTZwGa12Y2LjeXExSvhBjhr9d1qbmbQ0Gx0YVO0DSpSWyzGnRE8+/3OCbxU2twOvnp7G9x8qCrKXzB2YqmszHLC7nOhpwd6ex1vnK0wHkVx7mwaDc73v/994vE4r33ta8+67b/+67/yta99jebmZm655RY+9KEPrZjFKRaLFIvF2d9TqxlXq1AsgZtyt/pOo/lb0eacFN2Uu97dtaIQcjOysCvsJD3I2hh07kYE67Zk15RLdmYCw7loi9x91spkb25m0E+JgFEAy0tFzqi5l8gMBgKOdiif39wBDqxNVuacH1N3PKhGR51A52yzsBRbj00T4HzpS1/i0KFDtLe3r7jdn/7pn9LZ2UlrayuPPvoo7
3vf+zh27Bjf+c53lt3n9ttv57bbblvrJSu2IWslhNxMuNoPO51Bi9UxpcUg7WNsVNKcfojm9ivQtJaNXua64frfnEuA435fWqvDPDczaAQ0OoOTnEi3VAOcJTKDc71wLobs2lpkZc6VmhrHmFJKZ15VTc0FfXjFBrPmn8Lvf//7lxUPu5ejR4/O22dwcJA777yT17/+9We9/z/7sz/j0KFD7Nu3j1e84hV89atf5bvf/S4nT55cdp8PfOADJJPJ2cuZM2ee8vNUbF/WQgi5WVjYFWb5giRKUbJ6lKYGC62Qxf/EAyu2Kl/suF4yG9kivlCM65bLLFtbtkXazdpsdi+cjcYVxo+OOpoyxfZhzTM473rXu3jNa16z4jY7d+6c9/uXv/xlYrEYL3rRi8758Q4ePAjAiRMn2LXMYByv14t3KxmTKDacjUi5rwdztR9CCAazMSwpMLUKmhDsbExjD2XmaT+2Gsu5BZ+NtexiWiozKG2LSqmCPTW8ZGbQffxSaZk7VQBOlq2tzdHinDihSlXbiTUPcBoaGmg4h/HCUkq+/OUvc+utt2Key1eoGQ4fPgxAS8vWTaErNicbkXJfaxZ2hTX7p3ky1UZIL9IaSCCEF3tqa3SFLYVbajqfLpu1nsO1SIybmwZDX1GMC5vfC2czEA47Gbpy2bEE2GpjRhRLs+EanP/+7/+mr6+PN7zhDYv+NjQ0xI033shXv/pVDhw4wMmTJ/n617/OzTffTCwW49FHH+Ud73gHz3rWs7jyyis3YPUKxcXNQiO2bMVHSC9i6hWCRhGZu3i7wlaDq785F4M/NyhaD3eKuZlB4xhoXg+e68LLZgaFUAHOatm5E44dg4kJR7O0lQbFKpZmw/PpX/rSl7jhhhu45JJLFv2tXC5z7NgxcjM9mR6Ph//6r//iec97Hpdccgnvete7eOlLX8oPfvCDC71shWJLMFf7UbYEiWIIgM7AxKrt8S9m3IZK3znEb2vtgbMQoWlojQ1oDQ2IaHTFsufFYPa3WRDCGYALsIJkU7GF2PAY9utf//qyf+vq6prnftnR0cHPf/7zC7EshWJbMFf7cWLARISKtEeTkM9dtF1h58L56G/cFvEL4S96tuyM3++sx7K23nyw9SAYdF6zfB7Gx53ZVYqty9b81FIoFKtGb29j6ukvRGtswFtM4h0fuGi7ws4Fdxr4uWRvoBrgrHeJYzWBl9sqXtiaEql1YccO5zqRUALtrc6GZ3AUCsXGUipB0tOE8fQGdtXFoXDtRdsVdi5MTzvX5zpgczMx1wtnvUpmWw0hoKsL+vudWVVLqCMUWwQV4CgU2xx3IGFXt4buu7i7ws4FV2AcCp3bfufbVn5Oj2HbyGQau1jCrmPZYNMNcFxdkGJ1+HxOZ1U6DcPDzqw1xdZDBTgKxTbGnXcbCp17qeZix/Uu3GxCXXdsRqXPwC7bFA8PorW1Yhy4blG50I15lNnfudPWBkePOkLzurrt9/7fDmzd/LNCoViRYtGZGA5wlgkpWw5XvHsuOhpp25RGJrAnJvAXp9bF3dkdm2H1ncYMeBCxWkQ4jNV32rl9cGjNH3M743rO9vdv6DIU64TK4CgU25S+Pue6u3tj17ERuIHdavU3blYleSZJJV+L4UtROhFaMqtyviwcm2HkDcoVDen3o7W3Yg8OU7n/IbTWlkWOxsoL5/zweJwZZFNTMDBQFSArtgYqg6NQbEOGZhIBkYgzK3S74epvVjN8cW5WJe+PIWK1BCPGmmdVFo7N8OtlAGypIYRAi9ViDw0hJ+Nr8ngKh6YmJ0jM5ZSWaauhAhyFYptRKDjiSti+4kq3PfhsTWILsyp5I4IQGt6g4WRV0hkq9z+0JuUqd2wGM2Mz6jxpdoZHMbSZ+/Z6keXyorEZ7tBNlcU5f9wxhgMD6nXcSqgAR6HYZrh6gwUzb7cdqxEXL8yqVKQ+u+9aZ1Xmjc2YeQxdzDnbFpcem+F2UrkjJBTnjmFAfb3zs1u6VVz8q
ABHodhGDAw417W11W/+2w3XqG815amFWZUaM0vAmJNBWSarcj7MHZshF6QRVhqb4Toqq06qp0Z9vRNUlkqQyWz0ahRrgQpwFIptQj7vaAyEcHQH25VzGbC5MKvS4EvRFpiqbrBMVuV8cMdmaOEQ9uAwMpdDWhYyl8MeHF52bMZcsz/FU6Onx7keHATbsrHHJ7BOn8Een1iXrjnF+qK6qBSKbYCUcPq08/N2L025QlLTPPu2blbF6juN5m9FzKlruVkVvbtrzYaR6u1tcPMhR/czNIxMTCFME727C+PAtUt2bLnPQwU4Tx1dd4L/kSNxnvxNLzvTDyNLZYTHXNaLSLF5UQGOQrENcIObWGx1J/atzLk4Ec8dRmoPDqPFap22s2Jx3YaR6u1taK0tyMk4Ml9Y9dgMd7aW4qkRyQ4x9NAR7FyBdH0TkQYLCkWsvtOO1moLz2fbaqgSlUKxxclmnc4pIaBh+0xiWBK3e+pcxjPo7W3O0NHuTmQ6gz0yuu7DSIWmoTU2oHd2oDU2rCqAUt0/Tx23a66rdBRRX8c4zdhCRwT8a941p1h/VAZHodjCSAlnzjg/u62w25lz0d/M5XyzKhcKZfa3Nrhdc0Z9LS3mNKP5WkbytbQHEou65kTjNv+2cBGgAhyFYgvjDtJsaDi3sQRbFdfB2BXmngtC0zbtSU3XVYlqLXC75oTPS1grULQzhI054iavF5mYWpOuOcX6szm+figUijUnnYZy2fl2H1sbDexFz1bNcrgBmwpynhoLu+bqvWm8+pwXdQ275hTrjwpwFIotiJTVcQxu6+t2x7Kc6604NVq1iq8N5+tFpNicqABHodiC9PY6101NTvlC4QxUhHPX31wMqABnbThfLyLF5kRV5RWKLUYyCbbtlKZqazd6NZsHV2AcDm/sOtYDNyulhkU+dc7Hi0ixOVEBjkJxkSNte7a7R3p9jEzFQGjs3r3RK9tcuJ29q/XAuZhwn5OaR7U2bPauOcXqUAGOQnERYw0OVb9plsqcpAdRV0fr9V1oWvNGL2/T4MopVLlOsVo2c9ecYnWoAEehuEixBoco/+hO7HQGLVZHUquFtB85Pk7gF0ewQspx1SWVcq63ov7GRXnhKBTzUfk2heIixHVctdMZx2HVH2CyVIvweunpLCnH1QW4+pvVTBBXKBRbAxXgKBQXIa7jqharQwjBRCEKQIt/Ck2b77iqqGpTtnKJyhUaq5hWoXBQAY5CcRHiOq7i8wJQ701R70sRMmccVr1eZLmsHFfnsBXFxXNxW8UL6pArFIAKcBSKi5KFjquGZlPryVY3UI6rs7it01u9POX32chkkuzJEezxCVWeVGx7lMhYobgIcR1Xrb7TaP5WxJz0hOu4qnd3KcdVqvqbrewJZA0Oof3mt5T7dZJ2hpA/jtbWinHgOiU0V2xbVAZHobgIUY6rqyc7k9jyeDZ2HeuF200nTvch/H6KdS2IcBir7zTlH92JNTi00UtUKDYE9emnUFyk6O1tmDcfQu/uRKYz2COjyHQGvbvLuV19cwec1umtqr9Z2E0nvF6k0BEBv9Ndp7rpFNsYVaJSKC5ilOPqypRKznUwuLHrWC8WdtM5NzpXQszvplOmdYrthgpwFIqLHOW4ujxbecAmVLvpxEw3XdSTpWLP6YX3epGJKdVNp9iWrNvXvI9//OPccMMNBAIBapZpXxgYGOCFL3whgUCAxsZG3vOe91CpVFa830QiwSte8QoikQg1NTW8/vWvJ5PJrMMzUCgUFzvT0851ILChy1g3FnbTNfpStAamqhuobjrFNmbdApxSqcTLXvYy3vSmNy35d8uyeOELX0ipVOKee+7hX/7lX/jKV77Chz/84RXv9xWveAVPPPEEd911Fz/84Q/5xS9+wZ/92Z+tx1NQKBQXOVt9dIHbTWfHE8gFT9btptPa2lQ3nWJbIuTC/4o15itf+Qpvf/vbmXa/Ss3w4x//mN///d9neHiYpqYmAL7whS/wvve9j
4mJCTxLtDw8+eSTXHbZZTzwwANcd911APzkJz/h5ptvZnBwkNbW1lWtKZVKEY1GSSaTRCKRp/YEFQrFpkPaNpWxOL0nwBcy6b4qsmV1SfNnktWC1wvFohPchENKcK7YUpzL+XvD/uPvvfde9u3bNxvcABw6dIhUKsUTTzyx7D41NTWzwQ3ATTfdhKZp3Hfffcs+VrFYJJVKzbsoFIqtiTU4ROl7P2D83+6i8uvfEPz59yl97wdbtl1addMpFEuzYSLj0dHRecENMPv76Ojosvs0NjbOu80wDOrq6pbdB+D222/ntttue4orVigUm5252YzpwBWIoIeIkcTqO+3M5dqiJ3zVTadQLOac3v3vf//7EUKseDl69Oh6rfW8+cAHPkAymZy9nDlzZqOXpFAo1piFnjDS9CGEhhbcHp4wQtPQGhvQOzvQGhtUcKPY9pxTBudd73oXr3nNa1bcZufOnau6r+bmZu6///55t42Njc3+bbl9xsfH591WqVRIJBLL7gPg9Xrxer2rWpdCobg4WcoTxquXAeUJo1BsR84pwGloaKChYW0+GK6//no+/vGPMz4+Plt2uuuuu4hEIlx22WXL7jM9Pc1DDz3EtddeC8B///d/Y9s2Bw8eXJN1KRSKi5OFnjDdoTHmdVAoTxiFYluxbjnMgYEBDh8+zMDAAJZlcfjwYQ4fPjzrWfO85z2Pyy67jFe96lU88sgj3HnnnXzwgx/kzW9+82y25f777+eSSy5haMgRB1566aU8//nP541vfCP3338/v/71r3nLW97Cy1/+8lV3UCkUiq3JUhPWTW1OOUp5wigU24p1C3A+/OEPs3//fj7ykY+QyWTYv38/+/fv58EHHwRA13V++MMfous6119/Pa985Su59dZb+djHPjZ7H7lcjmPHjlEul2dv+9d//VcuueQSbrzxRm6++WZ+53d+hy9+8Yvr9TQUCsVFgvKEUSgUc1l3H5zNiPLBUSi2JsoTRqHY2pzL+VvNolIoFFsGvb0Nbj7kdFMNDSMTUwjTRO/uwjhwrQpuFIpthApwFArFlkJ5wigUClABjkKh2IKoCesKhUJ9pVEoFAqFQrHlUAGOQqFQKBSKLYcKcBQKhUKhUGw5VICjUCgUCoViy6ECHIVCoVAoFFsOFeAoFAqFQqHYcqgAR6FQKBQKxZZDBTgKhUKhUCi2HNvS6M8dv5VKpTZ4JQqFQqFQKFaLe95ezRjNbRngpNNpADo6OjZ4JQqFQqFQKM6VdDpNNBpdcZttOU3ctm2Gh4cJh8MIITZ6OduGVCpFR0cHZ86cUVPcNxB1HDYP6lhsDtRx2Dyc7VhIKUmn07S2tqKdZb7ctszgaJpGe3v7Ri9j2xKJRNSHyCZAHYfNgzoWmwN1HDYPKx2Ls2VuXJTIWKFQKBQKxZZDBTgKhUKhUCi2HCrAUVwwvF4vH/nIR/B6vRu9lG2NOg6bB3UsNgfqOGwe1vJYbEuRsUKhUCgUiq2NyuAoFAqFQqHYcqgAR6FQKBQKxZZDBTgKhUKhUCi2HCrAUSgUCoVCseVQAY5CoVAoFIothwpwFOvOxz/+cW644QYCgQA1NTVLbjMwMMALX/hCAoEAjY2NvOc976FSqVzYhW5Durq6EELMu3zyk5/c6GVtCz7/+c/T1dWFz+fj4MGD3H///Ru9pG3HRz/60UXv/0suuWSjl7Ut+MUvfsEtt9xCa2srQgi+973vzfu7lJIPf/jDtLS04Pf7uemmm+jt7T2nx1ABjmLdKZVKvOxlL+NNb3rTkn+3LIsXvvCFlEol7rnnHv7lX/6Fr3zlK3z4wx++wCvdnnzsYx9jZGRk9vLWt751o5e05fm3f/s33vnOd/KRj3yE3/72t1x11VUcOnSI8fHxjV7atuPyyy+f9/7/1a9+tdFL2hZks1muuuoqPv/5zy/597/6q7/ic5/7HF/4whe47777CAaDHDp0iEKhsPoHkQrFBeLLX/6yjEaji27/0Y9+JDVNk6Ojo
7O3/cM//IOMRCKyWCxewBVuPzo7O+Xf/u3fbvQyth0HDhyQb37zm2d/tyxLtra2yttvv30DV7X9+MhHPiKvuuqqjV7GtgeQ3/3ud2d/t21bNjc3y09/+tOzt01PT0uv1yu/8Y1vrPp+VQZHseHce++97Nu3j6amptnbDh06RCqV4oknntjAlW0PPvnJTxKLxdi/fz+f/vSnVWlwnSmVSjz00EPcdNNNs7dpmsZNN93Evffeu4Er25709vbS2trKzp07ecUrXsHAwMBGL2nb09fXx+jo6Lz/kWg0ysGDB8/pf2RbThNXbC5GR0fnBTfA7O+jo6MbsaRtw9ve9jauueYa6urquOeee/jABz7AyMgIf/M3f7PRS9uyTE5OYlnWku/5o0ePbtCqticHDx7kK1/5Cnv37mVkZITbbruNZz7zmTz++OOEw+GNXt62xf3cX+p/5FzOCSqDozgv3v/+9y8S5y28qA/rjeFcjs073/lOnv3sZ3PllVfy53/+53zmM5/hjjvuoFgsbvCzUCjWnxe84AW87GUv48orr+TQoUP86Ec/Ynp6mm9961sbvTTFGqAyOIrz4l3vehevec1rVtxm586dq7qv5ubmRR0kY2Njs39TnBtP5dgcPHiQSqVCf38/e/fuXYfVKerr69F1ffY97jI2Nqbe7xtMTU0Ne/bs4cSJExu9lG2N+38wNjZGS0vL7O1jY2NcffXVq74fFeAozouGhgYaGhrW5L6uv/56Pv7xjzM+Pk5jYyMAd911F5FIhMsuu2xNHmM78VSOzeHDh9E0bfY4KNYej8fDtddey913382LX/xiAGzb5u677+Ytb3nLxi5um5PJZDh58iSvetWrNnop25ru7m6am5u5++67ZwOaVCrFfffdt2w37lKoAEex7gwMDJBIJBgYGMCyLA4fPgxAT08PoVCI5z3veVx22WW86lWv4q/+6q8YHR3lgx/8IG9+85vxer0bu/gtzL333st9993Hc57zHMLhMPfeey/veMc7eOUrX0ltbe1GL29L8853vpNXv/rVXHfddRw4cIC/+7u/I5vN8trXvnajl7atePe7380tt9xCZ2cnw8PDfOQjH0HXdf7kT/5ko5e25clkMvMyZX19fRw+fJi6ujp27NjB29/+dv7yL/+S3bt3093dzYc+9CFaW1tnvxSsirVs9VIoluLVr361BBZdfvrTn85u09/fL1/wghdIv98v6+vr5bve9S5ZLpc3btHbgIceekgePHhQRqNR6fP55KWXXio/8YlPyEKhsNFL2xbccccdcseOHdLj8cgDBw7I3/zmNxu9pG3HH//xH8uWlhbp8XhkW1ub/OM//mN54sSJjV7WtuCnP/3pkueFV7/61VJKp1X8Qx/6kGxqapJer1feeOON8tixY+f0GEJKKdcqIlMoFAqFQqHYDKguKoVCoVAoFFsOFeAoFAqFQqHYciiRseKcSKfTjIyMYNv2Ri9FoVAoLlo0TaOlpUUZCq4jKsBRrArbtrn99tv57ne/u9FLUSgUii3DS17yEj7wgQ+gaaqgstaoAEexKm6//Xa+973v8ba3vY39+/djmuZGL0mhUCguWsrlMg8//DB33HEHAP/rf/2vDV7R1kN1USnOSiqV4rnPfS5ve9vbuPXWWzd6OQqFQrFl+OpXv8rnPvc5fvrTn6py1RqjcmKKs+ION9u/f/8Gr0ShUCi2Fu7n6sjIyAavZOuhAhzFWXEFxaospVAoFGuL+7mqGjfWHhXgKBQKhUKh2HKoAEehUCgUCsWWQwU4CoVCoVAothwqwFEoFArFmlEoFM56mas3ueOOO+js7MQwDN797ncTj8dpbGykv79/457EGvHyl7+cz3zmMxu9jG2L8sFRXFBs22ZoeIJMNk8o6KettUEZXF1ESCnJFspULBtD1wj6TIQQ6/qYz372s7n66qv5u7/7u3V9nNWy2dZzNqRtIyfjyHwB4fch6mOIdfyfu+aaa3jyySeX/bsQglOnT
tHV1cUjjzzCO9/5Tv793/+d/fv3E41G+eAHP8gf/MEf0NXVtW5rvFB88IMf5FnPehZveMMbiEajG72cbYcKcBQXjN4TZ/jxXfdy4tQZisUSXq+Hnp0dvOD3rmd3T8dGL+8pY1kWQogtG7AlswWGJtIkcwVsW6JpgmjAR1tDmGjQt9HLW5FSqYTH49noZVxwrMEhKvc/iD00jCyVER4Tra0V48B16O1t6/KYb3jDG7jtttt48MEHF3VevuENb0DTtNng5Yc//CEHDhzg5ptvBiCXy/GlL32JO++8c13Wdi5UKhUM46mdIq+44gp27drF1772Nd785jev0coUq2VrfhIrNh29J87wpa9+n8ePnKSuNsKu7jbqaiM8fuQkX/rq9+k9cWZdHvf//b//x759+/D7/cRiMW666Say2Sy2bfOxj32M9vZ2vF4vV199NT/5yU9m9/vZz36GEILp6enZ2w4fPowQYjZ1/pWvfIWamhq+//3vc9lll+H1ehkYGKBYLPK+972Pjo4OvF4vPT09fOlLX5q9n8cff5wXvOAFhEIhmpqaeNWrXsXk5OS6PP+1IpktcHwwTiKdx2vqhAIevKZOIp3n+GCcZLawLo/7mte8hp///Od89rOfRQiBEIKTJ0/y+te/nu7ubvx+P3v37uWzn/3sov1e/OIX8/GPf5zW1lb27t0LwD333MPVV1+Nz+fjuuuu43vf+x5CCA4fPjy770rHZ6n1bNZSijU4RPlHd2L1nUaEw2htLYhwGKvvtHP74NC6PO6tt95KoVDgscceo6ura/YSCoX4xS9+wRve8AYAenp6+OAHP8g999yDEIJbb72VH/3oR3i9Xp7+9KfP3p9t23ziE59g9+7d+Hw+mpqaeM1rXjP798cff5ybb76ZSCRCc3Mz73rXuyiVSrN//8lPfkIwGJxXFnv88ccRQswe1/7+foQQfOtb3+KZz3wmXq+X73//+wwMDPDqV7+apqYm/H4/V111Fb/61a9m72dgYIA//dM/pba2lrq6Ol7xilcwNTU17/W45ZZb+OY3v7mmr7FidagAR7Hu2LbNj++6l8RUij09HYRDAXRdJxwKsKeng8RUip/8171r7gMxMjLCn/zJn/C6172OJ598kp/97Gf84R/+IVJKPvvZz/KZz3yGv/7rv+bRRx/l0KFDvOhFL6K3t/ecHiOXy/GpT32Kf/qnf+KJJ56gsbGRW2+9lW984xt87nOf48knn+T//t//SygUAmB6eprnPve57N+/nwcffJCf/OQnjI2N8Ud/9Edr+tzXEiklQxNpiiWLSNCDaehoQmAaOpGgh2LJYmgizXqYon/2s5/l+uuv541vfCMjIyOMjIzQ3t5Oe3s73/72tzly5Agf/vCH+Z//83/yrW99a96+d999N8eOHeOuu+7ihz/8IalUiltuuYV9+/bx29/+lv/9v/8373vf++btc7bjs9R6Ojo2X/ZR2raTuUln0NpbEQE/QtMQAT9aeyt2OkPl/oeQ6+C9Ul9fz4tf/GL++Z//ed7tX/va14hGo7z4xS8GnGBz586dfPrTn2ZkZIS///u/55e//CXXXnvtvP1uv/12vvnNb/LFL36RY8eO8d3vfpdnPetZADz88MPccMMNXHPNNfz2t7/lm9/8Jt/4xjf41Kc+Nbv/ww8/zBVXXDEvs3r48GFaW1upr68H4JFHHgHg05/+NB/+8Id54oknuPbaazlw4AD5fJ7vf//7PProo7zlLW8hEokAcOLECa699lp6enr4zW9+w1133cWJEyd4z3veM2/9Bw4c4P7776dYLK7Bq6s4F1SJSrHuDA1PcOLUGVpb6hfpNYQQtDbH6D15hqHhCTram9bscUdGRqhUKvzhH/4hnZ2dAOzbtw+Av/7rv+Z973sfL3/5ywH41Kc+xU9/+lP+7u/+js9//vOrfoxyuczf//3fc9VVVwFw/PhxvvWtb3HXXXdx0003AbBz587Z7f/P//k/7N+/n0984hOzt
/3zP/8zHR0dHD9+nD179jy1J70OZAtlkrkCAZ+x5PEL+AySuQLZQpmQf23LQNFoFI/HQyAQoLm5efb22267bfbn7u5u7r33Xr71rW/NCxSDwSD/9E//NFua+sIXvoAQgn/8x3/E5/Nx2WWXMTQ0xBvf+MbZfVZzfJZaz2ZDTsaxh4bRYnVLHjMtVos9NIScjCMaG9b88d/4xjfy/Oc/n+HhYVpbWwH48pe/zK233jp7PEKhEP39/fzO7/zO7Gt5+vTp2e1d7rzzTm655Rae85znANDZ2ckNN9ww+zivetWr+Mu//EvAyQq99rWv5Yc//CEf+tCHACeYcf8/XR555JF5tx0+fJhgMMi3v/3t2fLZzTffzNOf/vR5gfPu3btnf/6Lv/gL/uIv/mLee/G9733vogCntbWVUqnE6Ojo7OeQ4sKgMjiKdSeTzVMslgj4vUv+3R/wUSyVyWTza/q4V111FTfeeCP79u3jZS97Gf/4j//I1NQUqVSK4eFhnvGMZ8zb/hnPeMaK4sil8Hg8XHnllbO/Hz58GF3X+d3f/d0lt3/kkUf46U9/SigUmr1ccsklAJw8efIcn+GFoWLZ2LZE15f+uNB1DduWVKwL58T6+c9/nmuvvZaGhgZCoRBf/OIXGRgYmLfNvn375ulujh07xpVXXonPV9ULHThwYN4+F+PxWQqZLyBLZfAt/T+H14ssl5H59Skt3njjjXR2dvIv//IvADz00EM8+uijs+UpgEcffRSofukAyOfz844PwIte9CI++clPcujQIf7pn/5ptgR09OhRHnroId761rfO297j8czLljz88MPz/kdhcdDzyCOP8KIXvWg2uDl9+jQ//vGP+ehHP7rk8zt9+jR33XUXn/70p+e9V175ylcu0u34/X7AyfYqLiwqwFGsO6GgH6/XQy6/dIo2nyvg9ZiEgv41fVxd17nrrrv48Y9/zGWXXcYdd9zB3r176evrO+u+bjp7btmlXC4v2s7v98/7hux+mC1HJpPhlltu4fDhw/Muvb29s2n3zYaha2iawFomgLEsG00TGMsEQGvNN7/5Td797nfz+te/nv/8z//k8OHDvPa1r52nuwAng3OuXIzHZymE34fwmFBYpixSLCJME+FfH3G4EILXve51fPnLXwacLNgNN9zApZdeOrvN4cOH6enpmXec6uvrF2lY3v3ud/Pkk09y44038rd/+7f09PTQ19fHE088gWmai7KeR44cmQ2astksJ0+enBfM2LbNww8/vCiD8+xnP3ve7x6Ph6uvvnrJ5/fII49QV1fHo48+Ou998thjj/HTn/503raJRAKAhoa1z5QpVkYFOIp1p621gZ6dHQyPTC7SaUgpGR6Ns3tXB22ta/8BIITgGc94BrfddhsPP/wwHo+Hu+++m9bWVn7961/P2/bXv/41l112GVD9MJo7AG+uEHU59u3bh23b/PznP1/y79dccw1PPPEEXV1d9PT0zLuczwn5QhD0mUQDPnKFypLHL1eoEA34CPrWZ1aZx+PBsqzZ33/9619zww038Bd/8Rfs37+fnp6eVWVX9u7dy2OPPTbv2/0DDzwwb5vVHJ+F69mMiPoYWlsrdjyx5DGz41NobW2I+ti6reG1r30tp06d4r/+67/4xje+Ma8UCEuXjvbv38+RI0cW3deePXt473vfy0MPPUQ6nebIkSOEw2Esy5r3xaOvr4/vfve7vOIVr5j93bbt2SwcOCWveDw++9ipVIr+/v55w4RN06RSqSybdTFNk3Q6TWtr66L3SVvb/O60xx9/nPb29lm9j+LCoQIcxbqjaRov+L3rqauNcPzEGdLpLBXLIp3OcvzEGerqIjz/puvXvL36vvvu4xOf+AQPPvggAwMDfOc732FiYoJLL72U97znPXzqU5/i3/7t3zh27Bjvf//7OXz4MP/jf/wPwKnld3R08NGPfpTe3l7+4z/+Y1WGXV1dXbz61a/mda97Hd/73vfo6+vjZz/72Wwd/
81vfjOJRII/+ZM/4YEHHuDkyZPceeedvPa1r920J00hBG0NYbwenVS2RLliYUtJuWKRypbwenTaGsLr5ofT1dXFfffdR39/P5OTk+zevZsHH3yQO++8k+PHj/OhD31oUaCyFH/6p3+Kbdv82Z/9GU8++SR33nknf/3Xfz37HGF1x2fhejbjkEShaRgHrkMLh7AHh5G5HNKykLkc9uAwWjiEceDadfXDaW1t5eabb+Z1r3sdlmUtEtIfPnx4UYbk0KFDPPHEE7NZnL/6q7/iq1/9Kk8++STHjh3jf/7P/0ksFuOGG27g4MGD1NTU8P73v59Tp07x3//937zwhS/k5S9/Oc9//vMBiMViCCFm3x+/+c1veMtb3oLP55vN/DzyyCPouj6vVHbw4EGi0ShvetObePLJJzly5Ahf+MIXZpsQDh48SCQS4dZbb+WRRx7hxIkT/OQnP+Htb3/7otfhl7/8Jc973vPW5DVVnCNSoTgLTz75pLz22mvlk08++ZTu53jvgPzs3/+bfOu7/1r+2dtul29991/Lz/3Dv8njvQNrtNL5HDlyRB46dEg2NDRIr9cr9+zZI++44w4ppZSWZcmPfvSjsq2tTZqmKa+66ir54x//eN7+v/rVr+S+ffukz+eTz3zmM+W3v/1tCci+vj4ppZRf/vKXZTQaXfS4+XxevuMd75AtLS3S4/HInp4e+c///M/V1+H4cfmSl7xE1tTUSL/fLy+55BL59re/Xdq2vS6vw1oxncnLJ/rG5T1PDMhfPXZa3vPEgHyib1xOZ/Lr+rjHjh2TT3/606Xf75eAPHr0qHzNa14jo9GorKmpkW9605vk+9//fnnVVVfN7vPqV79a/sEf/MGi+/r1r38tr7zySunxeOS1114rv/71r8/ep8vZjs/C9bjvh81I5cygLPx/35O5z/29zH7mszL3ub+Xhf/v32XlzOAFefwf/OAHUtd1+aY3vWne7ZZlyUAgIH/4wx8u2ufAgQPyC1/4gpRSyttuu03u2bNH+nw+WV9fL//gD/5AHjlyZHbbX/ziF/Kaa66RPp9P7ty5U95+++2yUqnMu7+//Mu/lLW1tXLHjh3y1a9+tXzf+94nr7vuutm/33HHHfLyyy9ftI5f/epX8vrrr5fBYFDW1tbKF7zgBXJ6enr27/fdd5989rOfLSORiAyHw/Kaa66Rn/3sZ+fdRz6fl9FoVN57773LvkZr9fmqWIyQch16OxVbiqNHj/LKV76Sr33ta/NSveeDcjK+uJEb4GS8nvzrv/4rr33ta0kmk2fVT12sXGgn46fKf/zHf/Ce97yHxx9//KL/bPiHf/gHvvvd7/Kf//mfy26zlp+vivmoNnHFBUXTtDVtBVdcWIQQa94KfiH56le/ys6dO2lra+ORRx7hfe97H3/0R3+0ZYMbcMpV69EKvl688IUvpLe3l6GhoU3pMXQumKbJHXfcsdHL2LaoAEehUGwbRkdH+fCHP8zo6CgtLS287GUv4+Mf//hGL0uxgKW0LBcjc9viFRceFeAoFIptw3vf+17e+973bvQyFArFBeDiLnAqFAqFQqFQLIEKcBRnxRX6LWV0p1AoFIrzx/1cvdgF1ZsR9Yoqzoo7J+bhhx/e4JUoFArF1sL9XG1padnglWw9lAZHcVYikQgveclLZrsB9u/fj2muj2utQqFQbAfK5TIPP/wwd9xxBy95yUsIh8MbvaQth/LBUawK27a5/fbb+e53v7vRS1EoFIotw0te8hI+8IEPqBLVOqACHMU5kU6nGRkZ2ZT29AqFQnGxoGkaLS0tKnOzjqgAR6FQKBQKxZZD5cQUCoVCoVBsOVSAo1AoFAqFYsuhAhyFQqFQKBRbDhXgKBQKhUKh2HKoAEehUCgUCsWWQwU4CoVCoVAothwqwFEoFAqFQrHl+P8DeOVyUQQUuS8AAAAASUVORK5CYII=", + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAjgAAAG7CAYAAADOue8dAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy88F64QAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOz9aYxkaXrfh/7es8a+5r5UZtbW23T3THdP93RzSIrmMuKQhChd0DZhWDTl5YMXmBjLhilYtmUZGtuSIdmgYdmApREg69IWrsRr2fToDkmTIjU909Pds/Raa1bua+zLibO+98ObJyMzK6sqq7qqMrvr/IBAZkbGcuLEiXj/53n+z/MIKaUkISEhISEhIeEzhHbaG5CQkJCQkJCQ8LBJBE5CQkJCQkLCZ45E4CQkJCQkJCR85kgETkJCQkJCQsJnjkTgJCQkJCQkJHzmSAROQkJCQkJCwmeOROAkJCQkJCQkfOZIBE5CQkJCQkLCZ45E4CQkJCQkJCR85jBOewNOgyiKWF9fJ5/PI4Q47c1JSEhISEhIOAFSSjqdDlNTU2jaPWI08hHyR3/0R/IXf/EX5eTkpATkP/7H//jQ/6Mokn/5L/9lOTExIVOplPzpn/5pefXq1Xs+7m/91m/Jubk5adu2fPXVV+V3v/vd+9qulZUVCSSX5JJckktySS7J5VN4WVlZueda/0gjOL1ejxdffJG/8Bf+An/uz/252/7/3/w3/w3//X//3/P3/t7fY2Fhgb/8l/8yX/nKV/jwww9JpVLHPub/9r/9b3zta1/jb//tv81rr73G3/pbf4uvfOUrXLlyhbGxsRNtVz6fB2BlZYVCofDgLzAhISEhISHhsdFut5mdnd1fx++GkPLxDNsUQvCP//E/5pd/+ZcBkFIyNTXFf/Af/Af8xb/4FwFotVqMj4/zjW98g3/5X/6Xj32c1157jS9+8Yv81m/9FqDSTbOzs/x7/96/x3/8H//HJ9qWdrtNsVik1WolAichISEhIeFTwv2s36dmMl5cXGRzc5Of+Zmf2b+uWCzy2muv8eabbx57H8/zeOeddw7dR9M0fuZnfuaO9wFwXZd2u33okpCQkJCQkPDZ5dQEzubmJgDj4+OHrh8fH9//31F2d3cJw/C+7gPw9a9/nWKxuH+ZnZ39hFufkJCQkJCQcJZ5IsrEf/M3f5NWq7V/WVlZOe1NSkhISEhISHiEnJrAmZiYAGBra+vQ9VtbW/v/O8rIyAi6rt/XfQBs26ZQKBy6JCQkJCQkJHx2OTWBs7CwwMTEBL//+7+/f1273ea73/0ur7/++rH3sSyLl19++dB9oiji93//9+94n4SEhISEhIQnj0daJt7tdrl+/fr+34uLi/zgBz+gUqlw7tw5fuM3foP/8r/8L7l06dJ+mfjU1NR+pRXAT//0T/Nn/+yf5d/9d/9dAL72ta/xa7/2a7zyyiu8+uqr/K2/9bfo9Xr8+q//+qN8KQkJCQkJCQmfIh6pwHn77bf5qZ/6qf2/v/a1rwHwa7/2a3zjG9/gP/qP/iN6vR7/1r/1b9FsNvnyl7/MN7/5zUM9cG7cuMHu7u7+3//Sv/QvsbOzw3/6n/6nbG5u8vnPf55vfvObtxmPExISEhISEp5cHlsfnLNE0gcnISEhISHh08enog9OQkJCQkJCQsKj4okctpmQkJDwWUFGEXK3hnQGiHQKMVJF3GsIYULCE0AicBISEhI+pYSrawRvvU20to70fIRlok1PYbz6CvrM9GlvXkLCqZIInISEhIRPIeHqGv7v/lOiThetWkGkbBi4hItLyN0afPUrichJeKJJ4pgJCQkJnzJkFKnITaeLNjOFyKQRmobIpNFmpog6XYK33kFG0WlvakLCqZEInISEhIRPGXK3RrS2jlatINFY7lVxAgPZ6yHbbUQ6Rbi6qiI5CQlPKEmKKiEhIeFThnQGynOTsumHFoNuQHd5HaO9DFEIQp27hreW0cZGT3lrExJOh0TgJCQ
kJJyQs1KxJNIphGXCwMVxdKKNJtZgC1IWGAY4A+h2CN78LtrUROLFSXgiSQROQkJCwgk4SxVLYqSKNj1FcPMW/W6I9CWpnIEQgJRI34fxcaTvE7z1DtrUZFI6nvDEkRzxCQkJCfcgrlgKF5cQ+Tza9CQinydcXFLXr6491u0Rmobx6itopoHTdEHX0WWE50PY7iJsC31mGm2kQrS2lnhxEp5IEoGTkJCQcBfOasWSPjON8fprYNkqddbtshSeY7v8DNqli2iFPNg20veRzuCxbltCwlkgSVElJCQk3IWDFUtCCPxIB8DUQoQQaNXyfpREPGZDrz4/hxjbRtd0oqxAD2bRsxFapqlu4LoI00SkU3d9nISEzyJJBCchISHhLsQVS6RsAG51x7jVGxve4BSjJGKkilatgO8xyFYRtk3G8NR2S0lUa6BNTyNGqo992xISTptE4CQkJCTchYMVS/vIAzc4xSiJ0DS0y5cwMya99TbSdUkLB9nvE62uo+VzGK++nBiME55IkqM+ISEh4S7EFUtRrY6UEsTwf6cdJfF90KpVcl9+CXd8Duk4GFuryE4XfWEeMxnXkPAEk3hwEhISEu5CXLEkd2tEq+sYmSK+niLq9ZH1xqlGSQZ7WbHs3Bi99Ahmq4M98XwyVTwhgUTgJCQkJNwTfWYavvoVgrfeJrXUwPPSuGmX9MI8xqsvn1qUxHHUz3QaQEMUi+hzxVPZloSEs0YicBISEhJOgD4zjTY1SXGxQX89IprUsM6XTzVKEgscW/mfVaO/hIQEIBE4CQkJCSdGaBrZ2SqaD4PM/sinUyNOUcXCxrJOb1sSEs4aSYI2ISEh4T6IRUQcPTkLuHsFXpnM6W5HQsJZIhE4CQkJCQ9AEJz2FoCUKnrT76u/E4GTkDAkSVElJCQkPABS3vs2J3qchzChPBY4ymyckJAAicBJSEhIuG+EeDgC52FMKDf0iO56h8j10Cogn6Dy8IchDhM+uyQCJyEhIeE+0fVPnqKKJ5RHna6ac5WyYeASLi6p6d/3aNLneRDVahi33qO/2Uf6Ie4PVu9bIH1aeRjiMOGzTSJ1ExISEu6T2Ovi+w92/4cxoby3uEH49ruY67cglUYbKSPyecLFJfzf/aeEq2sPtnGfAmJxGC4uIfJ5tOnJJ+a1J5ycROAkJCR85pFRRLS9Q7i0QrS9c1fhcBJir0vsfbnv7TkyoVxKiPZSXkcnlB97/yii+85HSGdAZrqMsG0Q9yeQPq0cFYek718cJjwZJCmqhISEzzSPIpURR3AcB4oP0Dg4nlAu9iaU3+xNEEWCS4UNdQPbRtYbd5xQLndr9DfbiHwRS2sQSUna7xC1WgjDQFRK+wJJjI0+yEs8sxwVh9fak4ynmxRM5zZx+Fl77Qn3RyJwEhISPrN8Up/LnfikvXAOTSjPpInkkRbE95hQLp0Brq8j8iZOy2V9M6LSbzLpfQyajshlwTTvKJA+zRwUh1sDpS796MBSdg9x+DhIzM9ng0TgJCQkfCa5zeciBF6kY2U0tPQU0eo6wVvvoE1N3vfiE3cO9rwH27Z4Qnm4uISWnjq83XsTyvWF+TtOKBfpFMLQka0O3d1tJKNgGmDnIQiQ9ToIgWy1gNkH28gzSiwOQ8ejHapQWtXuDG9wD3H4qEnMz2eHRFImJCR8Jjmaymh4WZa6Y3vN8e7tcznRczxgqXg8oVzL54hW18F1kVGI7PeJVtfvOaFcjFShUkFub9MNUmBZFPUuQgiEYSA1HQyT8NqNz5wXJRaHN7fzSCRTmfr+/2JxqE1P31EcPkoS8/PZIhE4CQkJp8LDNv7e9vh7qQz2fC5xGqgT7DmEbRvp+w+cyvikgy31mWnMr34FfWGOqO8gG01kp4u+MK+uv8vZvtA0tMkJCAM6Mo/Ycyl7vkC2O2gpG21hnmh9/RMJuLOI0DT8F78IqTSyVifjNZDhycXho+K4yjj5hBi/zypJiiohIeGx8zjC+Ed9LmWrR93Ns+s
WKJjOJ05laBqE4SfbxnhCufm9Dvge9mVO5NeQEkQ2i1XK0AoqRH5IO7DQKDNVFmjTU4hshmhj8zPpw1n1p9BfsZnb+DZyo4usNxCmib4wj/Hqy6eSCjoaMVx3ynihwXxuJzE/nxKnLnDm5+dZWlq67fp/+9/+t/kf/of/4bbrv/GNb/Drv/7rh66zbZvB4LP3IU5I+CzyqIy/Rznqc9H2Ii5hpJ3I53Iv0mnodlXDP+MTfJMKTUMUiwgB2tjJ7uN5ICyLdM7AZwwiC9uISKUK6KUxVXre75+qF+VRUdsLSOXPVcm8/gtnxsx71Pzc81NwMMp3BszPTxqnLnC+973vER44DXr//ff52Z/9WX7lV37ljvcpFApcuXJl/2/xSWPFCQkJj4XjjL9uaGB/AuPvnSpWYp+L3K2p1EW1jI5P4Eb4zU3MwidLZWQySuA4DuTzD/QQD8xgAKKQJzNZxL/h42fzaNkM2cxgr6/O8QLu017dIyXs7KjfZ2ZACO3MREPiiGGjY9IWyvx8MbcxvMEpm5+fRE5d4IyOHj44/6v/6r/iwoUL/ORP/uQd7yOEYGJi4lFvWkJCwkPmaBjfDQ2We6P7fUzuN4x/r1SXPjMNX/3K/m0q7g22mKAz9SzjP/HUJ4oUxc3+TkPgOA4gNFIvfw6xsgXdPtJ0SWUcZH+gjLZHvCifheqeONg/Pv7JPVAPGzFSpT+2wM6ijxiRXMhv7W/jw4gYJtw/py5wDuJ5Hn//7/99vva1r901KtPtdpmbmyOKIl566SX+2l/7azz33HN3vL3ruriuu/93u91+qNudkJBwMo42uLM0NdBpyykpX8x9hPFPmuqKfS5yt0a1P6C+VaBdzDM188kiF6m9E/EH7Wb8SYj778ixKbSLOcTqDtJZBXcDeYwX5XGlBR8lnqciVwDl8uluy3G4nsbWzBcRm+9yrvlDNCOPtG1w3WMFZ8Kj50wJnN/5nd+h2Wzyr/1r/9odb/PUU0/xd/7O3+GFF16g1WrxN/7G3+CNN97ggw8+YGZm5tj7fP3rX+ev/JW/8oi2OiEh4aQcNf4KASnDYxBYdP0UWb9+ojD+cakuADLpY1NdQlOpDA0QD8kCET/lgXOnx0bcf6ffB61QIP10DrMwgj3x/G2pp/vdV2eVxUX1c2HhdLfjOIIAbt0CrVrl3C8+h/nDvoqUnQHz85PMmRI4/8v/8r/w8z//80xNTd3xNq+//jqvv/76/t9vvPEGzzzzDP/T//Q/8Vf/6l899j6/+Zu/yde+9rX9v9vtNrOzn63mVwkJZ5nY+xH1+ohSkWhrB212GiEE0+k6NzoTrDslLrRunCiMfzTV5UdqYTa16J4VK5mMEgaeN+xI/GklDkbnchpaqYg+d/vciKP7ahCa2JqPELf3AzorfpajtNvKf2OaYNunvTWHiSK4fl39PjUFucIUcuGXPtVep88KZ0bgLC0t8Xu/93v8o3/0j+7rfqZp8oUvfIHr8RF2DLZtY5+1T0VCwhPCUe8HgY9stogcR4kc28b0e7itAYPiCMUThPGPprpudccx9YD57J4D9S6prtFR5eXY3VUL0idBiAdv9vdJUM0KlclZSpUuM8073PbIvlrpjZAxXKbjBnmfguqe9XX186xFb6SEq1fV76OjUCio3+OIYcLpcmYk5d/9u3+XsbExfuEXfuG+7heGIe+99x6Tk5OPaMsSEhIelOM6u2rj45DNInt9oq0doo1Npv2baONjbL14Mi/IoVQXgAA/NIZi4y4VK7E5uNO57V+fOgYDlR6xbchmj7/NwX0V7jU7DOWBr/4zXt0Ti5tKRfUeOkvE4qZQgGriHT5znIkIThRF/N2/+3f5tV/7NYwjDSX+/J//80xPT/P1r38dgP/iv/gv+NKXvsTFixdpNpv89b/+11laWuLf+Df+jdPY9ISEhDtw1Psh0YikwMho6JcuEK6soY2NYfzEG2iZDHazShhpOM5QhNyJoz1uqlaHmpun6Wcpmd1
7z3J6SJGXdFqlu6Lo8S2+8Xbv+VfxffXcdxQ4B/ZVY3QEgJLV23uss13dE4bDNNzYCXsEPS5u3lTvhW1/8khgwqPhTOjh3/u932N5eZm/8Bf+wm3/W15eZmNj2Eug0Wjwb/6b/ybPPPMMX/3qV2m323z729/m2WeffZybnJCQcA+Oej9qbp7F7jigvB/6SAXZaqJlMmhjoyycV19Hy8v3fuyjs5xKwTYyCtlt2ydq11/cs6r0ep/sNWZUu5MHnir+IMSmZttWC2zcRizelqMc3FeNDRfpuuRE99RHG5yE2Hlw1iyT6+t7zRbF2UubJQw5ExGcn/u5n0Pe4XTqD//wDw/9/Tf/5t/kb/7Nv/kYtiohIeGTcNT7EX/CtwcFxlLt27wfhjGMrLjuvc2kR3vcSKeJ1HTEwgLmay/dNdVVrUKzqXw4d4p8nIQ40tTvf7LHgZP3dYnFlK6rn7EuuZs+ifcV31pB1upIdxPOeHVPvz/0Gn3Sffsw2d0dRpUuXz7dbUm4O2dC4CQkJHz2OFoSPpZq0/KytLwso3b7WO/HwoIK/d+6BU89de/nONjj5lzNY6OdozmbZ3Lq7tGI2JD7SSMvB5v9fVJOGkCJe8HE54QnHROhTU9jfGkS0e1gj91eTn7WiCN5Fy6c7nYcpN1WAgeUuDlrzQYTDnM2j+yEhIRPPbH3I6rV9yO0Y+kmACv9qmp+Nj19yPthWcMoju+f8Hk0DW1slNIz04hikVb7ZF9rwy6zJ35JtxFrg4chcO43ghP3wslmT3bfbhcQGtWFIvrcLNrY6JkVN7GIyOU+2Zyvh4njDA3PFy+ePcNzwu0kb1FCQsIj4ahPRvb7FLQu0nVxtjpEueKx3o/5efUzbux2P8R9bU4ijkaU35ZW6/6f51FwUoETv7ZGQ4mzTObOJeIHqe9VhZdKD7R5jw0phwJn+oxkzjxvOCZifv7siK6Eu5MInISEhEeGPjON+dWvoC/MITtdVRIeLKKNj7Hy/C8e6/2wbbXYR5Eqgb4f4mqW+Ez7bsTt/uPhjQ/Kw6rIut+IwElKxI/eHs7+4nzrlvo5OXk2UkBhqNKmoAZ8ps5mNX3CMZzxQz0hIeHTzkGfjHQGpNIptutVQGMwOH7BOHdOnTHfuqXSASclfqyTpIxiQRFXIZ02J13MY+PtYKCiOZZ1MoET3+8s43nKYC7EsNLtNJESrl1Tv4+Pq5RZwqeHJIKTkJDwyIl9MrH348IF9dUTh/2Pkk6rRS4I7l+AxIvQSYZgximtTyJyYlH1oFGc+H73Kz58fxjhulOJeExcWv64p57fL3FaMk5TnjZxI79S6WwO+Ey4O4nASUhIeOzEM4WkHJbcHiWenXuSvjgHmZhQP9fW7n3b0b1u+rE/5UGIK6kGDzjpIBY4J0lRxbeNhdlJ7xu/vrO8SLda6vVY1tmYN3X9utqedHp4TCV8ukgETkJCwqkwN6d+3skvE6ddXFf5cU5K7DE5SVQmjvZ8EoETR09OEjE6jvi1nSSCE4uoWNCcxFwMQxF5rw7Rp4WUEPdzPQvRm5UVFR0TYnicJnz6SAROQkLCqaBpw4qeOxl9pyYjZKvF8ttbRNs7yBMqnUpF/Ww27347NVH7k5mEP2kvnPtJUR3tgROX1Z/0Oc4qscgdGTn98uvtbdXhWoiT9WJKOLskJuOEhIRTY2JCiZBaTS1uBxfrcHUN+6238W+a+EHIILOKPjOF8eor9+y8OzqqojJbW/cui85mVY+Yk3RPPo64o/CDRnDuJ0UViyjXVffL5e4dxYkjWWch7XMcQTAcfBqX7p8WzeYwmpd0Kf70k0RwEhISTpVxNZ6KlZXhdQenkE+UBohqmU17nnBxSV2/eneDzcHIzL2iF/GiGvdeeVAeNEpypxSVjCKi7R3CpZX96FUscHo9JQzS6XtXUMVRrDiqdda4cUP9PHfudLej14PNTfX7pUtnv+Is4d4kEZyEhIRTpVxWkZZ+X0UbNHF4CnlRSLbbGn2
9iJieIlpbJ3jrHbSpybt24p2YUL6OnZ27T6KOq6C63U/2Oh5mFVW4ujacseX5CMtEm57Cm/gyolLdLxE/SQ+cRkP9LBQebPseJb3esHz9XpVgjxLXHQrs8+eHUbmETzdJBCchIeHUic/eb9y4fQq5lJAzVehi2yuhVctEa2vI3dpdHzPuo3ISA/En9eF8krP9OIKz35fnQPRK5PNo05OIfJ5wcYnge+8S1Wq4rorg6Pq9hUFcSn4WIxKxqLifXkcPmyAYlqefOzesUEv49JMInISEhFMnkxl2Lx60XKTnw94U8rqXp+uniYCOl1FTyH1/fwr53YjPxO/VETn26TxoFOdo2fb9cDCCI6PD0SuRSSM0DZFJo81MIZ0B0bXrRFG0/5ru5t150B47j4PtbfWzUDi9iEkUqXJwUJ2TTzOKlPDwSQROQkLCPsf5Ph4X58+rn8uN/HAKOZDW1c9BpNy0O53UbVPI70TcSyf2VtyJ6t68z7v5cO62b+JKqrih3v1w0GR8XPQqkqhhpX0HDB1/fZuo27/ryIV4W1sfryFbLcqlx/c+Hnz+Ox1HUTSMrMXjNR43Ug4b+VWrZ6NzcsLDJfHgJCQkAHf2fZykaulhYJoqEuLl8/THFkivXkVLT5Ex1NjsjOYTSmjUBWOXD08hvxOx8LhXZCYWC3dq1nevfZPJKDOv49z/rKKDJmPpDNTj70WvlvtVBrU+F3a/Q+gMCM1nwA8I7RsYM+cR4vbWxAe3ddsZwdeyZD5u4z9zCX3+HGKk+kiniJ/kOIpTQqclbmAobnK5YcPHh4GMov2xJCKdeuT7O+HOJAInISFh3/cRdboqepCyYeASLi4pr8tXv/JYRM78PFy9qrE180XOt9aJVtfRqmWm7B3W2gX67YhcJkX7qecZPeGikckoA/Od5l7FaJoSG0dnNp1k36TH1b7p9++/W/DBNJJIp/ajV1EQMFgcEDaaRJ11BlYRsj6RbiC7PVIr1xDnZoi2xf5iKgcD/G9+a39bB7JEtLMLN97Efeu7aOdm0C9dfGSi9ST7KhidxvfV6z0t4/PSktrvhjGM8j0MTvskIeEwicBJSHjCuc33Ea/umTRaeopo9WRVSw8DTVOLXpsqrTd+gdLV7xCtrZPyG8joIrnxKuLyRWp6lZOedE9OKvPy2hpcuHDn242OqmquZnMoUk66b6xfngS0B2r2dzBFJUaqaNNThB98RNjuEDrTIIFshoEogOsySGUI0jlyvU3Mb/2AwfdcVVJlGshWGwFoly8StnsEm7vo/kCZjJwBstMluHnrkYjWE+2r777NjekM+AHnz4OMHn90Y3NTRdqEeLjm5rNykpAwJBE4CQlPOEd9H/3AwtZ9dCERQhyqWhJjDzGWfwcmJ9VogYYxzuif+SWoqXD/AmnW+hU6XY08Sojcq4kfDBvh+f7db1cqKYGzs3NA4BzYNyBo+2nSuoephbftGxh9oKGdB1NUQtMwXnmJ4HvvqPpuaxph6jCIqBsjNNNjaLpO0B5g+XXS3ZuIkTnE2Aiy3iBc31DG5HaH7fWQrWiaC+klhLCQQkV69PMlokbroYvWg/tKIljujTCVbuzvKyyL3e9eISiDpQXIH2zjPeboRqMx7Av0MBv5naWThIQhyZ5OSHjCiX0fcdXSWr/Kzc7EsCLoPqqWHgZCDPvWrG8Mp5Dn50YQmkYup0TBvYzDB4mFUNwx907PC4fnXh3cN4HU2HJKNLwDjWeO7JtPWkUFIFIpRKmIKJUgiohCSZMym9nzDHKjBOkcnhtheX1Spo/UDbVomgbCtpFBSHRrmWVnHGGaVESDOhWkrkMUQhieuNT+vl7HgX212h/BC038SJVHRe0O4coK24MimCZz56L90veTNG58GHS7SsDCw2/kd/QkoeWnieTwCUQ6RXj1GuGVa4/VuP+kkwichIQnnIO+D4CyrRy517uT6gaue+KqpYdF3HW32z08NHNmRi1M8fDIO00iP8q+YLrDYM+YeJz
Bfu+YA/vG1NTC1PIPCJwD++ZBF8yjoxqkMwDDRHv6MuSyOPkxaqNPE6YLCF3HxySUGugCQ9doiTK3uqOgG6DrCMtCdto40gYhiNCoU6UVFkDTEYbxSERrvK9Cx8cNVdgsY3hIKYnW1ln3xhHpNCMFF00flr5HnS7BW+880oV/MIDVVfX7hQsPvyz9oLi73plg2ynRDy0l7D6+SnD9JuHiLbz/7z/B+51/8lgEXUIicBISnnhi30dUqyOlZMTukDFckHC9M05Ua6BNn6xq6WEyO6t+xhU3oLr2xubUKLq3YInRtJONboiraeIS5qP7BlCeGFTp9sF9c7ey7btxdFRDLBSkZqDls8hIgK6jCXVDGUUIGUEYIgp5atoofmQgshlEIQ+eRxiBRKBLnwZVpJRknW31/0zmkYjWeF/d2s4ikUxn9qJDfQev1cfRc4hMCtLD6Mbtab6HTxDArVvq97m5k09gvx9EOoU0La7XR5BSIIQk098lunYd2WgidB2RyyIKhccatXrSSQROQsITjtA0jFdfQcvniFbXkf0+U/YOwhsQ7jTYsBcwXn35sXsHYjETBIf7y8zMKMHSaqm/T9qcLxYvtbuso7mc+hmPNzi6b/BcZBQi+31V4ZXP7e+buEmc5917W6SUdB2PZndAz/EBORQ4e0LBq3cR5QrSMMAZQBQRSpB+iImHMFWFDgzFgj49iTB0WrIAhkHB2cIJDHAcLEtHn1ZRuUchWoWmIV/6IlEqg6zVSXtNZBgSdbosBbMIy6I8ZlB3C9S83PCOjzAFerCR39TUsG3AwyYoVlnMf56o3SVtDLiQ21CVVK4H+RwEPqJQQFQrjy1qlZAInISEBECfmcb86lfQF+aQnS7RxibnwytoE2O4X3iDun061R8LC+pnfAYOw9lLxaJawNZOeCIcG4fvJnDg9rENB/dNzt1FNpo4LQ99YV5dv2eQjRfP4yqpDgqajXqHD5d2eG9xiw9ubXNtrcbKdpvO3gIfi6ogW4K+g1nOQyatXNI9BwRYWRMrZ0Mui3QHSKeH7PWQuRyUy2yOfQGRSjHt3kD2+mipFNr5eTCM24TZw2TJnUJ/5SXmz2v7x1FvoOZJaNUKbXMCgKp1wAz1iFKgBxv5jY4+upJ0x4Gbixra5UuMFFwma+8ja3WiVluFi7pddu1zdMcvIoR4LFGrBEVSRZWQ8CniUTYR02em0aYmDz3+s9UqV69p1GqqCd/j7vZqWapXSRCoSE0cYZmdVXOMajXVhbbfv3eb/YMTxsPwzj6MXE6ZkQ/2zYn3zchqnf6SZDCiU3qqdGjfxwKn3z+8n1q9AWs7HVr9Aa4X0B34aEJQyafIZSzcjk69M+DDpRaXojIT5Rz6zDTRl/8U4XdvIms9BmYeYeYwMmmi0UmKJZPsx7fw3voBoX6RVNQnCG+BpqPPTNO58JMYwiBTSmN80KI4WEf2euApYWa8+vJDr1waDNS+1Ueq5F//+f3j6NZGDv2jK5jbq/gUGbG7aHvRqjjNpy/MP/QUaCxuCoVhp+qHTaulBroCzL5QJTP3OsFbbxNevQ57gnOx9AqiUsG3TUrstcq2bWS98diM+08qicBJSPiU8DiaiAlNu60U/OJFFebf2FCC41GE+e8m3BYW4No1ZRJ9+ml1+zh9tVdoxMoKPPXUvZ9nZkbddmvrzl10R0aUwNndPdwETmgauXMjaH3oaDB5RFfG86gORnBavQFXV2u4XkjK0miHIWEUEQHN3oBIStp9A9eT1Nt9fnDd5dxokVI+xaJrUzt3Ecp93J4APUu2XCAKLTLpBhmtT0/k1DBS2VINCgEERJFAK2Rpn/s85mjERLGG7j/azrpLS+rn+fPD42hrC7QSZJ4/T+efd5C7dcoTTWRog+uqVNkjiCbdvKnElmU9um7JW1vDVObCwp5BPa+EcHjlGt3f+SbL9lOIQh5LDzmX3Rne+RSM+08iicBJSPgUcJpNxAxDfYEvLqpF7MKFh2vUvJdw03X
I55XoiCM2MBQrOzsqBXGvTsUwTG+123de+OJKql7vzo9znHUi9tDE/XaklKztdHC9ENvSaHQGdPseym8j8IOI/sDHdbMIYWBZOlEUsVHrsLTdxO3ksU0LUS3SjXT8QOL1BpiRwNi5Rko6tD73KkZfo2jomNYsMp2iv1JHrm+QmcgxGGggNKypR9u/qNcbdgaOj40oGgoANz+G/spLzKx/B7nZRdYbCNN8JNGk9XXlgxJiON/sYXPr1nCsx6VLh6OBQtPoTV5idaIBWztUrA4jqeHB9CijVgmHSQROQsIZ52gTMYnAjwzsjPbYmojZthIUq6uqK/Dly3efYn1STircpqbgyhUlZioVtXjFUZxyWS2mS0sni+LYKniA5w2jLkc56sM56f9g+L/ewKfVH6Drgnp7gLtXe65rGghw/RApwQsioiDEcf39NJofSAJfkDHVTo4CHcuUeEHAoO2gt3cxR0oMghSdwEAYElD+jq3cBaJmh+lskz6VxzJJfGVF/Yw9U6CiKKDep14PMtOHU1ePIppUqw1bBzzMRn4xsa8nHudx+fLt/XTW16HdVp6cafcG9k4NWS3vH3iPKmqVcDvJ3k1IOOMcbSI2CC2We6NsOqXHaljM5Yb9ZOIv+U/Cbd1fM2mV2jimP4oQwyqog6XhMzMqarC7q7bnJBVM09O3P85RYkPycY0BYx/Qcc91cLELwogwVFGaIIowNQ2QhFGEH0T7+09GoAmhKsbCiP4gwNA0/CDC81SoKIwEAjB1jcjzCT2fyPPw1jZobXXZuVojeP9Dwo+vsuWoGvqMULmyuKfQg3CS6fKxoEinh5GMwWDYSyiOhM3NqehG3LhRGxt9qAt8u60EMBwvPD4pYahEtpRKqzz11OHnkFL9P94fT32pSuEX/9Qh477sdG8zpyc8OpIITkLCGefohGlbVzmQjp+mE6S5kF5F+o/HsFipqOhHq6VEzkkiJnfiduFmktp7bceNiKhW1QLW6QxNwkejOIuL996mOGpzp8nhoNJg9boSTvkjA7uLRbUNrdbtU6h1fbiwG7qGBAZegJSSnhcQRLerwjACGe6JHiCSku7AIwhDgkBgWaBpci9qoIEhIAyJrlzF0V9B2DqprI4ILGSjiet2EOks7SAL5oMLnJN6vmKhGPctgmHVm66r92pi4uELjoM4znA7Ll58ONHFg3jeMCJVKqnXcxDfV5FNUIL7woW913uMcT+ZLv74SPZyQsIZ52inYV1ILuY30DSpmvE1RgkN+7EZFicnlUiQ8nATvpOc7R/kYPfXSApWeiNsOAdGcR/THyU2/R4sG5+ZUb6POIpzr5lTMCwZvpPPJo5EHOy/ExP7eOI+PAeJDdhBABnbwNQ0+q5H3/WPFTcxkv3+gft/u35Es+PhBSFCSGQEUkaItI4Rung9DydVxtZ8sqIHpkmYLxC5PmY0wLXzh17L/RCnDsPFJUQ+jzY9eexohbjkvlAYiorYdxOLm9gM/qjw/aHBeX6eB264eCe63aG4mZi4Xdx0u0NxU6kogXVQzD3KqFXC3UkiOAkJZ5z9CdOLS2jpqb1eGnAht8mmU6C1O2Bp/PMsZKpk7/1wD4Xz51U43nXVTKjR4Jiz/alJ9EsXEcXisWeuB4WbllHKoOuncC0DWw+OrTTJ5fY8Kv7QQxMLjrii6tYtZfy8G+PjKpWwunrniE+8QEfR4YhAvHjFkZqDpNMqurO1O2CrU2e72SW8Vy+3CKx+j3xtm8C0cPLF/Sfpu5Ja2yEMdYhMJBF5PSBn+/TzUwSeJDIEXZGmEDbZdctotknVqEOniyjdf13//QyO3NlRO2Zyb6qHlMN5T/GIjYO+nIdNGA7FxczMvU3m90u9Dtvb6vdz525vRbC5ORzeOTs7PBaP8ijbOyTcmUTgJCScceLGb3K3ppq0HTAsjtbWyBTH2Ll8kZVVjdHRR9fz4yiXLyuRU79Rgx9+m4KzsW8UjnZr+P/8O/h/9MeIsTG0YuG29MZR4Ta
X22apO8Zyb5SL+fU7VprMz6vI0cF0VNwXZ3tb+YSC4O5n8rp+eHTDcemT0dHhAnY0zXMno3EmA33X49aNBr7Wwb/HePFcfZf89RXsbpfZ4GMi3aBbrrK1cJkOeWQo8PwI1/WRgUTTBNV0H1sb0Dn3LNRtAjdgxy9RiLZZLzwNmQpFuUnf8x7oWDiaOqx7ORUlMtxDqcPNK00QlX3TNwzNxqapRGg2e2cj9ydFStU+AJRgzeXufvv7ZW1t6ME6WjkYP3ccpLx48c7H2+No75BwPKcuIf/z//w/3+/uGF+ejptd3IF/+A//IU8//TSpVIrnn3+e3/3d331MW5uQcDoc12k4NixWfunHufhFtZLt7AzD9Y8aIeDihYjo6jW22ymciQVEJo3s9pArq8gwVLmWIIBc7rb0xtExCOagQ07vIl2XtaXgjpUmtq0EipTDFNPR7sbLy/fe/njxj8/AjxI364uNqweJz+SPRnEsS7Lb6tNshYRhhH8XfZOr7zL/3tuk2y0C06JbrOCl0hR2tph/721S3TYCQRSKvfyVwNRNIlMS+H160oJyFXO0jDU3ifG5Z3Aq5xC6zsDIq9EI5Ts//504mDqUEmqDPOv9AwrPtok8n8auenGjIyo1ObixSm+jRRRG+2nCg32EHjZxI79SiQd6nXfj+vU9cSMjLpZ20NaHadcgUMI+juw99dTdxc1JUn0Jj4YzEcF57rnn+L3f+739v427nHp9+9vf5ld/9Vf5+te/zi/+4i/yD/7BP+CXf/mXeffdd/nc5z73ODY3IeFUOK7TcBzq1lERlWvXlOHyypVHU0lyFK1RY7bzHsv5i2w4NjNs4661yLoeWrFAEETQcbCRiJnbS9r1mWn46lf2z3DH/Abt6CLO6ALRT86izxzfrOb8efVaV1aGzf+ORnHu1q0YlMDZ3VW3P26BjPfdcZGaYlGJq1brcMSs7/p0HQ/fBy24i7qRkvHFq5iOw6CQRyJA0wgsm27ZIt1oUtpcpzNdJfJNhNhLk0UamZEcg90K3Z0+ftEgn/Io5gRYWWQXcByCqXGMQv6B/DcHU4ebQu3/8XRzeAPXZVObRlgWI9Em3u98l2htnWv9GdB1ZHUE8/IFZp6vPtTj72Ca58ZOHpktkM5ot3liPglRpI4rKUHWayxs/An++jDyMhifZ2P6VbRq9Viz8dHtPZjqA4EbGaQeY3uHJ50zIXAMw2DihEfpf/ff/Xf86T/9p/kP/8P/EIC/+lf/Kt/61rf4rd/6Lf723/7bx97HdV3cA27BdlzHl5DwKeO4TsMx8dlk3ITsyhUlBB5VigDU2b7pO8xU26w5o1yrj6D3u5SzFmOiRk0fp63ZXPJDjGMqo+B24XZJT7PUqbDsadwplhtXUPV6ytRaLg+jOPn8sLvx/Pydt/3g6IajPpuYVGpY8nzwvCtOhxwVOHFpuO+HmHcpo093WuQaNQa5PAiBPOjTEYJutkK618Z0+wy0oto2IZGApmu4Lz5P9/9pM+h4pPQOea1LuwOy3aWccdEuP4emP9iieTB12CkuIICCqUrOpZQEu02c6mU03yP3J79L2OnSL04jUmUCL0Lf2iLqtMjOPQuFh5OCOZjmWXOqDLQc2kiFmZ+ZBR7OcwTBcDBnqrfDxPv/5FB/pt12ivrNALHxLlM//zlKE5N3fbyDqT4ntFnrV9G1kPO57WOrBBMePmdCNl67do2pqSnOnz/Pv/Kv/Css3yW+/Oabb/IzP/Mzh677yle+wptvvnnH+3z961+nWCzuX2YP1jMmJHzGmJ9X4wZAVX8c18vlYRGf7af9DmPpJlY4YEtM0NLLtChihw5oOov+nLrDHSZHH6w0Sc+MUCiqr6a79aqJ0x9bW8Moy+ysMvru7Chhcq9hzbE5NjaSHiUuA9/dPXx9LIaOVmwZuobQ4F6+YsP30MKAwDAJpElTjuHK4QwMz8ggohBTegS+DgIMQyJQomxQnaIxdRGRzWC6HfStNdaaObRSkZE
3nkarVh/YixWnDuupaeRunZLcQYbDCerr9gW0S+cZW3t3PzqxySRCaOi2gRipcM67+tCmZR9M89TsGZzyFFomzcLuWw8tzeM4Q3FTrURMLn37UH+mxf4ETa2qXpt/jfT73ztxleCanGStr96MitUd3uARTlFPUJy6wHnttdf4xje+wTe/+U3+x//xf2RxcZEf//Efp3OHb+XNzU3Gx8cPXTc+Ps7m5uYdn+M3f/M3abVa+5eV2AmXkPAZZWREVX2AMkvGlS0Pm/hsP6rVKRh9Kuk+43KDTTnBthzFGrTIZEDaNku90RPP4InHKLTbx5dqg1ro40U8/vgfjeLca9J4XC5+Jx/O3UrCjzMaZ1MmGcvkXpmZwLSIdAMj8IlQeaSQYT5JBgKp6USGhgw1NcTS0DBNJa76HUGQyWFemsX88R/D/n/9WZrP/hj6U5eQVdWN8ZP4UvSZadqf/1No46NUB+v7ni/mFgheeg3Nsshu30SrVth2S0RS4EQ6GpA3B1gjhYfSfPJgmqc7foGaGEEIjYvVOvrs4WaQD0q7PfStTU9DlWHkJULj7dp5VntVEHApv4k9kj/Ra/ONFNflRfp91bl6IbdFyeoPb5DMo3rknHqK6ud//uf3f3/hhRd47bXXmJub43//3/93/vV//V9/KM9h2zZ2PGAmIeEJIZMZDspsNFQk5+LFh/scRyu8Riol+qmIameZLXMCkbrI+amIJSFwQ52dBoxdmD7RDJ54/tXi4tBnc5TRUdWLpdVSlTSaNhwpsbWl/r5TlVRMXPHj+8fP2LpTxVQqpc78D3l9pGQq5dHtd7DNFk6+cOyTO/ki3XKVws4Wcq+RoIg74UiJ5vj0C2ME2RS4kjAQhJpE0yOsVEijCbZhkMsY6OUi+lyRaC8CERufP4mto9UCrVql8LNlbOvCvufrerOKFmlMizWk5xPZKdq9DJuDEgDpdIPJtBqm+TCmZcdpnkFpkqXeGA0vx0uVG3u79JOneba3VSk4qMhnKgVhXUVeHCPHOzuXkVKQ1j0u5jbUsXCCSeC7u7DbrqJVK2R2bjE1YgzL7UnmUT0uTj2Cc5RSqcTly5e5HscLjzAxMcHWkdPRra2tE3t4EhKeJAxj2FI+CODjj4f9SR4WByu86PaYMbewhE+RBo3SeW7JBRbETeRunaY1gfv8F09kqrTtYRfhu6Wq4tELcfO/2B+TzaoozsbG3Z8nvv+dbhdvw8Ep4TCssootfeHqGt7v/BNG3vp/mFm8wvzb73Lh3TfJ1Y/ktwCEYGvhMn46jdXvo0UBWhRieC65Rg3HztMYnUVoAkREFAp8H0IxoO/10dBJ2xaWJSgWh1GuuA/Mg5iLYdisce1HO8hWi6lJ9lOHQWmUKNLUHLCyhbBMbjUrOKGJF+kIAVOZPbXwkKIT0hngurAYzNDwcozaLdL6gbzgJ0jzLC0Nxc3Fi8N9J9IpdrUxvrd9HikFJavHF6vXhzr1Lq8tipT3bXcXEBrzP3GOqZJK7cl+/1Cq73HPo7rfRpyfBU49gnOUbrfLjRs3+Ff/1X/12P+//vrr/P7v/z6/8Ru/sX/dt771LV5//fXHtIUJCZ8uhFAiJ+7rce3a8Gz1YXHUKPxMs8nVd3uEOy6tWsRNq8L5ixHLky+xHlZJ3yFacpTpaSXK2m2VdjvOMJ1XPl08bxiFORrFmZy8cxQn3g/9/vH/HxlRz7+7e3gcQT6vUmOtFhR6w6Gh2dIEaXsE2fWp1K6T7ra59fwrdCsjhx63Wxnh1vOvMLiioTsBWaeOpXu0RifYrD6NYxfQUV2MQ98EGWLnfEw7JEuOes8knVZCKxaAseh6EP9NbOTtrtQJnBFsK8JfY79fS9y1en4ehFnFm5gjvNFjKz2DpYWMp5pkDfehRiciK8VNucBuL8tIrsP5/DaGdmBhfgAhda+BmR/vVFm1Po9strlQ3mU22zhw3zu/tk5nmBI1TWXwF2KK0B5WCT7KKep340ntxXPqAucv/sW
/yC/90i8xNzfH+vo6/9l/9p+h6zq/+qu/CsCf//N/nunpab7+9a8D8O//+/8+P/mTP8l/+9/+t/zCL/wCv/3bv83bb7/N//w//8+n+TISEs4809PKa7K5qaId4+MPt3/IwQovfW6WZ56LuPpOh92dCC+vszaR49y0tj+R/OiwwjsRp6pu3rxzqmpuTr2mmzfV48ZRnExGnVVvb6vXeyfiiizHGY5biIlF1dGxDsPBkhHBh8Ny4BQCs2OilUbRMw7Wyipji9folqu3veBuZYTGuTKyHXKrEkFaw8kXGdQLRI6GZoRomiSUgNQpFwzSKQOnreP7altTqaHAMU21cN/vaISDU9030s8jMiYzxjLh4i5yt8bgp36OqJkH38OsACNV1qZeo7l8Ha3TIpOXXMisIvuDhzYtW0q4Vq+ym5mn2F5hbnywP6tM/f/+hVQYDpsDmqZq4BcTRfDOO3uTwKen+IL4Ptn6DlLcfRK4lOrYi6Nok5NDoQl3b+/wODj43sYVYQxcwsUl5SP6DA/+PHWBs7q6yq/+6q9Sq9UYHR3ly1/+Mt/5zncY3StfWF5eRjtwILzxxhv8g3/wD/hP/pP/hL/0l/4Sly5d4nd+53eSHjgJCSegVFKL4uKiim70eo+uGZtuaFx6uYi4rhZg01UCq1pVvpmTDuuMU1WdjkojTR5TnZtKqUhNFKlITCZzexTnbgJnclJ5ldbWjvcp3anrsRAQNTuHOv8KIJQCtDT5bIqNfJF8Y5d0p4VTKB3z7ILATtGtjqAbKjqhStc1okCAkCAkAg3LEriOwHUllnV4end8P7g//81BI28wNQs9G01IjKyNzEwRXb3Ojf/3dyCTZo5buN/XaI5cIpz+Ap3Z5zB3Vpn1fojY3EI+xOjElSuwsalRuDjO/I0fkdncRFbvLjbuxsGBmcXi4ePIceA731HHj23Daz9RQNv6iXtGXlx3OI9NCHXsHJcevFt7h0fJ0V48bmSS0oJjx258FnvxnLrA+e3f/u27/v8P//APb7vuV37lV/iVX/mVR7RFCQmfbWxbzWq6dk0NCnyUTQENY9iLZm1NfflPTKioiOepXjUn6doQp6rivjPHparOn1ciZXlZRXriKI5tq4Vrd3dYPn/cdsLx86VAjWqo1ZTIiiuvQG2H43mEboAxqgoZGl6WrUGJ0VQLJ/DBtjG7HQzfO/axY1FydP/LSBDGTf50uVeZJfFcA4GaMi7EsBQ+Xp/ud9jkwX4ty/1x/EjnQl4ZkmSnS7flEwYN7EIae2IM6bjsLDmsr65iXphn9PkJpvIegqcRkxO3DZR8kDlMV66o48W24fzzZarP/dgnSvP0esMxEkcjl7u78MMfqt9LJXjppZNNAj84h6pSUc0lzxrxe9stzrDdUT7VC/lNNCGfiF48py5wEhISHj+6rkTAjRvKt3Llyu3zdh4WqRRMTUaEzQ7rV0B0YPbZPOsbGr2eMnoenfV0HPPzwzTUcakqw9ibBdVXC0+pdLhXjqbdWeCA2oZ6XYmogymG+H+1mloMDwqcYhEG2xY9o0hx4KozY6EUR9/TcAnJiojINiGVQnB4ajiAlHvKRhz9jyDwDTQh0fUQTepIGWHINKGmYxhKxMU9emJBd7/+m7hfS2SnkF3YcQuMp1tYWls11mMaYQrOZbYRWoEVZhmU0ww2A9Lvvcds4Qpev47QBdrUFMaf+nGMc0q1Poj3Y2lJRd40TaUelXA4LDZI2fRzBRwJhuORTZmHqpQOcnBg5tGBmFevDoXPhQu3N4Y8LvISBOpzEwvThQUlxM4irV2fld4MIlVGCMhb/WG1HiiT9kOodjurJAInIeEJ5sKFYansjRu3LwAPYwpyuLqG/dbblJZ7uE6J5StjhB8FLPzEOVb9Kba3VdrsqPflKKmUWsS7XXX2fFzh5OysEmubm0rgxIu+ZalIx93E1Oio+v/m5u0CJ047eEeCMIUCbBfydKsL5Le/j5aeIqOrG7l7079T3TbO1BT
m+AiGExBGEdFBLbOX9hJHpY+UhIGBMAI0HQwB2ZwgZ6TZqklGMl1ynTZXN/Mgc1iWel/u138TN2tcaRWpRzmqdgdD+MjdOq1GCKZGSgxoMYHmpnBDk612lpS7yXj7KqneGjIMkJ5PuLRC+OHHyF/5ZbTx8fv2fmxuDnvSzM4eju7FYqPVG7C206FV2yGK1ADSYibF9GieYvaw2Xh9fVjldrCr99Bvo/5++eWT7bfYwwbqeJ2bu799/biIDc+ykwVDJxN1mS71bo/SfsZ78SQCJyHhCWdsTEU+VlfV2Wy1qhb7B628kFLSG/gEYYS+uYnxB3+A7HSpVit4JYuo47C2bsP/+QHzvwBrwRRLSyptdq/y5pkZlaqKJ3wfTVUJMYzExCJoZkadbW9vq6jAnQTOwdENx/XOMQx19n5wrINhAELDu/AsmnONaHUdo1rGFB7uAKzOLkEui/fCi0xUC+Qdj2bXpTfwiKQkkqBrAh0N29KJooggVEJHaBIZakSaTso0KGR1nlkosPzDPoObHczgY4wffkQ9eAORz2GmR8G+//lPYqSKmJpicHXAIGdQDttkN35IWKuz5Z6DQYcxY5XVwQU23RlszUP0W6QHDRY6PwSZhmIBslmE7xPVG3j/8HfQLl9Ext2A4426i/ej0VDROd9XKcnjvFCt3oCrqzVcLySTMtB1jTCMqHcceq7H5ZnqvsiJo5OgUrDxe+a68OabynBsmvClL917nImUKv0Zt1iYmXn408sfBt2u+hzHZMbzjC34yFtLUFLzsGKehF48nz1XUUJCwn2Tyw0rSmo1uP7WzgNNQW71Bny0tMt7i1t8sLjJ5u/9M9qbuwRjY4hMmsl0i1wWSlWN7V6WpT9eYWJcpXTiIYf3Ik4jxIbRo8ReiGZTiZFcTokVw1B/H9eVOCY2Ih8dzQDDsQ2NxuHrhQBRqR6a9m526hCEuNMX2Xz5S3hjYwghyGVspkfyFLI2pqGTMnU0TcMwNLIpE9PQEOgYmsA0JZapUclkqeRynJvKUHVqdN+7Cb0udtpAn5kEyyJqtgnffhetecyG3wOhaeycf522NUKxvkhl6V2ieoO6OQZCI0+HZesS/c02pXCHWj+NNnCYdz5AkyHNwjkwVYpIWBaiUiaq1wnfex9RKRNKnRudcfxI29tfh70foBZmNShWUq76jE8P6Doecu+AkFFEuLXN9o+uEG3vUkgbmIaOJgSmoVPIWrheyNpOhyiSXLmixE3cIiEWN/U6/MmfKKFSLMKP//i9xU2/r6KCYTgsKz9r4qbXU8I/FjeplNrOuXkN67VX0PK5M9GL53GTRHASEhIAdTb71FNw5UrE4IMbXK2PcnG+iNBUVZB+j8qLo2fXdr9DprZLN5Oj3xlQRmPVnWEs3SKQOn7WpLEzwLjWoThbpNVSi9zly3ffzpOkqqamVHpieVkJojiKs7OjFrujKaiYUkk9Zq02FDQxhYKq4trdPexzMQy1mGrT01i/rHwimfcEg14a48XnYFCn3fOGEYcowtR1bDNSizQaQRARSYmha9i6gbRNMrpJGGjgRiB9MmmTzlvvE7ll7JyJlrbpR2mEaWJYAtndonTr+8hXf/rYBetu6cZeeozBeZvCj75FsbeGsG3qxjhkfPLeJt1cjqZXJGoZpDM9Kt46ld4K9fw8TXOSPg5jbGESIAwDqenIbg+iiMWuUo2h1DHjKV173o+o12ewssvHHwt22yZaKcQxuny4dCD15HdIvfce3vIq6UaHGcsiHB+n/7nn8fcUqRCCTMqg0Rnwo/cDUpZJJjMcVwIqohM3g1xYUCmre7G6qo4zUMfDg873elT0+yrqGp8Y2LZ6zQcjofrMNHz19HvxnAaJwElISNhHCLhcqXGzsY6TL3OzazOZqbPRr1BNdahY3WMrL6SUrO10cL2QQtZCCIHueehhgJZK4fkhje4AaUq2nSIT6ToyLLLVM+g0A1JjwzLvO5WCH2R6KuLj73Wo73iUQrAnD3u
DCgUlcAYDJT7iKE5cddTpDDsUHyXejqMTxOMsy9EoU7GoRE+vB7mc8omkp0Csg6HB5Zmq8oz0B0RugKYJxss5ijmbVtdl3VCpKT+IMHWNlGmTCzwym6us17J0ohxCc9E767RaNaL0NLYWMAhNlnrKNZ3WfUQ+R277Y+Tu54fvy56oCW8tE165QtRogX843Vi3p2m3IV/QKBd8jNHn2GYMGZaRUcjGlk3TzZIXLbbcHDnD40LnBwQY1ItzBMJCIthikhlWVKjDMmEQstQsQxHypnOohw2uC77P4J+9yUfbVXYGeTKyxni1QZB6jmhykjCM6N1covX2myADqJQZaDa2DDHX1si3mnR+7Mf3RU4Y6mxvmJwbk/tpVoAojHjnn/VoNiKEYfCFN9JUR+4esfB9JYji9/38+UdjwH9QHEeJ9/hYtCzlB7pTive0e/GcFonASUhIOIR0BkzJVVr5FDXPZq1fYRBaMMjTDyym7fC2yovewKfVH5BJDWfuDDSdfiQYtHsEtoUbBGTSt+ho59h0Kkxqq0grx46bwmqpSEyjoVJImcydoyyxN2hiZZcVZ4yr39V56rx/mzdobk4ZVuPmfzMzw3JxTbtz08CZGbV4bG7e3iMonVaLy8G5VYWCesxmIyLTVwtIys2DLNDraczOpihk7H1fkqFr+1U/U1XJ6rUQP5B88ZkUpqFR/6hBffEGwulh2HmILJAhYneHTsPBndDIWyGhFKw5o6Q1j5w5gMgAdzi2IN5P4bXrRMurEIVQraKdm0VY1r7Zd+e5P0NnUGXSGjAS7UB5kk5vjB2nTMHuY42DbEq23Qopt8l0pk1qvMj15kUQGhINAYyxCVIi+w4in6dpT+D2A/SiZCLdHB5fUhKurhP1HK6GeWqpcexMi7FwjcJ2neifN+n++I8TjI1RvfkxotulMz1NMZNCeD1CzYDxcYytLTIfvE9rdBRnYLC7raGJiKmpobhxFtf59jeb+M0+hvT5Yu59UsEY4V18ZLu7wxRlPj8c5XEWGAzUMR0Lm7gNw0laA5xWL57TJBE4CQkJh4iraspRnXQ2YLU3Qkr3aPiqvOp6f5TzRu1Q5UUQRkSRRNfVGWGn57Ll68zmixR3tvAqIyAEnueQMVboM8N6M8XoOYk3bbC+EaDrOtPTgq0tFcVJpW4vvz3YlTVdrZApZHH6go3rDSZ2/+mhypx0emgadhwVxYmHb0qpIi5xxdjB9E0qnQJZpdu9/ex2dFSJn93dYZTJsiCq1Wh+5xrVzveRno+hjRFan8c1R+HpqvLfpG83ewghMHQDXYOxsomMImpXbuANQozyCIanYfsBQhjouSLdhovb88hnddK6h0aEagEIRjjYr4iJ91PY7kCnq9RYOg/dHvLGTeSli2gzUzSX23Q+XCH1bJliyUBYJuvtPA4mBbNPL0jR1jJYVQ+jC4XA4fKfe5FG8GOI3/5Dmv0UpUyPotbGCvvIvgOWiW/naUx9ARH4zDffRRql/QZ94W4d2e1xPf15dnILmLJD0V8k1KFeKJOt7yLf+T7hSy9h7ezgl8t4QUgoNXytQBS00CydsFTC3N6iv9KnISq4XsDCgmBiVCnP2kcbvPN/bRG5PoW85Aujqwg3fccqrig67AObm7t3Zd/jwnVVei3eNl1Xabb77Xn0OHgYlZcPizO4exISEk4TMVJFm54iXFwiNZPifH6Tm50JymaPLbfASHuRmxMv8XS5ul+lYOgamiYIw4iuG7BR6xBEkq2Fy6Q6bTL1XdxsgUiA6TUo9HfZSl1g1XoKO9hhIGzev5Jh4JtcmLfY2lIdYg9WvxztyiqEYJYG18JJOvY85eYPEUe8QRcuqOqXpSUVsZmZURmU2Ivz1FPHV4vppefxzz+D61YPiaxMRv1st4cCJ1xdI3z7Q/WFPpFHpGwyPYHcbNN7p0Z4Yf4eVWcHft+t4W63CTLj+JFFEGqARBcR2ZxgOzdK6IWELkhbYImAUGpIJKXuCtqFaaiUCf6
P/0vtp0qJcGMTMsqrE5kWvU5Ebm0d/enL7GQWaOwGnNM7TD5dxP14mt41n3o2S0r3KRg9XGmy4ZTJh9tcugji0iXqNzTCV9+g9L03Eb0e1fA6kWkislmEbbNafB798y8wN+li/MA57P0YG+OKO8eGPU/odonCFlI4alq6JnBzecztLZrLq4QDF1ks4QUGN7vjCAElXaVCTcNgxxujVZc42YDJWY+FaSUmF29GXPuDFpHrc27C5UJ+G9DuWMV1cI6UYajj5lE0vrxfPE99Dg52qF5YOFvpsoOctZlXicBJOPOcpTOCJwGhaRivvqK6oK6uo1XLXMiscqM5QqW1Q90cJX9unus3xP7QzmzKpJhJsdXo0ug6BJFEE+BUR1l64RXGbl4l36hhOF0wDNrlKu6lKWSmTL8L+dIAz5N8dC2FEDA7aVC71eHKtz2euqxE18GOu0IIQinYHhSZyeyy2h9hOf0MF9c+PuQNMgy1fYPBsIGfriuRE0XQvbGO8fu392oZ2/oRy3WHZeMLXHrtcHfAOCoEQ9HFQKhS27SHBNI5gSjkGHTq99UKXzoDwiAisFK4oUEvTOFLnbTukNJ8vOIExq5H1B3Q0G0QESk5QO7WKZbBePVlqDf295P0PfoyhWuMUKHBppimn7VZaH+A24lwtAym9MhoAzS9yPrs63SXrlLorOFlytRFljCQZJ0dRnN9Rr78DNdvakgJ2vwC1kiOczd+H21jar9k7WbhJfRLFxl/ukq2CnL+lw59fm9cj1hbWsQNQ7TIwxQRoUhhCYcokgyEQdbzcFyfXghuK2DXnKZgSCYzDVLCpj/wWWyOoDHA13QuX4aZsSqFTIq334bmukPU7vLi2DqVfEjbT5M3nD0f1rCKK9qpseKM7o+6ODpH6rQ4KmzOog/oKGdx5lUicBLONI/ijCARTPfmaOWF79QYCddZzl+kNzHKrmOQXevQd1NcmLcoFgXTI3mWt1v4YaS6bUgIpKRTHqHzUpV0p4XhewSmhZMvkktbBG6Hfsei1QU73SdyI679oI/9/VukGuv0PZOrb0kW5iXazIw6BlIqpDIILbp+mkFokTY8+pHBdqvAzJGurHNze3ONNtTiFUdxtrcjwsVlzh/Tq8WeTSEXB7gf3kB+sXKbgbnVUmkvu6NEV6k8QwvBmlPBiwzO57YxtQg/VSBcfe/ErfBFOoUwdEI/wsEmiHSEkGSNAREaUtOxMwZmMUWtb5GNOhStBtr4GNbPvoQ+M024tLK/n4SM2DDnkZFJRW/QJwOaRIt81voVGkGacWudmXMaQQB+YZT+eZtobZ1Md4tM2GGLCYqVPM/+/DTt7BSyq1J0o6MwOjtK9o1/cf/ztNXLYogSpqXtVxwd9H6sr8PSbhcf0AOHlNElow/ICFeJTikxfI9QN+gWK2wXpvGaJm7Wp8AGnWaLnpTsGpfIeAPMmTFe/NlJchmbIBD88R/vVbRFPq8Z38PKjXKtMwUSrKxPSt+bxWHbDGo9Fq+CNnr3OVKPE99XnrGDwmZh4d6l7KfNcdFV4NRnXiUCJ+HM8ijOCM5aCPUsE1detJdWWXv/Ol4QkZoaZySdp16DvuNyY0V1Ppscs8iXVD8XQxP4oTzcl1eI2wZNdgc+mvAx7BzStwk9i5y3jbW0zS1SzFSK6Hkb3/PZur7G6Mp3IfBhbyRC1nBBQBDpVO0O/V6aligzYabQDz815bIyMG9tqV43ug5B2yHcbeBXxkkJQc3NkdY9MoaHEIJiRdCq1WgvNSguDOuDq1UlcHZ2YFqqMQeFSkDLgQ2nQslUY8ctLcA3TAa+hn3CVvhipIqoVIjWBzh2FV/qmFpIzhjQDyzcfohZLpB/9TztRQ/N8MjMgj2SR5/Z6zOz56Fi4BKms5AJEd0ejXQRBBTDGp6WZoANjkN6toA+VuX6DdUnpjRTIJzI0dmaoVd3KFgh55+2MGdKLN1Qwm50TxQoEaMETK8HbUe9joUFSdc5bKqu1wVXr4Jn2YR
pi7H2Cp5hU9F3iCT4e+aoVLdDa3SCtjVKe7TEeP8a53ffJe13kD5smOfRoiVM22f8jTGEptFuw9tvq+fO5eDl53xaSylu1qsIG1KGNxQ3wHYnTVOew7QsyuW7D2J9HASBEjbxXDEhlHn4rI6AOMpx0dVekKJgOqc68yoROAlnkqNnBCAIpIaZ0R74jOAshlDPOtHaOu3f/2PyS0tYYYC0LLzpGbJPf54le5L+wOXaompYt7wRgJQYmoYft3y9B1KCZvdACkLXILu6ie3WqWXnWXYsxmQb27JpVRZI1T8k67UJd2vqLNEZsBBc5WYwx4YsMNn/iK2RF1hsVXl66vDzjI8rgdNoqEaAMzPgbfbZcYvoUZ5LbFF3Vd34pYIaNDmac2g1UqythuSzO/sRP3OkCmj0+yAqSkzYvhI1oVTHYhBpWHpA19EY6DlKR1rhH4wi0q9AJg1oCE1Du3wJrbZM6Hh4mo2te9hhl9aWg29Mk56eIJvTSRXT+FEaUYSRA2vGQQ/VdvU5tIpkvH+DdWcCrIBK9xaL+ZeodyzGsj0WfvIp/EBFcBxHXfR+B219FRohaVljYu0HfPSdFxHnz8P4OUA71Gk4DIcznUYnB3y8vFcWvzdKwZIZmtsFiEyCKCCcTOP3dWYbP2SQKxAYJobvkep28NJplqdeYNBLQy6NmDHI7LSIBrCRvkSk6eS1Glk6NP75W7zfHWE3mKWQtbh43uTyZVhfq1IvPIPc2mG82Kdo7VWWScHNzjhRvY42XmXhhTypUzQSB4EyD8dDXoVQEcfUp2xygnQGRK5Pq1il3inuD1yL04KnNfMqETgJZ5Ljzghu7TUMSxseY2UPcR9nBEcFky8NdBHds3ndk0y4ukb/t/8/2ItLaFLumS4F+s420coq2S/9BPXgHGHU4tvf98mXfAaegVV0TmzQlEAYSYTVgV0wO22CdIq83qQlp1jpW5yTa0CBrexFzoV9jG6X8E/eVN+hus6IWGbLmmNzZILc8+dxhcb29u3TnScnVZpqeVktImbaItAtIreLF+mYeogf6gxCg5QeoHkDCE3C9z9i8P0PD/WPkRNfRlSqqux6T0zI4gIpTc2h6gYpLOEhnQh3bvZQK/yjUcTAfwOtkCN8Kq+iZtUq2kWT6NqAyAFt0McUXbqVaUJ7hiiTwXGUHyM+4z/Y0+egh6qz2Ufkc9jzU4h1C63dRAaSIJVHlEpkXrqANTfClStqlEU+D7LZoPPxTXYHWUrGgIvee7R2HYLwOttLAVNzi4z99DPo+rDD4vXre9tRGnBz8/AoBceRXL0qCHyHUiEk1Ls42TzaFyfofVwnVdvFCtpEukFrdIJbE5+nZao3L1vuUFraZCd3jp2xc4gooqxtqyotmWe7M0v3istgxmNstsPoZIkrV1JIqYTinHcFfauFrJZpaWW2O1lkp046q3H+Z2fQ06fzWQ9DJWziURJCqAZ9Z6Vq637odmF1q4DHRUQnhbABAVPp+vB74JRmXiUCJ+FMEk84jv0WGpK86dDx0ziBxa1oFtnPU10NmRwZVtrc8fGOCKa2l6Hh5kDAeKpJ/pRCqGcVGUX43/oDopuLCAkyk0HqOjIIkL0+6bVVxn7wNrU3CrQbWbC6NOo2hi3wanmy5R6aHp38+YCSvY0WBvh6BlvzyYku28EkSw5c0teQocnibo7z7WuIwUCtCqZOLhXQMCcIJNhWhItKtZTLh02ZxaISOI6jzphnnskzeF+ys2mhpUaZz25zqzvOSn+Ui7l1opU1Kk6KmtBojkxRGfP2I36FTZ/Wiz9FZ7JKbk9MyFqdVDaP1HQ6XUGmvYqwLjFYOL8vmo+LIop1C9lo4v/ut+GrXwGmMUoFZCWH7IQYpTT2s6PUtg3Ylehuj+3tNLat7U9HPyoo9ZlpBj/9pxH/bBl7a5kVLwVpm3PndbYn/gWawQzjs2nOP6cxGKgFNwig044Ir23huZBPeZQat8g46yxmn8fRCoz2VomWNNJ/dJM
w96fRZ6b3+7Jks5LW4HCzxyCArZU0MtRBeDhhH0MXTM26NAZldr7wGmZr6M2qywkCX71puWqHTKeJ3zRoZ+YIDZOU6BJoKcDlZvgikQ12v8fn57do6Dne/r7L7JiNZQkW3qgSzf0M/nff5sYtgzCIQHeYXrAo/djnTiVaG4aqoi8e2vppFTaep6rOXHfvinwebaRCeecKlWoWTTsbM68SgZNwJjnoIyCTRgiYSDeZSDcZhCbrzQy+rtP203Suqi+KkRE1SPG46MFRwVQw+zS8HEjYckpsRnksZ4v5zgB77Pb7P2lE2zsEH36EpmuEqQxSaPS1EilRJ8pkMbttiiu3sFtNsiUIBhYO4HsGQkh6jSzpgoNhBfd6qn1802JBLLISfg5fNxmxGwzI0fILrDa7XG78kH5fcjP7IhfG14lcj8hxMS2duYsGNxpQf+cWM788yuaWtt/g7yBx878bN+CppzTsp88TNpYJtldhog8yRLoBfn0Drd+nlBE0RqvsoFEWG2h7Eb/iygaNq9fZHitTvKQM2cU/ukZro4/bFWBKRmaraKkLuNkCcBcjpmmiWzmiThfvu+8in5lE1/fSVaYGwkf7+H289TS6n0JuOrTtScpzBTLThTumM9Z2LNWAT2yxJCcRQmDnLZzsCPhZUmklAG/eVMLPtsEYdOl1m/StUQr9dUy/xwf5L5MWfQSgp23m++8jd8sEb71DOzOJ42jK5zTis7o4bPYYRbB6y6bf1THNiHQqpOMEzM+GtHsBAy8EBMGeN6vfzBIG6rGylQ5CQL+RxYkyBJpaqgYyhxNkaclxJGDrDp/r/jN2mn8GJ53GDzxyRZ+ZSeXKdSvTLD87iTbTQfM9Ll0EfezxFxVEkYocxtVaQqhJ6XHbgU8DYag8bPEE9phCAcbHNcjP4v/uhyoyWS3v9z2Kao1Tm3mVCJyEM8lBH4GWPrAYALbmMde/hX5+HvfpPBubwzlDOzvqy2Nq6kjY/ohgsrSQS/kNNWXaLdLq6Lhahpu7BbRApTfK5VN44WcEubGpZgkVi+hS0A0sbpqXsaI2s9HHYKex+z0yzRqDUgk744EWItsZwlDDH6izcCvjYWfcezybwskX6ZWrzO3+gBX7VTzSnLNXuCVnqLcLbPplylqNKJthXZ8nk+lRS1dY6PwAY3OTczNplms6G1c7ZCaKOI46Hg7OlDrY/G8wgHMvVvF8QeMjm+X6Tcblx2wwzVbpaaYiD318HBEItpwSBcNhKtNQYyiqRaLNTfxrS0TFHNrUJKO/PEn//Q7dWkS2qlF6Po/2pra/qB2NIg5CE02oKJcQoFXLOKvb+ONdwrCApoEWuuibqwRs4xvPYNgmGa1Pp93Eud4hGvepvnD7WXG4uob7XdWbp1OZRxNlRtll7eaAzVvLVL6gMT9fpttVZ+Oaps7Gnbak62UopLvYgzaenUMXIaucY4ZlJsQWugwhn2ewusXmtQ6iWOTSJWj3h80epYTVWxbdloFuRmTzqvNyrtKh2RU43mHh263lkFLsixt1XR5T9Ig0Az0KiDSTnizgRqqVQFnfYTq4wqpxmUbHIpfRKI/2ye198PfnSAmNsYvFU5kjFUXKm+Q4w+tmZ4cNJs86Uqpo6O7u4X5Ntq06PB+q7jqDM68SgZNwJjmuF8txZwR2UaNQvP2DGDftMk31QbTvIJiEgDG7ycjOOv7cJTaLeaRUZypbW+opZ2bOdv+JR4FkuH9sQyeIPIJIo6tP4JDmXPQRNspYqxr8SXQrJFvp0qvn0ETEoKtCC6Gnkyn17/2kQrC5cJlUt82l7vdZFZ9nYNjM+R+z6lRZzT6D5b4Peh5wkKieJreyz3Oh/T62dCnIJn3P2/8yrtWUUD3Y8fX8eRXBWVpSEZ7URIUoLEF2muJ4m93tAt7Ahbc/gpTNZFRn3Smz2q8ylWkg222ilTW0ZgXf8+lfr2HOTGC/+gqiOI3bB1Fk37wah/GPRhFXeiPo2p7AAbBtPN/H6/t4qIg
P/R5m0COcGMVrp9EJMWxBqmDgdTyiq9fJfqkMDM+MZRSx+ycfIB2X8pRNy8shgHIhZJNzRNs+qZWrWOYXWVxUfiUh1DHuGTroGkbQpRTssmUukMJBx6dPllzYQmo6pDMs1SsYnsf8nBJIB5s9bq2maTdMNF2SzUboOpSqDuu1iIEnh32EJPTq6jMnNEmu0iWKBL26GtftZ9MMsjkynSZb2jhEYEQeM9o1TN2lNpigny/Qs2ymR7sgQEYaH388fL8vXHj8n18plbDpHzjsZ2bO3hTyO9Htqu/Qg6JGCPVderfXcNZmXiUCJ+HMcj9TcOOS1WpVnTVtbalSXt+PJwhrpObeYGSnCXcQTLnXX+CpGW0/GlSvq8Xpxg31+KOjKgX2JKBNjiNyWej20MolsrbO57wfcE1eoKmNcM34Aq3CKE5FmUDi70FNk+SqHXr1PHZ2wKCTxs45dGt5lXZA3tYP52BOsVsZYfnFL3Jpa4m5+hVW+jN4PsyHN1jKjbGov0BetjFFmUnW0YgINZ1VMctcv894ymHJthgM1LFQq8XpqOFrM839t552Wy08g4HGbrtImiKjF2DnRou6PsrIwCWX0bA0Hye0cJoDzBs3kP0+FR12K0/RSlmUF28pMf7cn8GyVKjA9/fK0VVx2W1RRIAwUl/8mpDguvhGGl+z8AYQuj6a75LPSZwoTS+wyZkObT+LqUnyOZeoVlMv8oBvTO7W2F4L1PBNo0vDy6ELya6bp+YWyec7TLU+pLl4Ecepoutq+1wXGoMMxaJBvlZj25imGm2wpi+QwuUZ+b6aM1UuccObBX1AdVTf94/EzR6vXpX02yZCQCYbYpiS0UmXdj/YH3YK6jm7NRVt0Y2ITKlHGGj0m9m9YykiijQa49P0ejnSzTYaES90/h8cPUfDnIC8wJ0rkS536A3S5I0yW+tKzZzGHKn45CqeQA4qmlwoPN7teBBu89Vw79T/cZylmVeJwEk40zzIGYGmqYqZyUm1yGxsqDOpQXaU5c/9ItHVa+QbS1SjLXTLuE0wCaFSVGNj6sO+sqIWqe1tdbEstSie9eZbnwRtbBTj2WcI3n4X2h20TJqyJXm290NuBfNsm+dYGXmRrjNNKtVCM4anekIog6jTTiOEg9uzMVMewZLk0u5bFJvKTBzpBt1yla2Fy3QrIwiUUOqWq9Qvz5PuddAaAzr1DJnlm8xl69wIimz744zJXbbFOGNsqe63eo5G26H67CQXPp/nxk217luW+uI+mqqam4OrV1XjuaefVmW5tdpe2uoc7BbyNIsLVGpvoaWnGEu1WelXuLVhctH1kJpOoaxRS6doiDTVGY9odZ3o2nUyL6iISqejhFS/r7bBOhJFBA70CoqIag2C8c8T2FmcBgS+xIo8sqkAJ0gxCE10LcSXBnnZoaA1sQcton6fg5+GsDdA+iFa0WTNqdLwspTNPrtOjlBqFFI+qW6XW+sRdW94lu77UCho6NYUwt0i26txi3lymsNceBOrV0PYFrujzyDbPfSJUcYulw687wLhFeg0XXw/pFSOMG0ojQ7ouz6moWMbOn4YHBI3hhWQLjgEroHTSe89liSKNMJAo9srMEKTVNTjc90/Yj31FCG68heFSzj2OL0oS2snR2UyB4jHPkdKSnUsdTrD685KV+S7cXdfzek3PvykJAIn4czzSc4ITFMtWKDy4GtGlaBSpt9+hp7nodkWo+fzVKrHCybbVh1OpVQL4O6uWqxu3lT/HxlRkYKzMLfmYSI0DfNn/wVko0m4urYfa89EkvngY+y8zwczF3F6Fo5TJlftYmcPe23SBWffi6PVXYoba/S8LHqqhGZGGIFPYWeLdLfNredfwamO7C/43YGP7kdUCiYLc3m2ByNEWztMFkyCmsZWNMIUGzREkYKzhtQNdvMLlL/wOWxLY2JCTQOPq1WOpqo0TS0+ccO+mRn2PTvZLGSyGt3Llxj86Cap1XWmywHLbo7dfo4LYYiWSRNNzbI9KFG
1OvvNzDKNFRzvaQKreEjgOA7Y9uG0q0xViUwL6fvQbaNVc4SXnyHoafg+hFJH0yLsyKEZ7kU6Ao/QHdB3u8jBCkVvkeCfSYRh7Av07X4ODJ0xuc02E7ihhaP7OIFNSveY1Laoa6N0vNR+9aHnqahDqQSTlwps8AU0cY3sZgOz22Mm/BBKRQaVaVptA5FO8dRPzx460djZgfUVm1JWI2CAZnrYOZcISSWfxrJ0thu9QykoM+WRyrm4fQuvbyuxJQVo4DsmvqdTaa1SDjcpjDX4oPRTaDJECp2qtYbVdCneWGXnwgJPnctTyFqPdY6UlOoE6qBAmJhQ+/Gscl++mk85icBJeGJIp9lrTqbRbhfZ2NgzJ++qy91yzHGodmRELQYrK+qMd3dXXQxDmQc/LZ1HT4I+M431L/45/O9+j+jmInLgYqRs9Ilp2qMzWJaHteMw6KVobRVJ5R3yI51Di4uZ8tH0gML1FQzPZTczw0AUyFMna3Xoli1yjRpji9dYHR1BCI1iq87nltdI13fRwxBhmRjpKqtamrxbZyIvWO8bbAXjjDu3sESO1OwUxkvPsuRWeUqqBWZnR52h5vPqzPpoqmpyUgmcWk1dn06rL/5eDy5dguv9KlsvfoXzm3+CWFsn3duhF+n0KucozhYJswV0L6IdZFQfHdsmHzYY+B69vhLX2axqLtjvq206mHaVNx2ibh8Z2ejVMuZXv0LQGyXqqIhhpOkYaYuw02dgG4jIJ9tfxZU2gZ5C6Br5kkG4vQO/O5yk3tGKaNUK7Z0l2nmbitXFDQ28yGAiVSfTWmej8hpdmUXu9WN0HCUAMxm136pzBVb1L5Aa3+W1/rfQG9OEwmDNH0cbr3Lxp2YxZofdFFsteO89lX4q5k3GxgxKVZtUWnUyDsOIK6s1PD/aFzd21sVKezjtNIFn7Ikb5cVxWmkkYHoDLjjfh7TGbjRDVy+S1xqM6ytIKVjNPg1dwaQBz15KPzZhEfv0ms3hdePjZ7sw4UF9NZ9mEoGT8ERSKKhLHJmp1dTvq6vq/5a1Z04+RrBYljIuxmdCOztqQVpcVP+vVIat7D/tHJciTI9UudTo0ru2SWq2T6cR0dxJM+ikCX2d/EgHwxp2Ms45Dc53f8iK/TRp0acZjdGL8pT0GlV9g0EuT76xi9loYsiQ6ffexTAizMlx2Os2naptMqHl2MouUHGaeKZgR6tQzz+DmMmQe74KpoomXL2qBMvFi2oGVaejBGgQKDEa944BtShtbSnBOjOjxM3Ojjo2hAAqVbRXfgmzWePcj5pc+U6N1eLLlAtb2AQUzD67boG6m2dcrpO1AnYti35fPUacomi3lRfj4D41v9dBOD5GO40xmlajFq6o2wQBRJGGMVpGNFJ02yADF813sS2NnN9GMw30+XOQz+83qvSrkyA00p+7QP/bHdr1kJGCSyBy6OGA0eZVdjPnaI0/BUJDE8PSZVALXa+nxGC5rPH0S2PkJn8VuVvjylUwTYvJy3lS5WHkpt+HH/1IiZs4vTsxISiVVChASslHS7u0uwGDVh6QpHIDDNunW1MGYxkJEBKEKhcXWoQmYKpyC3/VpiVGaEej5LUGlnDxpUktnELokkLQ5wsXfUqlx/OB29pSojVmbOzsevMelq/m00oicBKeaA5GZtQARnU2Gk/zBfWlPzFxuBInvm9sbPZ9JY5cV4meel2lQWZnP31NvI5yXIowY1sUsjYpy8ArhqymmjS3swz6Fo31Ctlyl0xR1cYavocReoxaq+zIcxTELmvhBQYyQ1/mmdU/Jh12SMuAkcXrmE6f5vQkhmFia9r+wL7c6jphOaDx8utMuQE0MtT6afq2xsamEiiuq0TpxoaK0Jw7p/qPxK3wd3dVJCV+L8tltWD1+0rQZrPqrLzdVrOAlpZgZVXj/PlRZn+yyrUrIfVmi2h0bwSBroRc20sx2t5rZlbIE/X29tNen5P+kSIyoWmIYhGRBRGCOJIhjSJ10TIZoukXcb7
nIQZNHC1PKnLJpz380gzka/sl5tHaGqsftJChRj4d0Dr/FJn1Ju1miBd4jIstiuM6N6c+z8AoIPfMvnFqamxM7R8hlDDLZGKTrsaqO4oYUdeVDkQpPA9++EP1M4rU/h4dPZyi6Q18dhoevUYGXXPJl3pI3d/34ESBjmaEhIFg0Mmg6RG6GVGaqNHaGMHQOnSjMiVjh4ymjOq1cAoBVGhxuVCjNProzS7b2+pzHRN/b5w1Puu+mvshETgJCXvo+tCc7HnDrrfd7rAVfbmsvsCPepxNU039BXV2t72tvvCXltR1pZL6cvm0nDHda+K6oWvoe2XBtmlgWCGF8SZiN8+gm6Zby+MPTPKjHQLTItINjMBn1FqlLSpMcIutcA4Z6oTyWZ6z3sYKAvKtOvpIBS+CZs/HkkXKdm/f41KqXUd84WWaYpTJCoSratHRdXWmOjOjxEyrpVJTuZy6dLvqPfL921NVsQi6cUPdv9tVUZx4kfb2jLiGqVG6NE7jhw71lR7VUY2K0WTdz9HrOmjVvWZmPW1f1Ma7bHCHETwHp0Yf/FvKYel2kKsg8w6218azxhBGSC8dYFtdoKbuYNtEq+sM3vw+0h2wGYbsyElGqxG9iRGC3T6jQYO1Woqt3S28rI82OYFr5PfTE82mWgQ3N5WQefVVdX2rpaI6cdfdmDBU4iY2Uc/Pq8/H0X4zzWZEfcfC1DWKVRcpBFvrsbjR0IwQf2DgDyw0PSKVc0kX+vSaOdzQJpVuMt67Rcp0VIM/qVoTTNtbjHW3yZ5/6pF2yI3T0DHV6mHD+lngSfLV3A+JwElIOAbLUpU2oL7A19bUF3o8sDEOxR+Xcy+X1SUI1P0cRy0ezeaj72B6L2FyEk4ycT0uCa53HCxD21+Qi2Md7KxLZzeP20sR+AZhVadbrlLY2aJbtihodSzhoIuQrXCWvp/j+5k/xXQqJIUkSqcxgZZnEJCl7ue5kNvcH9g3mukRGhU6HSVGo0ilGDURsX7dYbzQJ7QtVmWei5c0Zmbg44+Hc3+kPJyqymTYL182TSWIWi31fk1OqgV/a0tF8eY/X6bV01ndhUrnOxS8JqkwT7c4g/dTz5GamSSz19gtioYN3lz3tt08fL96DtJ3ibZDkFVcV9tbxFVEyXVB6DqmIdB0gaenkbKLrQVDYbRbo7XrIWWD3FiatqgiehY76x2ks8RoukvlnMEVfxa3k4VWG683YDBxAWMkw9iYes2Li2oBf+EFJRrjKkTg0HBNKVVaqt1W2zc/rwTl0ancjQbs7uhoAsYmPBpdg+1NG12EeJ6OboY47fR+H5zCSJtIavTbadxuCivj4k/lyS216XYLaLaPZUlm9DWyrQ7p8SrGq688kj4rtZoSujGVyu3zzU6bJ9FXcz8kAich4R5kMsp0CmoR2Nwcmgy3ttQXyszM7d1JDWMokg7eb3lZXVcsqgXhYX03n0SYnOQxTjJxXQjB9GienuvR7rlEUmLogkhK7KyHYddp7xTw+jbNzQpXql/ixe7/j1yjxiCXJ21ETIfXyQ6arNlPUc9NIRswYb5HxvPQUjZp0aeLJIoE1zqTXNBu7A/smx6L+xupRSdod9m54lD2t9BEi5zhkBnNcbV+iWfeqHLhgorQwFDgHExVxc3/bt1S72W7vWe23QsMNJtK4IyOglYo0C28iPncDJY7oLJWYKufp2FpFPbe125XXWxbvb9x75eDRLUa/ke3CHYkIS3ca1dxMy/hcInAyaALnUxap9PVkLqBlTXROi10S4WHLF0ptiiKCBeX2LGeQUyM4+kRdadAJdOj0+oz8EOq6R1W5GV23DKRYSCyBv0OFNprlC9doFZT08TLZbUwjo6q/RTvs3PnDqc23ntPRQz6fRW5zGTUfjvIzo4SCSnL4PwCbNYivG4WXXi4ocA0A7r1POgBmgalyRqDbobAMwhck1TewUp7uFaeq3OvU9laY35wg6LbxbBt0s9eIvdjX3roHXIbDfW5jjlr0dcn3Vd
zPyQCJyHhPigW1SVeJOv1YddSUAva1NTt5uT4fkGgzoh7PSV6Wq07C6T74aTC5G7ccVbSHSauF7MpLs9Uub5ao9kdEEmJrmkYQhBqEWMzHZo7Pr1mjno0wdvTv8BTze9QaW6ihR0iw0SOpGhMpHBCnbab5n3rS5xvfp/SaIgQggu5Tda9CbxQ59p2icsXi/vpiPl5ZSjOejWqG1dx3RI1fQRSKWy9RnOzT7H9Ltf0L3D5SyOMjKj3LAyVsDmYqjLNYc8cKVWqptNRC3SxqARPu62ur1SgXtfYkaNMzkFOwtaNYQ+UfF69p7HR2LZVJMf3hx11w9U1wrc/JOhLhD2BnikgA4Fz9RYDzSZMTWGLAD7YoZ26hCQDI6OYXhuzW8PMWVSNJrLfJ1rbJPIDxPQYQmj0AhMvNGh5Fp6nUzEb5NvLXG+9gqfpSAROZKPZEW7HwW/1SRdz1GpKqHzxi2obD6ZlD0YcP/hAiZd+Xwl4y1LvxUE2NtSxDXDpkmBxucCg28V1I1KWRRT57G7m0a0AXY8ojjdxOhncbgqhRaTyDnZ2gNtLYeiCwuwEn/+Fc2T7n0P3PDLFHNroyEON3DSb6iQkplhUovYsCIbEV/NgJAInIeEBiDsbj44e/vJx3aE5OZ9XX5AHv3zicnJQt49L1WOBlMtGjBs1NO/kKaajwgTUTJ87CZM7Ps6RWUlOYBEhyBruvgfm6MT1YjbF5dkRWn2XvusjJViGxsALCcKQ0uiAdM6nsZWnF1T5wdjPM3JujZLdRdgpnEKRnK0T1hyEp1P3ZpADl5HNZS6O7GIImBNLLNdTuKkSNyde4mk04jXn0sWID759jYK3i1fJETo+da8AlsZMZZdWvU/poxvsnK8wOqZRqw0rqgxDCZg4ShMLprU1FeJvNtVCHqeyNjbUgnLhghK2S0sqhTU2psRSuz30zoA6LqQcCpx+LyLv1Yh6fYI/+edIJwflSYRvorkt/PVNgmgET08RSYFlRQS7Tbo00MoavpFGG53GrK+S66+RCm8hLQttcpztqIoo5LF1n+udceXfCTsQRVTMFj/gdbYHBbJ2hKmHBJFN1uwzGm7SdWbZ7Kj98PLLKuIUC0EhDqedbt5UIqDbVeLGtuHy5cPH0fLy0FR9+bLaT4FrU8kIurpDoxUy6GTJ5UNSWUm+ENLvlQj6Jrm8SyYNZkon8ktMjNs8fT7N7LS1J7gfvpm41Rqm4UC9x5OTpy9sEl/NJycROAkJnxBdV1GbqSkVAVhfV6bSTmd4Vn9c6Xhcqh6Ge83Clmo037xGvVaHMGQyVacwW7pniumgMAHB9c4kCJhK18ka7rHC5NjHOTIrqRukaHpZEDCb2cXe88BI57BjNpe2mCjn2Wp0iaTEC0I0TUAoQEImG5FZ6NDcztFv2zQG85jFiOlzLmHXYTAImDC2SGkttsaLrIULeP0sor9NceNH6JbB3MUi2wsv0ktXuXpVpQx1Xb328+3vcyP/NGN2Bzc0CCKdVpDBcCpM5xx6Gw22314i87k8Fy9UuHZdwzDUft/ZUWfqhqEW9kJhKFSLRfVe7u7umX0DdX2hoG7b66nrMpnh/VotldIQYph6zOWgvtym9X++g9X4mKjdJVpfR1ZeRaZcfJGn3owYeIIgXyDARkYSnRC/OEJQA7u1CwNwHYc0A2zNRy+VMb74EmJslM7ffwfh+XRkBje0SOkugWZTlE2Kg0265jP42EjNpR2kSOselvRoUcIPTUolJXBKJSXGYlPtQfGyuanEe7erIo7p9O3i5sYNFakSQr1H166pxbnZhErJwt0wyeoh+RHJhYsSwiI7tRDDhcsvQKkkabZA1wQpy+DCBfHI5ki12+r93T+Oc0o4nLawSXw1D49E4DxEHobBM+HTzcFwfa+nvqiiaFg6Hp8RHyyj1XWYZI2RD/4p7VbEdvYC0jTZ9HJsXO2SXX+X2V8Ca+54kXNQmKjhmD5uYLLeV805RswGBf92YXKUo7OSqnabfmD
hOiFLvTQitJjTG6TSqcP3O+DHGbgBubQJQuD5Ie2eiwRSpo4+1SOTDxg0ijhdm+WbJqP6FjPXf0SuUUMPA8pGnkLpBTbGXqSenuedzIt86WUfa7LKrKbt9yC5dk2ZXoUzAN/nwuguN3vTjKU67Lp5wtCg6ViIgcVY6yaD7znc+NDm0kLAxPNfZDOaJAzVvj+YqpqcVAvf7q5aUBoNJYKefnoYtbl8WUVtNjdVa4DYYBv7dkolNfohnVbvfaq3TXR9jUbUY2w8j2ZZyM1NpOMQbe/gZKukXYmXLuELk0hqhFKgEyKlACEQnRqap0E6jZbOk7fqRM0mwXfeQv70z6FVK4Rb26ynp9G1iLTugzCopPt8v/MyPbtMMeWgSbVwBlLHcprIcgXXSqHr8OKLhyv/zp8fLvb1ukpN9fvq+M1m1X44KAauXBlGsC5eVNEwUPuyWlV/p9OCTNrgc59Tj7m+HmF4Lgs5h6Kl0xmUyKY0crnbPT0Pi05nOIwX1GuZmTldYZP4ah4NicB5SDwMg2fCZ4t4EQB1Bru1pRaAzU110bS9M+HUMMVUODdFUdSJpGBrUKRjV+jt1rn6B6sYr08yNaXdNrjvqDA5l9kllIJ1p8IgsNjpZtiOLjLSyzIp7/xlKY5OXO90mF67gtd2WNbOE/o+i+Uq+VuShSPRqNiPs7bTodUfEIWqg+38RIlKIU3aMnE8n3rbodbqsrGUxt12aW5EjIsU1alRIsui5HlMNH/AYjTgVvp1tt0Sf/wRfKmozl7Hx1W0ZWdHeUQW8uq1C9flfH6TG+0JRlMdtro53L5P08+gpeYpFyxMy+fa9Q6Xa/835hd+CfKj9HrqfdrdiahIdXIyomfYDcq02xrlsjrLr9WGZuEoUov/QYFTqRweshiPgej3IvQPryE9k151EpFxQUZgWQjbJvIjosEALQwI7BQhBn3SBMLG0Nt4kU4QSqJQYNgaphVh6wHVgodWUunH1X++hHbpMm6rT6cNqdQAqUfk6VAUbfr2RVwy2H5AlzR5rUPR2ca1CjSzC4zkNF57Tb2fsSgZHx+mQLpd+P73VVSyVFKXSxcj5E6NyBlAKsW1RhXQME11TF+7pvZTp6Nu/957ah8ZhorsbG3B1o02hd0bpNsb+H5IzdDRqhXmfnyO/Mzkw/gIHqLbHTbyhKEx+rTOQRNfzaPn1AXO17/+df7RP/pHfPzxx6TTad544w3+6//6v+apg40qjvCNb3yDX//1Xz90nW3bDO7UbOIR8zAMngmfbeKFIZ5U3mioBWB5GWSrg3YLJstj2EIQSkE3SDGRajKZhp4IWavXkK0O6xRZX1dfzlNTasG4TZgIgS4ks5kaUSTZrHv0Ri/QFiXaV1RkYWbm9i9QoQ1nJYXXbkCjgQxCTMvivPchAyPHhqjQe/OHfOS/RPVi9ZA/o5hNUcjY9AY+wZ7AyabMfbNyiRQT5Ry9gY8/H7D8D6+wgc6t3It0nTpPhTcwLQ05M8GF1Y/QB3nWp1+lXtd4801Vujw+rqIBur6XMmlXmZo4h7F8A20mxUJ+iwhJUG+wHZZxjBxN08A0BJ4IqVZNlusDzi2/ya1nf5FsVsPd3GXjDz/AbryLLgMyuRxB/gu0L19i6rnq/gyyCxeGaY142Krrqku1qt6LVms4HsKyoLHWx95tIdIzeKEgiDREOoso5KHrIe08MogQRAShBroEGSENJRb6roaMJFI3iTQdQaR8TlqghltWSgzWdtGqU7Qnn0WLArRBAzyHSnqH90Z+FkcfpeQ28TohuuwR6iFeaYJeaZryaIapKSUe47RIKjVsf+C68M476mcqpdKsC+k1gv9DncyFbsBNLqBVK2Sfv8Do06MsLg7TebquhGi1qkRfPq9ERnu9zcjy98gNdhnkxhAFEy3wmK+9hf4HHxKmHt53Zq+nnjNO+aRSqirsNIRN4qt5vJy6wPmjP/oj/p1/59/hi1/8IkEQ8Jf+0l/i537u5/jwww/J3qWspFA
ocOXKlf2/xSnF8B6WwTPhyeDgpPIwVIt0a8fD9TSWwhlEWyOtu/RCm21KIGDc2uWCuI459jy1dJFmU6UKrl+PU14a+QNDHLVqWX1jui6y1mCylMP82RnaWZXicRx1hq1pKvpw8EtVn5lG/umfJfx7/yuy30faNn5kYFey5KanuJyH5vIKu1cLNCplGg3t0NRkIQS59J2/peP/R50Wl50fMp7L82FjkjXXphmO8Gz4HsV8hKhUON/5PqWpp1jOlVlZUVGECxdUBKBUUtu/vq6xOvUlpmoNrNV1jGqZOengd3wifY5dfZqOPkIwCFnI7rLrFRnJejRXVpl7rc7NH7YYfPv72L06N/QCF+Q1RL/PePMtNtsdtvUvUKmM7FcFCTGM0kxPK0/K4qJKYRUKw1Rk3AjOH4TIIERkDLxIsNgdxzZ8ZqfbiJsGkRsSGaAZGm4/IrQ9IstEQ2BoIa0gjQhDDDMi0DL0I4m1V3MetTu0VpqELRfv7R+xbTyDkbFInRsnVzYpnb/Axx+XcZsauckSYd0nbfhURmCgZdAHGroOzzxz2C8Wp1iDAN5+Wx0vmqZe73xqHflP1cmcrFRZTM0hPJ/MziL2W6ssuz/FIF1F19Xj9XpK1MzOqsdbXQVdi5hofogYtBlUZxAIxtItimYfWbnzd+b9WgD6fWXej4WEbd9e7v4wOMl2Jb6a0+HUBc43v/nNQ39/4xvfYGxsjHfeeYef+ImfuOP9hBBMTEw86s27J0crT250xonk3sEtIJXNkV3epbJZw546Y+0vE04VXVdfcJMmtL+/xVaQwjezOKGNE9i0ggw5Y0A0yEN0kWwjz8yUqsxynL3oT5zyYhrzC7/E+NJ3EBuryHoDYZroC/MYr76MPjNNGXVmHp/RRpGqijnafFBLpRDFIlq1itQNbgXzYFuMpTqURI/SqEmx8y5NLtGiwsaG2oZz504+lkI6A6JWm1x7lZe8D/jj/C+xbczhyBJznY9Z6N1EKxSYyHXJz5ZJpZQou3JFCY2XX1aCQtdhhSrrz/88E2tvkd66hdlsc67fhLE8UWaM7VCj6aa5Fuk8W1ijQx7plsgtLZP5kzfxeiaNwjkqWpNWMEmpu0ba8pBM4X10g4lfrLC7q7G7q/bTYKBEzLlzStxsbBwWODs7Q0O5MAy1kUEAhnLLuqGJKBTQJotE2wHSVw38Qs3A0XJEuomuRYjAx/PTmMLFsCxCdIgisuYA2ekQXbvOpr+AMHXcwiiRZ6J3u4igQWXhHB+uV3GcvTL3jka2ZJPN2ngCWg0VVXnjjWFDShj2e4oiePdddaxEkep1Mz8XIf7v7xF2ukRTM9zqTSAElAo+Xnac3c0erffXKb1R5uZNDdtW0ZKFBSV24t5Dab+Dv1tHy5UAQdVuUzBU2dVx1Xoyigjf/5Dge+8S1eugCYRl3dECcPCzAcogPj//aFI+d7MmhGPTia/mlDl1gXOU1l7zhMo9ppd1u13m5uaIooiXXnqJv/bX/hrPPffcsbd1XRf3wFHWPpr0/AQcrTwZTbXZGpRgbzKuo2Xpuz6NG6DtPa0Q6mwibiX/WZpAnXD/iJEq6ZlRZhc/QpuZoh+mWHcqZA2XUEqadY1B8QJVWcDfK0EvFtViJIRaUOt18POjrD73C3CuQzXdozpuHns2mc0qU208bytuPhgboPN7xl0xNoKmaZQHAU3PZmdQYGdQYMRSpuWxTI/x2QrLy2rRV+bUiPOFGrp/j7Ns20K2WsiBi1ku8ZJ4l6s8xYaY4krmJdq9PM+2PiJlWxQK8OyzaqH6+GN1Vj4YwOuvq9cyNwdLVNmufIWJVxrkWquI3/sDzpVC5KCG3zNwolG6foqb3THOWyu4IsXNt3eZb16jXfkiZb3FFuN0zTyFQhet3WLGvcpqLcPuYoeRkeL+6A4h1CiOSkUJOsdRQmB8XDUKjL9eLAvMQhqqFcyNFl6+qszDYm/lTaeJ7AgtX8V8Zh7TuoD7nkHU07GjPqEXgZ1
GMw1E2MLAR9ckZbNLtLpOOPDA0vFyFTblOJqhY9k2Rq+NdeMD6ukv43kamQz7VWOapvxgpZJa9FMpJRphmLaUUkXLmk21OF+4sNfvplPDXVsnKI+x3JvAiwzG0w2aXk55xlJVqq01PvrBJUrjKQxjOMw0NienUuBteIgwxE6BJ6HmFihafXSGoZa4Wi9cXcP/1h8QfP8HSNeDVApRKqGNVm+zAMTHYCxsDEO9xqMz5B4Wx1kTQsdj43qf3sqH6K+k0PZ6ECS+mtPhTAmcKIr4jd/4DX7sx36Mz33uc3e83VNPPcXf+Tt/hxdeeIFWq8Xf+Bt/gzfeeIMPPviAmWOs91//+tf5K3/lrzySbT5q8CyYDoX/P3t/FiNbdp5nws9ae445MnLOM+UZ6tTMKlaxxKLmyWxTarfc7b8F/z8g2YB9JQk2eGUZtgHbF4QhGBJgG5Z9YzQaLdiQ0aJbslq0RA2cRbKKNQ9nHnLOjHnY81r/xYqdkefUOawqsopVpPIFApkZGbFj77V37PWu73u/93PCw/9n45CRp0gaNhNhvnxamxt0FN1pBS6EuSlWKob8+P49PvAYP3A4qn1RG1uUWk3OlyYQx3T2c6zKGvKJeXLXpJiUMuSkSJksLpoIQhTB7duSvFanTZ12B9zRTC9yN1zXEJ08n5GGnR3Y7tcoySUWwxhRDljwB8x7A3aiBqM0YH9oRMsnshJz0zRXlsHlr7fJ37zMG+0Ongo56e9hnfj2QnuBWQsoJBd5jQpDbnKaW94F+izxdNdiadksAh5+2ExWb75pdDB/9mfwwz9sSMb6Oly/LtlJWiydbhLMv0Tp5edYkXOkcp3YeYADd5X9sIwd1llt2Fj9La6XHuOMdYMrXGRMhRybK+JBLpZewgkHWMEYFSfMnTKk5uBgln4KQzh9WvH6cyFvfjXiiccVntdiPJaHZeYHJUly6iKicxk9GJGg8D1lDPraHUSwjJhfwGmVkHNzRBsKUo1fkYilc6iOD3kKgwliMqZU1ixk26hOl45YRDgO/coa3bRK2YrwrYyFRsKlzRrRUkpQ9uj3Z5YE47EZQ89j5u6sFVXVJ2iPUBOfl7ZadDqSycSQ6CK6l+9HhLFk0z/BICthi4ytsIUnE3ajJhV3xJvDs7RKMdWqT71uoltSmuo0radNTx0X6QjiCIQHTW80I30AcYxwHFS/R/bVr5O/eRk0iMUFRJ5PiXGIOH8ONRgy/uqLbD22AhgibVnmeni/iA3cKU0Qa6t00io7wwauyNFzGn3Qwb76BqefehbPf3sfq+Pq2/cHHyqC8yu/8iu88sorfOlLX/q2r3v22Wd59tlnD//+xCc+wUMPPcR/+A//gX/5L//lW17/67/+63z6058+/HswGHCycFv7LnEvgWcBrTWi06Gxfgb3kfodHYOVMjeb4dDkZwviU4gW2+0jnyHMl7WI+ATBcXjzBw3WiTX41Cdn4e5piql1bo2lZx5GrLYO/VjATK6bm4akpKlZJVuWITO+bybidtsQoWvXzHvm501a4u5rx7IMSSnSXT1VYRAs07+1T7BYYq05wZKwEvRQXpetTk64cIa9pMneG2byqgw3WX/lc4wHKdulB0icGleTOapXtlg++NwdQnutFGp71+x8rmAwpF05C9KilA95PP4Kl7zH6bsL/OULHmdSeOwxM1E++KCZnC9dMlGrL37RpKtardmEvf16l3ob6nFMXe6yVg3ICIhjl76cY9dZwi9lTLoJi3aPdj5Hbls4pPRpIoAla55aeoNT1gYbrkuvZ4jNzg6H5eU3vtXmzPaXyC6tsa8U8evPUS4/Tdi8yMFBjaUl813txU28Ry+QXNklGh/gTXro0gi59ABi7gFEVEIIcx6jSIIEvx6QyMJnx4PSCqrbwU62Ke1dRyUJvdYaVOrsJIsoJJ6bAxrhWPSyCjoznc49z5zzPDfXzfy8SU212xDvtFGXLzM3/BZxknKV8+z45xhWT/LgkyXW1sx9B2C
sAzY4SWccEAQp/aRCyxvQTSpYImNz1KBp91lcXEKUZsaIvj/zBdIaI7Cem0fv7bM+18WxZuRGa41qd7HOnCa/fBV1MPVWqFYQUpJJh6RWIxjskmwdsDH/FPpmhHNqiGzUOXuW98035yj0QZvB7S7X3afp7RtFtkCzUuqxFnQpLffQ/Q2cwQPg31+acFx9+/7iQ0NwfvVXf5U/+IM/4Atf+MI9ozDfDo7j8OSTT3Kl8Ba/C57n4b1PeaC7V99HBZ6q3UVWpx2G72LkUpobR3HzKKC1CecWoj+lzHNpOhMwHn62uHM7RdPAY3x/wjqxhlxdue9qrhAnZ5mZaIPAXBvFtRIE5jopmjSeP28mtY0N83zRFblwU777KyEELOabzL3xTQ66B+z3NONuh0uVKvZii9OVA6zuAWuNCvbPnGBHSEYj2N5SZF+7zXwfGqdaXBAd+mnAnmgw9M4wOOiw/KXXWPjfV1Bb22Rf/yb51evoTtfsrID1wUvc8B4klRZZdYknapvcSjUb2VneeMMc35NPmmt8fd1c72+8YSI5X/+6IT7r63DurOLNr16mnTXIHvkx5vdfpzXYIhOK1BZckT5jOcetUZUzTold7wwivE7L2qcn5rBJifC5pM/z0ewq3ollqqtVRlOx7O6u+Q7WVAf5wvOk6Qa18gpDKvT9ZZoHl9lr2+w1T7O2NodtT00DW00sq04SnsRuTPAugt1pwbaEyAxDmppzq5Q5R4W41/chUyXcEz6txRp2OmT83z+PDiP20hahm6HtMSWnz0Il4lZ/jggf35EkiZnwSyWz382msS5QCnbfaJN/83nOpq8j5ufY0itsdhfodgQXkq+zJB6i0TBlcv0+bIdz7JXOUupt0rdWqDtj4twxZpAoKkmX+WWLiVUlnEa6igamxQJOCEBI5p9ep/HV11BbI/Q97pnWhXOkf/FFZLXCsD1h314nx0EjcURCVllGTXKk0qByzrSGBOffe5fju5EkhkRvveGSjh5GVCsIATV3wvnKNlXHSCG0uLcp5lEcV9++//jACY7Wml/7tV/j937v9/jzP/9z1tfX3/U28jzn5Zdf5lOf+tT7sIdvj/utvo8KPN8phDCTU7lsxKQFtDY3vNHI3OyzzDyX57NO1Ue3IcQs1VUuf3BeD8d4dxBSflu3YTCTX7EGiGMTyanVIM8U7VsT4jCnWpOMhmWElLRaJsJRmNZl2aydRKtlVvRC3HnDba3MMddKGN7qsTWCbDzm+vw8cu2HOP0jp/BPrnCCaZuJl/v02h32S+scDD2q7gSlJWerO7TjKr1qhZ2NkIM/vs3K9S/ijTuGuI3G6E4HpKnmeWB1RLd8ggOxQKfdZW7FpfmUy9Vr5hgHA3j0UUPO5ucN4QkCM+G8/LKZhB9fbXNm+CLXqw8wdD3U6SrJRLEo91HaRoV9Lo/KxFmJG3qd89bLdN1FCHdZcW+QW+v0dQ07tLgy9wme+PFnWTshefNNs/BYWIC9XUV24zpOGLGz9Ahn7TYv9apcy8/w+MnrvHEtpfv6LvrHGhRpE88DhCR1ysiFMnpOwY0hWVdD7COFS5pK8nx2npNk9l1WClxXsliPUF+/zp5zCqGg7a2ikVTiHqRdUkfSm8xhlVwy6eK55vs/GJh9KJWMsP3Smwp16TIn0qtYJ1fZiZpcGa7SUxXOLuwwP9iifmmAfvjn6fZManRrS1I5t8Tg5RBvuI+qONxK1qgyQsQhC+UR3fmnEaE0TUnlrL+X48wWX+fPg20vkTfvf88kV0Sx4LZ3gT1vnVT7zIsOHRZosY8jE4RKOKWu4QUZXu3j7+G38E7kubn+bt40xwKgLZuSk3Dau8ZSI37rwnKaZhPBvXUG77bv2zG+M3zgBOdXfuVX+J3f+R3+23/7b1SrVXam3c7q9TrBtCTjl37pl1hbW+Mzn/kMAP/iX/wLPv7xj3P+/Hl6vR6/8Ru/wc2bN/l7f+/vfWDH8Xar7+8WQpibU6lkVvFHUbQFGI1
mzQK1njUIvHs7lcrscSx6+/6G5xnTuXxjk96XX0ZtKHSak1g+G+WTiNVVwtM12u1ZU0/XnbmmtttM/6dYfukFnCM3XFEKqNVrVMdjwo02W60LWM88zWZmwTQ1Va/DWn1Eiytslx4lymF70mSQljiIqjS8CWcbe9weC9JLV7k9riPn11kP9rBOrqGiEBXFkKaoXp9mo06lvcWt0gOo8xeRUvLRj5oKqt1d+MY3TBTq8cfNxP2Rj5gxeO01UzY/2rR5Ikk4t2acjQdpma6ukNoVTpQPSJwB6STlul0n1mWuqYd4wH2ViZwjiHvMxbfIxDL90iruxccYVFdoTvuO7e9PdXFhSL+d0qzUSXIbzx2itGCU+thSU6nAqDdmeKuD45hGVoUeRCnTRXz08l+S3oIsWiCnQR6OSNbOolT9Lb2siihI4CuaV7+OHo1JTj7MYGfCSJXIpaRuD2iMt9jaPc2kVKXUrIEtDz+76BT+iU9M2ycMhsz1rxLMV7g2XOKl3hkUcLq8z0IwYtnPUZub7F7q0VGmUq5UgjRrYl2wsHeucX2/TFPsYtnQXLLotJ6mulw7bFyqtbkfFfqvubk77133u2fmSnL5uT67+WNEaRX8PoQxyhUsiy0EcDK7ip0PsUZzyIcePGzA+p3ibh0MrRbtjuTqVSMlKGDbpnruxFqAytrk129Cw9iDHG6rSLOtn7nvft3dXqWfltAaGu7kvn3fjvHu8YETnH//7/89AD/xEz9xx/P/6T/9J/7O3/k7ANy6dQt5hCh0u13+/t//++zs7NBsNnnqqaf4yle+wsMPP/y92u174p2svt8P+P7MhOso4ngW8YnjGfE56nlRoCBQhc7n/RToHeO9RRF5CYYjLrTm0J5Hb5Tjdi4jbt4mqjzNpmgipbkOXHdGjMZjI55VvSE3bwpE8BiNOMMRGTkWLXeIrFQonZKcG76G3XyYjWiBODYai+1taOgyNdfhhNhCVwM2wxa2zOnEVbpJhS42TXWblcmr3K49Cghe658gsBdZPy+QW5uoThe9t4+olPHOneOhZx6mF7TY34dBX7FSGlBt5VzfLXH5kkenI3n6aUOwHnnEXK+vvw6b7YBh+gk+MbnE+eo2V0YrVO2Q3djYP6+J2+R+FXEq5/KGR0ydq+pRzuorOK4kkCm16hzZvCEKr7xiSEGrZQhOnkOrNGEnk4RWGZuY1wYnD+e3vbhGI4gZjTx2txTNB2aGgEqBGo3ILz9PmOwgyutouwqRTd7uEg2uobNHAcdEZzNFHufoSYpwJb5KsbZvM26sgi5xpfwQWabx0xFWFhNZZYaygd2oEOoS5Wn6uihbf/RRc661BlvFNPI2O/ohXuqfYZj5XKhus+CPWAs66Nxje99htJOzP+2+XhAWu1FjM36cheUQX84jXIeBVWK+Ze7RQWCO17bNsQvBfbUxR++ZSsGNm4bIjkdV0soC9Ho06+DHOxCmnHC28WSKHgyMg/X8/D0lAO/2+1NEkgahzdVsnZG3gFhZQS7Og5AsLZnI06zwQ5J/B9KEAvEgZitsEgYnYDgVR8uchjvtUnqfvm/HeHf4wKcxfdT56D748z//8zv+/s3f/E1+8zd/833aox8ceJ55tO5aRKTpjPiE4Yz4jMd3rlbA3Jx8f6bz+V4I+I7xznGvULcA5uo5zVpGdvsG3V2H6kd/DKUlvZ45555nUi6WZUqGa3LIbRWSuDX6iUc7LhMrl3lviGelnPD2cNIuMolYXzfXy/a2iRD2aHBQfRJv7xZrp+Bkqc1a0GYzbNGJSnTaDh3/IbpJk4Ugp+kd8EJ3nW5SYYtnOHtqm7W126jtHdyf+SmsjzxmUmtAZbTJ5T+7jTroYGc5Dzmaq+7D7IerfGFS4uJFoyl58EFzfb74okv/YIkvbsMzZ7Y5XzEkpykmbIUN1LjNiXWJfdFHeXD5skOommxXn0A1I06uwcpclWxDsrdnvg+vvmoiRidOGD2T9D2EJYljQdmWjDOPBW/
AQVTlxmiJh9xLbMgzdMKA0zWzcJhMIE0UcncLHUakqyeQmYPKJMKy0bUmeTtFqQw7sMhGEWk3w8vHJIMYy06p9HdQgwE7wWO04yqhKKFswXxpQM3y2MtOMAmrBK6FG5hzOxqZSFe9bn5u3FbowZB1f5u9rMzX9s8x0p6J3PhDTpdNWeftfp0+gm6/hJxaWYzH5tpJU2g0JI1mmcHAmLHWaobMWBaH7S/grVGbe17DGjY3FFuXx/R7CiwbHQTMPbBA8NpNdBiy1ujj7t6G9gSdZVAKsB9/BOdnfuq70qnkG5uM/uDzXO212BfPoCYTdBQjsm1qO2/ywAOSub/+7D0/491IE4qIelEFqftVUllDxMZipOUNaTij2cbfJsV1jHeGD5zgHON7D8cx4erCjr1Alpkb4mhkblIF8QlD89jbm7228PIpdD7HXj4fDO42mkyVRS8pU3MmeFaGPd9gvvM6a61HUHMLbG+bc1uIjrPMaHPqokpuBSyKfXNTFTDOMg7iCjrNaKeaei5ZmgQsTcWiRQf1dluy+8AFosGQKzci7HrAyVqfE9xmedRjq3aO/qnHab+W0uu6zJVjzlT2OIiqdJIq10YrXMtaPFp9heW11cNVb76xif7c5zg7HNGrn6ZNizxJOTV8nUGyz5b4CC+8UOLgAJ5+2mhLymXJC3mV7VdivnrD4rGFTc7XNrjanWdu1GHHOYG9dopTpyWawgtI0h56eFUPawQnm2ZiLrqN27YR6BbmbO5chbmWoL2X0XMDqk6I0oJIOZBrvO4OsvYYI8o4jlkg9HqgwghrNERXqyS5Zpz5h6agubYIvRJqonFUQrbfReUerqNIRRnX7rEyeA21t4/yh9zWZ8mVxLMSfF8wEXOM4hK2yEm1hT3V8g2Hhtw89RRc+UYbdekyp4cv0Ys1Xxo8xVhkLNQOWC31OF818oCrw0VG3ZBO9TR2KTj0+ul0DFnzPDMW/f6soWi1asYyz2fk5ty5t18Q7e3BrZe7dN7YJR+M0UpTcyY0FlysixdYfWYR+w8+i9rchCQ1b/I9rJMnsX/qJ75jcpPncOum4vrn+iSdB8FzodellA45Y99kzu0ien143SFRfdyf+5/uS3LuJ01IU3P9DId3OhgDlJaqNE5rnFsvI+ffWn37dimuY7wzHBOcYxzCtmc9k44iz+9d0l54+RwczF4rhLmpFamuokT0GO8P7jaaBE0vKU8rW8AmYSG5hBtGh9VTYM7b5qYhOmEI250quX+W4f4mlXkfx1Kc5TLdTsJgLOjmdba8Grt/esDCjqR6cu6wEqvVgtYPt+gtPMrGV26SHXS4PnARdsDJU/Oc+8TjsLzC1f4NDjb7tJ0VOkmFljvkQm2Lrckcw0HKK42nuXapxUcrdzYglSdWaYmMutrn+ngJ6bnU9g8I0tfZrD/Jxoak3zckZ2EBPv6zdV70FNdf8Xj+wKPXu85DpTe5tfYRnNOn2Y7nkBsmIlN4Cm1umoosxzE/l5fN96BwLX7lFeO5c+aMIUTWuXVk/yZZd0ytnrCvFvDUBD2O2Kmt07y4yAATMSt0biLPEConsQOGmWaQBES5YQC5lkx0gEZgxxNSLcFy0TLHloqSn9M4Mcd+b4XJfp9+zby25Y7wZMx+XGecONiBg+M7uK75Xi4twUc/ashN/s3nWUpuQqvCF/MfYuzb1EebnNl+jbN+igpaXO0tMOgn9O0l7LVlnGxCvJeyNwioL7rkuaRaNeSmUjFRmyCYEUAwC6ejPcruhV7PmCLuXx+QX7lJFufUqopGECOznOX2y5S/9g3Tcb3XhaAEcz5IiYgj1M2bpP/1s4j//X99xySn6AN35co0cj0OyXoTnMChMbyKjIdUg4yW6COQ6GoF4gR10Pm2gt8izVak/3evcodYHMw9sNUyY2Q2IcntJ0i7u99RiusY7wzHBOcYbwvLmhmFHYVSd5a0F8QnSWbi1QJCmO0Uqa5jL5/3BncbTTpScbqyx17UIMxc0li
zqU+wt1PDio0wuDCRPHfObGM4NBUymbdA7+sdtvZMe4H+wMaLxtT0iBV3l07rEYaDTfa+GXLQs9ndrR1OZnNz0Hhghfr5JcLNDjdvanBcdmpVdkaSlTGc+6l11v77/+BWr083WOUgqtIe+zTjbc7XYjZPXySOTWPNCmMe3tjFmUamMiXpJhXOV7bpJBXatQpub5czDw7Zj+qHnjjr6/DEE/D0TzcJluu88WLC5fwU4arm2Z8usbklkb1ZX6CTJ81kFEXmer1xw0xApZK5RsvlWTr3lVeMqNmyoLQ6Rz0T9C/v0h71kXmfhjPhoHaarTMXufBQncEbJqVV6DZKFUksJWGssUsKgSE2AlAIwtRDAZaO0dIi14JE29gqpWzFWJakv/wwtw7mUFmGZSkq9oR+5DEcmR5PVr2MZZnoQbls0kO9rqmYKsVtKqca/MnOEwzSEqUg4UJtwtmbV8lvW1wP1+hjM6otYdWr5Fs7hIMhnaxGwxoiR2WslVWSxAiJK5XZ4qcgN28XtRmNjC/T/r4xdo029imnY1aXQCBYLfWoODFKNcj/8pvowRAqZWS9BkKQYmOXAhgMjfbsL7/5tpVGw6HRZ92tO6z7EZbYw3Y88smEobfAUDiMqbLOVaRlgcqR1fJ9Bb9ZZo5lMHhrlCYIzPjfr33Je1l9e4x745jgHOM7hpSzaqyVldnzR718RqNZNUiRDul2Z68tvHyOlrQfE593jnsZTboy50SpjdaapLdLZ/lRoloVpWY9h4q2DEWH54sXQT/Q4qB5lt2v3yB+8xLdpErXP4Pl2bQaGq/icsIeEe9cZtTN6DeeYntbsr1tJrh6HU6ckJROzvPQtLnirVuG8G5vA6zR+MSnWL/0NU5uvsjteIE2C3RrZ+ivrbBwskalMm130Fd8efgES37Og8EWsXIOI1ML/oD1RptrYw9HJ8ZosGI+6/Jlc309+yw8/rjE931eegk2e/CnfwY//uOziMrWlrkuT5ww1+jzz5vIxI0bZnxOn56W3+dm/13XbPvsWfM57oLxtpGTCc1SRCcMiJIAryQPo6C9nhlbALdeIq1VGPWG1AIFQqO1NJ3ElSTMLBM5QKKkjaU0SW5hWYpFv0+mJLpWZXd8GiVcmrSxRn0mep6xVcMueThSUikrJqGk2TQRp93LQ3Snw+pCxue2n2ZnUqMsQh6sXefCfB9dfZiruzU6Z54m8epYeYa6dp0wksRui2olIRCCuDvAmYxwH77AwlqddttEa1z37aM2UWTK+adFsoQhlMWEk9ElZM3Hkg5aC/RUrS3CEJ2mkCSE7io3xKM4JEg058VldCmAyQR17do9iUcczyrvjsL3zT3G9yHv2nRklck4oKx71OQQgcAmQ6KmvS0sKJXQg8Gh4Hc4NNvNsru+i8J8D1qtd27L8X5X3/5VxzHBOcZ7jqNePkdRpLWKiM9RL59+3zyObgNmqa5K5QfDy+e9tmV/O6NJt1bh5E+cxzphPFYODsyke9ikc+fOBoALjywz1xKE+1/jwHYZuFVu50tcycqURhFVu0q97GF397n4bJe+aDEczkwot7bMDX552Ux6Z8+az9raMue85yzRe/h/pnShz/nqiPO+z81Ri4O2PDQinJ8HGcDWJcneyGcvfoiz9i1abHCg5tmjikg8TvtXiBctOtqkk2zbEJGDA/jjPzY+OQ88YEjKN75h9u/zn4cf+ZFZlaAhXrNu10X37Nu3Z5b/aWqO5eAAXnwRfuzHZte270siKgycChqYrxlCd/u2+YwkmU2oWkvU8irhaBvalyHwybVGqAw1GJOxBtJCaEGWC2wrI9UOjhWxHPTYiZrsjSrkuIhymaVzNsP9KqODHCtJsIdjGCsOumVW130+8pGqqZpKEtb1Vf68/aNsjX1UlvJg9AVOt98ka1e40XiKbV1HiQpWEJBfukQ3DHB9gaMTPBWSuD5OXVEdbuJsQ7fxUVrTqqmzZ+/dCgTM2F2+bK6zopChMGvUByFprhC
eg55qkRxpcjs6y9iVq1xp/jjarSGwWWKbJlOn0ylL1VF8SDzy3HjV3LxpossFbNucg0rFPN/vm2hdtVKlMe9Q37yJYsRytk/VMdvSU/GhaDTIpE1HLBPu1mDWhWd6/k2UpmhU+53gg6q+/U7w/dZW4pjgHON7BiFMuLYI3R5FFM1SAYWXD9zfy6dcnpGf7xcvn/fLlv2dhroty6yyl5bM+BZpxEKbsL9vxrYWaqrSYnlVsCK7WEOX/Sglzm0GacBOdoZydMDwuqS5bgjJ4qI5d/v7hsxsbRlSMDdnIiRr08MzJEYycZtcjZs4Cs6sm8eNG7N+T1Blfl4TbW4z1CWuJC5CzfFQ9i0mlUUSGXD75ONUvAbn1kzao1IxEYv9fbONr37V7MfTT8NP/qQhOfv7pofVM88YgbQQhuQU6aokMZEc09dr5vocx+b4+n146SWTqnrzTTOxhaH5f7Np3jeZmEhZ4Z1TuEx3u5C6NdSKjZV1YT9CJQla5NCokY9LiFyi8MjTHEtoJJqqExJYKZPM4cZwHiybSk2is5xwf8QkreJaCukIUDmVuM3K5hZ7b34E2Wpx5ozgW3/+IDeTJikWH9Nf43ywCZnP1dESu7EkDhxc2yYbh3Q7UMr7RH0HpQUQUfLGzDcjesE8VqdNwxrSaNTvMCM9iqJT/eamIY7D4YzYSGnIiOO7KFugh2OWgh41PyEXJV4frLE3PE/m70IU4+mYB3mVBr0jX6apyMXz2BuXufqlO7t2az2rHgND6jc3Z881myClZPFHTuP92avk3cvoSYpu1EApxpHFgf8wefME7IXIpUXsahUhzHtbre+f+857he/HthLHBOcYHwoUXj7z83c+nySzVFcUzXQ+RbXXURQEqtD5fJi8fO62Zcdzodsje/V11O0NnL/5N7BPfef90d5tqLuI2szPm/Hs9WYeKb04YJ9ziHZAraI4U97nbGWXg7jGflyjM/JJJOwNfLZeNRGSZlNRzoecCiIifIZU6XYl3a4hCq2WIRTFZw4GhlikqRF9FikhI+KF3V1J2z+BCvs0wuuMy4skbsBr8qM4wyEP6lfpP/UkUWTM2FZWZpVhKyvm3O/vm9V8u20Ewj/8w6ZL9s2b8KUvmf5WhSN0EWE4fdpccy++aKINRSSnKHe+dctcp92uOaZ2e9ZjKQzNe4voQUFwNjZmK/w0Be2XcB57BufmBHFTIn2JWHTJX5TGtdgLyNOEPI3xrJhFt8t4mDPamzAWFXI3YHUVem8OGKYewrYQEjwZo6SkWVFYkzHq0hUWfq7Jzf0GL6uHiLXkI/JFHpRX0EhuOBfZtpdIQo30MhLpMdnuURrvE1sB2rHRWJRVl9Zoi/1kkcWFNqjYtEZYfmtrBK3NmBWuv6ORGa/1dTOOSWLGwrJADRKWJ1cobV9h5LX4avA0sV+Dsg+uQ4M+FydfxhXOoQbHfIZmGNpc8n+UqPwA9lYDBKSpwlMT5kshwnXp5VW2t0039UbDnEPLMtfhLLq8Ql76n4gcn/2XtukPquC5iGoJUa3CJMQr26z8yClqFz68kYr3G/mt2yS/9/vo/sDcV1ZbiDj50LeV+BBNAcc4xlvhuhy2GjiK+3n5TCbmcTT3Xnj5FDqf+4XT3y/c7VXDcIi6ecsIKPOcbGsb9X/8X4hf/v9hfRck592Guo+Gm+uBT+OCIUSDfpXbr8+R7+wz8OYYpkYlGdgx5ytb0N+ifeoxuhc8uj1IugOuv9pDhmNKesS826c8X6F58TxJdZ79/SOmgA0z8a+uGm1KFJnJUOuZ9mVlBc6cVly6cpOdwKbrryOSlFq4y8hukVabvCJ+jMrmhPMfVfT6RgckBJw6qbj16pB5kVBueWwNK4xGkj/5E2MI+Mwz5lp480144QXjSlv4gxYeJevrZiJ+5RVD/DY2Zq0GFhcNEfzWt0xUqNChHByYa7GIEMx0RybqU/Scy7Lpyl9IRKWC8gBv5lgsBAjbBU+SpyD1gBPjN9iMGlz
3H0aoMp5vkXV6JMMJEQt4QpFrwSTzWS51OVfZJZQVRGefaKfHl75UIcbnwfw5Hp58jd3gFH2rQVvPk2UC7diElNH7I+z2LgNRw7YlSjisc4XU8siDCvPhJtWDiKVFdc/WCDs7JmpTtJRxHDO+lmW+r55niKcQsGZvYb36R2xS4/nm/4JWGoQFk4jTo9c4Xe0il8vo+ipqaxvV6ZFUmlyVF+nkdZQNslRBV+rk45CFaojYukZ3P2MzswnshOaCg33xPN5ii5WVt6aRxmNzzpNkDZ792+Srt5BXrsFwSM0aMu/exjm7gv3M41gnVt5yvH9VkN26Tfx//F/orS206yH6fUStilxbRZ74cLeVOCY4x/i+xP28fPJ8Rnzu5eWzvz97rRCG7BQRn/fLy+cOW/bhkJ0rQ/r5Cra/Ss0aUQv3EFtbJL/3+7j/2//yjlZC320u/NuFm2sn1njwZ06S/uFrjHrb7JfPklkekzHc2LERwQOULj7Ew49I9PYWt37/BQ5Cn7E/TyzrXEvmcTbG1No3WfyYYHm5RZYZ4ln0Tbtxw5DWkycN0ckyQ3TSdCr+7Q+p965z+pTmWn6avUGTg2wJ386p+BnDyGXUjnj56xMW5nICKyYbTLj6lR2C7hZuNiaTLU7PtdisPcQwr/HSS2ZC+8QnZrqcW7fM9fLMM+Z6KCI56+sm5XHpkiEvQhiCpLWJ2HQ6hiBduGBSH3luriU57QA+GJjJvPCQKSqpimrCgtAU0Z5Cj1acQmHb4Jbwag6rP/mjXNmrsf/1KkmiWM9u0buWMog9hBWhUOQioOynnCh1iHIPnJzG5ID/94slwhjWrZs8earDjb2Pshs3iDMPhwTl+wxlCzsakm3twCRBao2Kc5pOh4HdZIVNpNCctG/j7O3C8mPm+lMKIU2U7tIlGA0Vo3aE1Blry2DXK2S5JAhmjsZnzoAUipf/z032Dh5FNCuIOMEdd7g4/EvqWQcdx1Bbxf1bv0CO5Mrvv8atTQuVppCDshxyv8Si6OFufYv+7Qq7kcJ3FM2WheUpnCxiofMq5VdfxTn9SazSGnluCGm3+9aKJ9eTLP7IGcp/7dR39L36ftOmvFPENzfp/t9/xmDfJax/DFwHlKLR32J+cgV54fyHuq3EMcE5xg8ULMusoOt3Rc+VmqW1jpa0x7F53MvLp4j4vBMvn293gyu8avBc1M1b2KlElxfJhKBDQCeYRzHB6tXw//QG8z+3Qr0h75vj/25z4e+4i/GnPkn169+kvHkZHaYkVpn91QdJzz1IWm1x47oi+9otRJjw0OkMKUdsTlp04gpj32E4cGk/F1I+l9PwQhpeSOC5qFKVTlceipzrdSNKPnPGjPPWFgz2EzpRhW65SdWNObm8z9fbF+klVcZRTtmaIJMx6eUb7KQRJDHL/dfBdpgsLhI2F1lgn/39PU4MBwwe+Bjb4wa7u/CHf2hIzk//tElVdTrm57PPmvHZ2THpjLNnDeG6ft0Q40uXjGNyQcJ2dw0RKirS9vfNtkxzW4UahZREyGQcMJkECCGRcpaqKaoLwZC/4vfip5SSuSWPUeMkG1cgT1JkEqLTEbmsklhlPCKyxMK2JizKNo4wjGlJ7PLfor9GZFusLWc8vfs617Oz7DXm6YcuVTlCizrD1Mcdt5moMj6a2PKo0kfFKUmmWXM3qDt9FifX0d0uJClqe4fkd/9vxgtnuOo/xkBVGQ8ydH/AYnwLO4+Jb3n4Cx7uAxewGy1OnzbRum98A6JOSLYnEIFHxY5olUesrvVxo1V0tohOMnbiOrdfbZK5Fbi4AqsT0l7MnDgg2L1OP3MZBCu4rqK+ewk56OKVLFbdgKBu2KSeazG63aXzpzdQH1sBMSMcQpjrbn7+7lT2uxf85rduk/7FF1Gb2+ZGU6lgnfhwa1OOorgPvkUDqRXZ126TD0pobw5cxzilWxaVikYPErNwu3AOnaYfyrYSxwTnGH8lIOW9vXyKyo5C56PUzMunqAwqUKy
+C+JTKs26cH87wnHoVdPtoQdDGp5LQ1zhOuuMqZEqgW3ZBJUq0V6f3StD9qYMrUivNRrmM/XWOyQn98G76WJc6HrU3j5qexcXTXVlGbnYJMth+3XTRVxU6myFHsPMRyO4WNtCCsWGVaHT7RJf67MzsbitJGUrpDUPrQsnSCst0nRWQXf1qplwTp+G1Yuw9dyIblJhLHzGqU/LGzHvDthP6nQGLnboUWWAqtTQUcyWcxopBEvtHXA82uUlZEtTaV+Hmy9SufAw1/bKRKHPn/2Z5Nw5+Nmfha98xZznL3wBPvYxMxw7O2bMz541EZitLZNy8v2ZUHt7G557zpClohw+CECMBrhbewwGMZbokgmfvW0f79QJhGgA5prL81kU52ivuCKa43kmwnVwANeuKvIoZVEfMHTnGSUOlkiIlI1tSRayDZbGN9GNFVrugP9+42OEVoW5BYsf/WmLq//Po+zsSkZemaofkaoyUW5jJ30GqkbJS0ncGtV4g8QuU5MdFntXOTd8FdfV6Mm0R1K9RnrmAq/1lmm/qAn1TXK3xEJ4E1dm6NYipfkSJCn23hYn4jdoB5/iC1+YVRWoNKNBl1IlQEpJpozWZ+C2eCM6SZhZ6HCMGCsmETSbkvmzFXrdEuGr+6R5lbkFiSNHeOmAuclreGWFDmP626fZtddItQsadNBC7/VxBkPc+TqLi7N04XuB9LnnSf7rZ9G9Htp2wHEQkwn0+x86bYpS5rorFnrFve6+GA7xu1s4DU0+6qFzn6Y9wCMxvddKgUmz9/of2rYSxwTnGH+lcbS7+lEUaa2C+BSTTpbN0ixgOkOr515Dh4Jq4zSVZkYpHd5BOOTqCnJtlezV182sZtsIwCEn0GP8OERUyohqCdXpMR5kJPm0g3Qw25ftLbOiUt0FSovr1J2QqggR9yAn9wuP393aoR1XCHOXpjumbMdvCTerre37kre1umJeXEE112inDXajGrm2eKF7BgA/7XN270+xW3PstR6hrZvEsc3tXQfR7lE777K4Xj1cQQ+Hs9L1WrXFfGuV9d2XmNTPshc38aWx6m84Q+r9q3StZbrlszh5QindJ3Qb+CLiVnqasNekJixW2aY3tPC3XiDZb3Mu8Nn2z9IuneLq1RJ7e6Zs/JVXjDj2y182FVLLy9P9qJk0VNGq4MYNQzykNGRnb8+YyFWrplJs+8qQ/rUdqlmPkbVAWhKILCM8mNCavIC79hSZrB46ggOoXBGNMtAWKEhTgZTyUMNy5QqMRyYl5Fg5cRwR5QGeihFK4uUDVvLbyGyMP9rnT4ePMqRKpeXyUz8luXoNdkrnGcgOpajLKK+ChCxRqEji2pmp7hI2sVdnaXKDdW+DpepttGUhbBsNZE6Jy42P0TlYIxymxNplPt/GHW2iLZuSGiF6IUF5jsVmwiV7jS8dnEB+tYt10TStbLXAq0B6PUWkNrZv0c9K/OXBBTMWWjBKLHyZstwCX86+c01nCOE1SnMWS7UQrQXjbsSWPMGus05mW+hYURmUaFYyEFArpTSj65SXH8M6/VZR9HeD7NZtkt/9LKrbRcw1kY4DWYYejVFJYl7zAWhTCnH3aDTTJ94PxaKtSCEqNSvmUHHCJHFgbgFd0ejRmKalZo3TLQudZ+iDDtajD38o20ocE5xjHOMeKLqrl0p3GpgVXj6jEQwGivDyZdQkQsy3GCEYRQDz6PoZ9EEH+ScbNH5mhfIjz+Dc2kJvbSNCB3yPk/lV9CREeQHjpccYqQmRY+HVbCpH7sVKTYWbexOs/ZSg1CDKLfqjMhqBJ1OEhKBSpnZ7j7n9NtbSvcPsd7d2iHKHMPMIM/O3VjluuMdSO6ESb5L9v/ePFtkffwbhOthJyHIJloMe/STgymiZUeoz6cW87n0UUVnBF5IzpT28esbOpEnnQBNvC67pMlJKajWTFipuroOhpFd5kqu7FZobu6wvbCB9h41Bg6C9g84niEVNs7xPZ+jQZgFPZCiREtkl8lTRG3g
MoybNJGTBDrFqFQJPs9K7TDPZ5QofY0iJz33O9GoKApOGeuEFE0VaWTFRmnLZ6G9eeMGQsEuX4KGHDMHpdIwG59lnDVFJtvbx0xFZpY7ILTJtYzkC4djkgx72/jbRYpnJxBAYFcdkvS7hxAcCRJajMwmejefZjMeGQKE1FXoMY5chFRCKWJYJxIjlyU0qk22wJM/3n6ZjL+LXPX7ykx7Xr5tjmOQV/FVJfy/AiweEysNnQmL72IFLLjwCK+FEdcDjk2/gDKblT3lOluZc9x/nhn+RQT6H3R3TCvdoyAQpoBruIpoNqm5KPu5x6eAcV1UDgQA/g1GfpfIQu2kuauVWSRqLtHcV1CpoBJPURwjNvNdDDhNoNBmJCnNTI8FyGebjIW21z4Z+mFd35ggzF1dNCJxN0BZCCJrZDuvuPvWqjRCgJxOUI9CTCfnN2++ZPkYrRfbnX0T1+2RziwzdeYbUKDsjFmp76MEQFcWw8f5oU4qCioLIFAuw+6EgMkXAtnh9sWi727QQQHoujqsp06e0rPCuXUcMI3Qw7eQaxYg4RtRrH9q2EscE5xjHeBc46uXT0m3i4bcQy1VEKWGUerw5XCXKXTJt4fgB3t4AdXPIqLWCevpvkfb/At3tADYlEVFpKqqrNZo1qG+8ibV+BvdjVRDmxtXvz7ozB5WQTPUQfpNcCzpphW5SxhIKT6Y41HEnVaovSUprs3Ra0UlaiLe2dlgrdUmVpJdU6KVlSDNiGbDVL6G+9iaqs4C7eJ6WM3pLtCi/fBWxuoK6cevQRbnuhjw1dx01GjF4/RLXWh9j6DqEueS5jlmlO1bOueYNytG36LQWjIZjbFbpjmNSQ6USxFaN9NwF2ltl9vdGVBiy5u0zt5KyQRmr7iBEjPZSQgQhVSaUCcSEUjZgHFtoJTjwTtFVK5zUEdIG3QK33eFRXua69zHGY8k3vmEIzQ/9EPzlXxrBc7Npqr12dsz5LkhOFJkqrEcfNddEUVV1frHPSnKDHX+VWPmUrIh+5FKTQxxLMSotwGBAWklQgY+MRujehDwPSakeuvhqNDKeMB/AYFCj3YY0l5SSASNRIZUeWkBASEt0OVk+QOQuN/wH2Sufwy65/NhPSW7eNDqhODY6k3FUwl32GXQbNMopYdhAdHvk2FSdkCfnrnGm0kbVTqE2t8g7PW7pM1wtP0bPXyHWHqWsx0J8C8fOqeddwtynFO2zr5Z4TXwUyhoriSnFUC8lNKoD6PYQ6TpRZAS+Skny1gNM2lukfU2rPKHi5OhUEfUS5koRpSfmqZ+RzM2Z8b1+Hb6x0SJKPoEY2AjLxpEZzSBmJdxgqfcGjm9DnmIHDyOEjdaafMPYVSd//KeQfvfeLUlivpPdG0Oi2z6q9DjaMVVqDikeEQiBKAWoaTnZd6NNuVcD5PtBiCNVepjXFoSmIDL3ek9hhFguz0r4AbSqkmxo8uvXTPXnhXXyze3D6k8dx8jVNdy/+T9/aNJwd+OY4BzjGN8h7o6GVJyYeW9EkpuvlVY5Oh5CkppwsV4gefxnUW++iRcPSMoWYdWmnWTo6yNE8BD28kcpb8hDE8OjegE1B73n9xlqydBqsuT1qVgRk9wj15Is0fR0g4O9EtbQpFI8b7YCDgKQsoXffJjKzpuUT/hIKXCkYsEfMO/1Ub0t9JmzjOo5e50OolohUzbbkwZvpmtYIqfqhszXLKqb27g/8cPQ7rzVRXlzm7KY8JGTHazG64SpzYv9dbYnTdLc4vXJOjpaxd6QnH1kZtA3HJpKF6XMphpzNbJaBTEKCfOMq7bNNaFo9r7KGbFNz5lHl10WvDG9KCF0asQ4TMQ8QT5CSkGoHCK3zuVkmaCfcq66i1stk3e6PPDIkG5e5/JlE+nodEw05hvfMJNxHM90MI5jSM6LL5qI2htvGI2O75v31rUmyMfkroubTeiENpPUop7tkkuLxPFxpCKJFUGgULc3UVGJHEUuDbHBylB4uMSs9K9z5crT5LnEdxR
jWWeEuSAE4BGzxiY2mh37BLfd82BbPPusZHPTjGccz8q0TfRRUqpJBpGD7Snc0pDF+BafWN3Ct415nqhW2Fl4jNfyFh1riQiXIB8xn23TEF3q2S4df42eWGTDWeea9zBSSDOhIjiVXqLsziPdMuPQps8ysleCxEQdoggajRrVB0BtbZEPFc14E8/OqZ8uU37yIhtJixdfvMvryvao1CQLo6usr8Y03MhEabycfCJRnS5ifh7tuTCZGHLTbiPm5pC1KrxLvVocG9Ld799Z7RaGMNrSxNkyeHVcnVIWMWtsUGW6w8WgS/m22pSjUeHR6E6j03shz82jIC9F7y+t7zQfLP7/7UjMt8O9XNLlhXPQ66PaHaxaFedv/o3vytri/cYxwTnGMb5D3B0NAThdntWh5+OQsZ+TLFpMpuXoeqGOWjyHunSZtN1j0smJZIm0uopcW8HLa4x2zU31aNNCU9Lewl88RX3zMksnw0OBcKIsBklAf3tCurJG+oDHeGJuxkUbjN1dsw3Pk7ilJ3Eo41/vU65aWJ5FVfepDDYp1yo4H38SPw+pcgXZWkEJi15SZidqAC7DLGBTNWC8RnVzjfrDK8zdfoHa/hVENnVRPn0a5brIqelQ4GR8fP4yAFFu8creCreTJlpYXLs2O86VFfPo9czN3vgZSYKgTLlq5gxQHASn2NsdUJlzOBkcUF+Y0NnYpR9V6IkmY7dJnPtk2qUkQoZ2iSh1iXOHV3onqdljzmRvkoUJlTmTonrlFbNK/upXje5md9eUe1+7ZkhOcU4efHDWvHFrCxoNRf8g4+YteFinrGQ32BxUiJnHlhljt2VSTNEBQiuyWJF3++j2Pto+hZIOORYgUblAkONZKXQ73OjnZJmkVskIJw4qlygEvk7wiGmoNgdxhUulj6Asm49dGHJwMMf+fjFWs6a4vm+ei6KiSlDy0cdsTr38Cmp7hG412R/7vLKzxF7aIJZlPAua+QGt4XVqpYxdscJe6Ul6zjwCjZtPENKlnHY4Y11CSk0iYCdbQE1cGIyIqvMkUUBJznpBAcydqRE8VKGc9tFRzM12mathmfSVWarDtk0k7dQpOHlSYu35pH94C7U3glYTXZjrVKtIIRG1KnpnF23boLUhNw+c/7ZieiElYTgTuxfkIo5nvloFgqBIWwuycheyAU7Yp1pKqYgZG9NZhshS5NrqoTblqMj3aCXnvZBl5vOL3fb9mYWAZb2VoJiefopS0qckQipNB3vpu0/FvcUlPU0RjoP9yMPfFw1BhdbfVkf9A4nBYEC9Xqff71O7u6zmGMd4h9BKkXz2902jy6MVSRi3VbWxZVJOv/Dzb7nRRKFieLvHsJuRCM+wn2kZayH0iyKzWoSZqNULO9gvPQdRhKhWEK6Nk0WURjtUq4Lqz/8k1gnj+VG0uShu0uOxmeDyHMKDEeFOD8IJro7xrAy/4VM5t0xlrYk17hN8+XM06hq/Mo1IaRhlPu24QnfkMpxYiIcfQZTLoI1QyJMJtYZk+VyZypf+EG7e+LZjI37u53n9DcmVKzP3/QKNhkmvFeH5aBrpDwKwoiHZtdvY6RirZNx855Mt5rZfpkeDcXmRg6jM0F8iD6pk0kVoTaxdciVwibF1xonHGpy6WGEwMBGzjY2ZpX9RKVeYRq6szJyKez0j/tV5zrK3hx1FRMphJb7G2uA1rpafIPUqxMIlwaOiRzTjbbQQ7Mw9jptPKLdvsl06D0JwwDwgsXWMQLHGBqes23zD/nFwbAInZzBQaBQWGbWsx9PRFxlaTd4MniDD5WHnTfIHP8LBpEyWmX11HDOZVqtmDB3HXEfLy6bxqO+bKsDOl17hhSsVtsc1QlHCsTVBIFgO+vgHt9gZVeiUTjCym6AU5aSLEILT8Ru0nD75KGS/vE5medi+TVxZYBJJbEdQObeEN2fus83mrDfXaGSqz+7u8l20cjl/3uiy7rZouKNqcTrhyrU1rKefRPq+iaxOJiR//KcklTkGdos
wd1ny+vhWQpi79IeS4Uhg/8gPo6v1wxYbR9s9FN3kCwFuUZBQr0MpUKT/7ffJXnsDhkN0UUJnWSSZZNTPCRsnUJ/6G4j5e+tvChKTprNqyeKz7oVCR1NEYsrlGdF5v9sofJh8ft7N/H1McI4JzjG+C9zpKdO8o9GlrFZw3mWZaJ7fWcp5dJVXhLInOz3G13ZQgxFCK1wrI5grUbp4EnfRrBaLsHSR6gpMgOlQ1zMcQjhRjPYnJGFOhkMsfOJk2vTQUbgb13AGB5TnXMpWgmPl1J0JVXuMs30La/0M2V/7edodSbttyFSWAVqhJyG638fevEFFD1hupTTLMXYa3ndslDKNGV97bRZ9KBAEpqKp8OvIMtDhBKe3C5MQqVI8kWLVK1SWayyfgPHzlxl1E9rlU/TTMomyybUkV4I4EUjHwluoY9uSixfNxDUem3F7+WVDBhzHkIHClbhwws4y2LkZsrkNGjjh7JNKDxWnrB/8JaWsz+25J2lbi0QEnEouUbZjRqVl4swi04JKdMCuc4pcSHrMARKhU3wdcWH4HNvBOXr+IoGVkTkBSaRQCMoy4iHnCrWswwvqcTLpclZdQTQatOtnSVN56LGTJGZ/w3CW8vzoR00TUjDPP/883LyRM7rZRqYTfA9WykNcK2MrbHIQlsmiFNA45JTUkIuDr+GR0Kucom8v4MU9YisglBWUV6LiJJTqLnJthebJGqWSIelFz7Oj51cIQ2bX102k5p04jd9rwk0zeVjhmO3uk37payTNRULlE+Uu894AITRh7jFJHbLhBOvBi8h6nVLpTjJjWeZ6q9fvbQCqNQyvbNP7H19j2EnIE4WKjbmRznNkuYT9iY/DmXNE0cwGoEgbH43OHoWJxNxJYt6u5cxb2sBMU3Gq3fmO7kEfdhwTnLfBMcE5xnuJ+60o38sQ7lEzLtOXS6H705Wj45J4VaLYhNoLvYBlzXp8ed5sFSrlzL25VDKkqd83E0MR7Ylj04JhcnWLOJFI18GywdMRXjykEmRUnrpIabV1GGlxXZhc22LvK5fY28kZxgGpmtaeSgukxLE11ZbL2tOrLDy8dN/JTGuzun/hhTtX1WD2v14v2nUodJQgVIZ0LJTjUauZCiU9HNC48Rx2NCYpN9nPW/SigDCRKOmg6k1i5eG6M9fhRx4xpMD3jS9Pvz+L5gyHgFaINKLuJcS3ttkYVelgVuhLbpc0k5Qne6yPvsWWf56+O0co67h2Tt1NcD1NPhwxoEZJj+nmDUaiRiwCw5TIqWYdHu//Bc+3/jq55WITExGgsfCZMJ9s81DyTZ73fpRMuKxkt3DLLu25B8lt//BasW0zRpY1qwb86Z82x5YkRhh99erUZDCN8dpbLJcGxJbPxmSeVNmgQQiFjCeshFc4uxQykRW2ohZ+0iVLFOM8IHPKlOd9agslZMmj3pAE82WSVLK/b8bxKDzPtOy4cMF4H72dkebdyLLZNVuQpTCcaXzUaEz82mWE6yBsG1uaL4UQULJigmyAHY9wf+yHCZbqhx5Zd6d+3k7kq9pt0jevEh6MiGJBgkfQ8Cg9dIZgtcW9ghzfCYm5H+6OIhc13EK8fRT5+xXHBOdtcExwjvFe44MM4RbtKY42IC2+1Ukya1NxNAXkeRD4Ci8ZIvME6bmIWpVqzQicHcdsq335gOFrNxkdRESZQypdkqBJ1lxABCWknK1KK6qH9/q38JM+jTlJvZyio4T2Qc4+S0xOPERWbU7d8MzYFGRredlMePdrl7GzYybko+mMwiSvUjE39CKdV0S9HAfK9BG3buOO9qkwAt9jP1inV1pjokooNUsV+L6Z4JpNE+HodAxpe/11QxDsbER20INJSJ5rmqObpEGVm9YFBtSw0CxYbVQcM5fvsZre4HrtKXbVAkhJwxqyyB5ZmnLACq6VMUxc+tTJ8QCFAFrJBrWsza3qI1gINJAgsIEKIz4+/hzfdH+MTFjUsw5lMaLjnySvtxCuRxiacQzDWQTvySd
Nz608N6Tx0iUzYRdi7qXSgM5r2+xaK2S5BQJ8K8WVOQ/UNqkkB2zsuFAKsMoBY10mySycZERdDvHOn6Z+bh7Llm8hHgUqFSPKXl9/d21RlDLRwV7PnKcihTsez/4uRNRSTvuGodCX3sTv7VCe93GkBgEVO6Jijwm2r2OfNRO/1hBudRh1UsYqIAlqgDSRyIFZRCjLJfaqxLE8FG4XiwfHVodpKuGa75GQpnHqURJzv6jNO0URwS1S11EEyUGf9ItfBj9ATAd13h/QdMfmPZMJejjC+8W/hfyQtVH4TnFMcN4GxwTnGH8VcP/qDM3g9gHh5Q3ifoxSEqRE1io4p1YoLdbv0gMoymkfX4TgeIycJoOhZDyeddCOIkV8fQs1niBLPp7M8KyUmh0RWBFBb4u5UzUav/CTjCemOWYxYd2NatVEGxYXZ6m1o+h2TVql3Z49V7gBB4GZSNIU8vGEpD1AhxE11SMQIbJWpXxhlcraHLu7ZhtFKjCKDNkpHKpPnDDRBaXg2msh8U4HX01QroulM+Jhwly+T+54XHIeJ8HBJqNOGzuNaeV75PV59tMmoZJUkx6r8VXGXouOtYDIE4aUiawGpt4jR+qcpeQmPW+Z1AqAHIWLAoJ8yJPWi1xyPkKWQyASas6ITt4gSUHaNllQRUqJUmYcFxfhr/01MxG//jq89JI53qJ3VqtliNzBboaOJoBpJzHvD3iwvEE3qzHIAlw1YTIWJG4Vmac0dRvX0ZTnXNTqSfq6eUgwCziO2f7Fi2Y/3kmURmuzf0U0sRBITyaz9hYFqXGcO1NKhW+V40wtEsJdgi//D5xxF9lqkrs+k4lg2EkJ/Rby6aeMod2ly6h2hzzVJHZAUlsgqbbIewPcoTnnnp3hzDeQD1xAzjVNqXRqCE1lpUqlKr8rElOQtoK4FNfi283Oan+f7MtfQ843QUg8mbLk9/AsUxOu8xy1vYP3v/1NrNMf3mqnd4NjgvM2OCY4x/iriv44YueVy1h/+hfEExhVlsm8Bi4aazwhD8roR58i8ptvWYEXK9Yi0lF4AkkJ6UGXzp98k4nTIJQVwtymm1RJlYUjcmyd4OcjSg+dob5UOtSDNBpm23t7ZqK9e5KEWXplefmtHaGHA8ULX5mwtSNAWgjPJVeScTfG7u3iZiHCc0mFQ5JI3HxEw5kQPLSOrNUpl0104OBgVkETRWbCCQKTOjh7VqEvXSLshuxaJwismDB1KIe7xMKnmnZQjsMV+1EyLBwSSvmQStbntL/HRjLPgVhC6IxGtssYh1Q7OHnC2KqReguABDJsFVMLb9Mpn0daAhOLEDgkrIbXCMuLpMJDS0HTHjJIS6TaQuUmw6XcMqWqRb0OTzxhvHquXzctJfp9M2kWfdbabcVooE20Qws8NeF0fpXqnMV+0sQmI1MWkXLIo4yFRkL54TPYachooBglDsL17ujxFAQmQnPu3L3J6VEUZnVFxVyem/NfiOHT1JCZLJsJcKU056QgM7Zt/nc0xXTUyXe80SZ/0xAY8hxlOaSNJZK1deJEkl2+ikhigrLE8zReNoF2Gz0aQbWCNd8C16GUD/EHu5RliDdXQY9G71jMWxzX0ejL27ZJOIKjlVS+b8a1GA+1t0/8X/4rolpFlN464McRnGOC80HvzjGO8T1BfxzxyrUdqn/6p1R2t5m05lHMDME826LUOSBau4D82U8hdHAY+s/z2c357qiLbU+ru771ddxWBWmZ8P5WOEesbDJlE+U2ySQlX1xF2cGhPXxxwy40QbWaSV9MzDzDePzW4/B9E1lZFtt4L38dvWUqR1I74NXSx9hwL6DaXdR4Qu6XGScuVjbGT4egFDE+mVehfqrB3Gr5MLUhpSFaBRGIoml5vpXij/Y5VdojtwK24znCzCaPU+rxDiCw8xThCK77D6Ow8IioMKSUDvGSHpvOOqmW5HEPHffALhNYktypkbjzGO2Ewst6ZFjkVslUpkkHUARZh1rWY1JeJBM+ZSshVA4
STapsBDk5grk5i/lVn7/+1w1x+MpXZumiIp3Y7UISZog0QaOpMOKse5uh1SQLTYuB2CqT2z4N2accd1Gux2j5PLlTNnoRrdBxgtA5rTl46KMByyvynpqTAlE085XJ8zsr+4qmt0XKrCiLdhxDZAqyWa0a/VXRpf1eTr5FNK8gFEopmIQ4JPhlm2C+jONA9rWvo3b3kfOmVKtkx5RkhHv1ZZzt28jVZawHLx5WAKr+gPyFl8DzsB57BBH4ZGHK5GBCUpoj+/iPklQXvm35990odHFHiUtxrO8E300l5/cr3s38feyDc4xj/BWA1pqrWx0mm7sstvdJ63WklKRpznZ2ChA4uSAor+Ju94mu3ubC46c4M+8fTlpG1GsmpUJwWbSRCEOXvpiHkYuYxukFUHciLJGj4pSxK1GLy+SOmeDS1Lx3MDB+Msbrx0xwpZKZzObmjH4hTY3+pigXv/XagBtXdiBZwSmfolkNWRPbfKT/BZ50vkwWJ1xqPs3V8So66iHznNx2GckaQinKYZt0I+La8AROyWNubjaR1utTt9rudB8nglhVeGNSoeZOWHLbbGcNQg0D2UAKTVX3SGSVtfgqt+1zxJYP0gWZM8yGJNkBub8GXgvCA0CQYGNLyay5DyityO0yiMJXX2CpkEa8x8BbIExtKk7EICthociQaASO0LREl49+pM6pR3w+9zlDEKctkYBZhErnGVYaMUebWpCQiICDuEU6TsmUQ1kPmMs2ie06A3+BsHwSZ76ODErYgJONODF5k3OjF/DzMWLgINNV9DNPwzSKkaaznm1JMiMzSTIjHzDTbxVRmKNppnrd/H70uhv0Ff3bQ/IoIcEjdirEiTzUlxVkwffN+5tNAIkQZaB8WC3lj/fJ+88RN2tMhAJh0bQniChhPEoZ1s8yHlcY7y3jBBYlGRNt5Wj/EZTlYE9OIPJpe5OmNm1ZXrqG/fHWYUSrSJ3dHXl5JyZ77xT3MuO7u5Lzw9pG4XuBY4JzjGP8FcA4TNjrjnGTGEfnhI5Lkiu01ngiJNYBKkxI0VipYHCgeOWNmP6ix9HKDDA36WZzRgikhPGoRLc9ZLjRQ9UWUQii3CXKHcaZhxqnyEYVpxaAmN3kC7+WIhWR57MUw86OeY1tz9yYjWhWoa5eYZxGZLUGGYL92GefJtp7CNlpU5tscnKlx8X2F9mY1LhRfpQNcRqHFFdGZJbLRFVgFFFpOPT7kq0t8xmtlpkjlpfNRNzZ10R9mCiXJLbpJyXqap9WPuDAWSNTGbvuKRbUFhOrzkK2wb51ilhZxHkZRBOiDvgAFrh1sHws20Jz52yXWSVDbsyRg04oxR263hKZVcLLJgxlDceCFNO0tWpPaKl9fnT9Nt/q/STP/d4sGpIkM58VEx1RNMU+TjYi9aoMtUeWgB8PqWZdRqLB2G2hyk2sPMS3EuT8AnOrJR56CJb1JvkfHS1JrpGHKZ2rPYabL5N+xCf0W4dENI5nJKsgM6WSSU0W7tql0qzKr9DaZJlJGx6NxGS9galUHI6wVYJvpwStErWLZ7GWZ/YIBYkJgju1LeOxIa1Xb6R0bmSknY8RuhUybQE2NXuApzIca5nc8chTSIdzqDigTg/iHB0sQJYjwyangiGBleJbCd7CBHf4LYLW2e95KugtZnydqdnm+pnvCzO+9xPHBOcYx/grgP4kJkozbM8jFRZZGJLbpkZ7Mb5BpdfGDUOs3IgT682Yg/pTCPsi5CYiU4Tdi6qsTufoJ0j0+uOo9vPY7R3KTRe/pLGTCWEnYlKfJ33yHDRNFUqxDeOubB5Fv5yizLlIixVVMu228VAhy2B/EceGcpoihSbJLRQWUihKbkY6CuluNtATH+3ZoASLcgdHZAx1iY6cx5UKOx8QT3wmiXcYkdrc5FCkWypBc94iSUKGYUKoS0y0R5Qv4tsV6vToyTkEsCtP0cz3SWSJSrjHyF0EqwJ2Ap6AtAf+AvhLkHZxlCK2j7Sx1wotbMDCKGo0TjZ
m4lTJhQdZQm7XEFqR4uCgmLMGXOR1Rl6LP+r/CElfEkWzqiKYRcYaDUwqb0+TWlWklmilyZKMKHfBaeCS4+U93HKZtVrI+eFz1GrLuD/z8wBEv/dN+n0YzD/OOPWZhB6jzCdybOJeivpmiDyhcF1pKusqM/fiwv9FyllfJKUM6SjI0N39koroRzlpw83n0WGErFXwfI2TThDt2+gXNkme/hGi0jxRNG0uOjFjUJBm0KSZIowzxlEGcYAnFlC5REiQZAyyKo6OCYhx4wgvHyHTCCkEfjahHO5j+y5SKyqVlBOVCXoqJNJpghoNjcj+Pfi+vltYJ9aQqysfGjO+DwuOCc4xjvEDjv444vZenyTNOXBK1GtNavu7JM0WbhTS2NvCSjMyx0GqnKxUotTZ4OSLPZrnSjQunr1je4eGg9PqljCcTqZzLeRTHyW7dJluuwPdHKwacn4d+cB55FwLIUzqoKiqKVJcRal2QXyO+uMURKeoookGOanWxMInin00giQ33aMtoRgIH8t1UCOJpcuU0iGlfIi2NR1vjjS38b2MajlDj0d00kVcFyypSMKUKBRoBAKLfl9iWZJKs0YtPcDNEmJ8JriMqRFRwtURls7QUtCxlrDVBEkM6RCcKnjzoPcgj42uxvawoghL+qi3iDWKlJUAFZKqHJwyqBzsirEjsCzcZMi81WHRnnDJepTIrhGO3EOtzdFoW6Vixi3LII0gUR4KD0uleOmIUtjG1QklNeJEepV1dZnS6QcRtToje47bNzImXx0xmgh6l04R2w8xaZfItIUrcpxpxVy9lOGnNykt1LAbtcOWAlKaczcYFCRGocchOsuQjo1VCfADeWiqZzxcZikerRSDb90gGttE1XXC1CONbTJtoSzQ3Qj9pRi9rMiVqSDL81lLA0VGFCekOiXXGdJT2OWExfAW9XGbvGrK5rTWaKWoZxvUhvtkZZeJO4ctUjxCFqLbOOMMd3kRt1lGD4az5pNJYjqMf+nLCMf+QKImQsr3vGv59zuOCc4xjvEDAqUUB/2QOM3wHJv5esAwTLi00WYYJigNCMHu+gMEowGVbht/PMRKUjLPxUoSMsdlvLBEFpSo9TtYL7yIPn/mjpXgHR3VW3fvRQv18SbhVodJLyXCJ3TraOSh+LIQlN6NQothdBMcuvEerULJMtCBIt3fJbNLRLJEmLk4IifXklRbhImFEk2kpUBoRtSRTgYKZKrw7JSKp8kzQSIquD6gQ9zhAaOxha1dLKHIYpfMrRBLm0gG2MEyVjTCTUeQRaSWS2xVmCgHSECBJifTLsL2IRmA5YH0wF+EaAfCNngNpFslJ0eJI7dgYTHT4+QIlaOdabdV252ySIU92acbtun5NbY4ibDriNQ+FErDrFzesmZpHyMkl/giQRJjJ2Na4W2Whlfx9IREeAxllTd5kHTnDMPJCcLUYRJC/qJjXHrSeRxL4qIIZELLH+KIHM/KECol6SWMBurQbPIoXBeCpItz6zKq0yPJJFg2Wb1FduIknaBxaDtQRF7SFNJJRrq7Si7PkA+N+aAjU3wrw0JhOS5OPMB3mwRzpcNoketCkifc2u0jwxghFcgM28lRZDglWHrlGl43JCrXSG0bK0upqgM8hkRUIXPIbcdE93KFUopRfY7mYIi6ctUYbfq+6T1Vr6F290n/8HNv28jzGN8bHBOcYxzjBwCbBwMu3TZERmuNEIJq4OK7Ntn0xiwwiY/R3Dw3HnuaE6+/RH1/By0lMsvpV5bYqD+MljZOmpHV5rC2JqibXbyV1mGZ7ttBWpLyyXnK97HdKEhO4TxbmBDeXX1ytLmgO21WmiSQVAPSHRfd3kXVmmSeTaaMbiXJBeFen4moEgVN0klGjkVECSRIclIChrGPwnjHOGPwRl10nuE6Aik1OleQRehI41RKJJnFOM1JMo88s/AYY+U5Qo8Q2iKzfEz0JQdpofMY3DnIJuA65n/eIla0Ra5iUqdBGnfvKLG+a5TQlmdOmJy
mrPIYErOfdvUk0m6QASLLsSyBlNZhBU6hW7FtM9nXakU60GLF7lK/9RJh7jKgxq3gIhOnTiLN52kBhDZSlnGEAlL8QBgHXmuMJSw8x0S8cmWZ8c3dqdjHIvAs7NIs0lfogQY7I/Lb+8RJg9xdIRUuWQrZjkDtKaikYDuH576I/tg6xxcRrp/gyRzHymi5I2quUSo7xLidHaoP1ymfK+F55n1aa1672cON+jSrCtuWTKIUASSZYtxqceOxp1m6folKt42bZyjL5mDtNKO5eaqdfSrdNjIfoiyb/ZNnKIVjSr0e2cEuIorRpYA8zLB9D+vMaUS18pZGnsf44HBMcI5xjO9zbB4MePHqDmmm8BwLSwqSTNEZhSilKfs2YZwfEhwwJGfz4iNUugeE5QrKdujbC6TaTHK5ski9Bu3JiP6GQt4j4lKgSIdY1qx/k+ve+Xvhm1O8vtBWFNGaoyh6KBXkZzIxBKhoOOr7EvXIGvk3d9HD24hqhdTxiBJJNIixs4TqnI2yc8bjMVFikUmX2C6T4ZLmtomeOC5CBmTDiFAFIGvoXCJzhRQ5jqXw8jEMR6SUIAVHKaTtkdlVciQiT5A6xcoTcmGZaA2AFZh0lFOGPCG3fBCC1J4j0xk6z8lzTVUUZ2WalgJAzX4Xwmwnm0AWgVMiK61gYj3mfVqnQIqUJcLQtKkommyDGeOVlaLVh2RzeIJLbo1MmXSYQKHBVEShsYVCpCFOnOE5iqDh4Z9wkQLSsRH74lbItSTMHdKix9ckRgWL6K0y2W0ThZlpahR6oNBZC2lLpDLpRNvO8Z0MN+7iyz7B+dPYjjwsD69UoJKFeJ//ApW6jVX233q9TCZof4S36CCPWMGMo5TOMERrjetY5EodGl0qDVoJ+tUluo8s4Q3HiCQnszxCv4rWEl0VuIsTrDwltxxiv4Q/HrKyc4vW7jbYEqFsRNNncUkyVzPnTLaaqM1N9EH7OGX0AeOY4BzjGN/HUEpx6XabNFOUfNtoWpKcNMuwu13sNCF1XNJq/S1WspnrkfoBynHJXI8yQ8oUvRAESwS4PshlSV6bVeQcNQA8GnUpJrR7mfXdC0d3525C5DhGmNpo3NsBN0laDOceY/T8G0x2h+hsREVaVOZ8NH3EnI/evkqYZewGZ0jSDDfPyIRNrm2UsBBOjbzmEbdDEumTa4tMm5aSApsQBbmDQJOjUdqQHldFZLJEhkILC6RrNDs6R6sYsE3UpUg5WRKtcrQA6fhk0RBLShx/jiRXuPbRA1TThzQcJ4s5pKXBAmZy1thCQJ6CFAgh0FmG46W4ZQ/bNjonyzKl9Xt7RnhbeK6oNEBJhSafnmmNk0eUsiF1uijtECubOK4ychoMrEXUdaNtUdk6Sg1RPRDSQhWCGaUQooSwKoiJPNTPTBtsY6kUa3CA7YPnaXyZsCD2OWlvUvFyhFAwGuM99VZDOq0aJJcb5NdvkgVr5NrobzItSXNJuhejVh9FjFrkg1lLknEEO1seYWyie7lS5Erf06MmEiUo2kccETrHQfmO6zWu1jiQp2gmE4JGBceVOCWLijeC6XjieehOFx1G977wj/E9wzHBOcYxvo9x0DfVPZ5jGXITp/gH+6xeeYNyt42cht1HzRa76w8wmps/fG9YrTNqtqjt7zJquncwCaEV2e4men2d2nLAfPWd7U9RCXWUDB39vdBm3J2Oup8u51443E25jPXMIqVJHzuPcSsutpXDH/w/kIwIhztEXp2adZ2hrNBVdXKlyZSNqs6R6T44Pgldcq9Mpi0SZTPOPHItp2THIi9iHJZHggChMZOZINamixRCg7Snk302bTA6rT7DQkjIsxhhaTzbQSPI8gSFxLLNQGhlNEMmyJDhCGE0PEIAEo1CARbKRHW0gixBCIEjcmSckVmSJHEO208UY12Mm22DFOBYKbnK0dIlwyaXLrt2mT2dI8kRShkrHrcOqY9OzCEJ4SECiUxC7CzE0SmOzHArDqXVOuUF97D02/d
nguFsb0C89ya6PkcepWTdIc54h24+4cByyYIq2CWcSyC7dxvlSdTSj5DfLqFvRIhqBRwb0gw9HCGCBaz1h5HJnekgKYTRcKkchEBKYSq4VI6UCiE1wtKmmajUCGv6nNDftq1ELRiwurlBo7aALN/DrjmOEY6DCN4abTrG9xYfCoLz7/7dv+M3fuM32NnZ4SMf+Qj/5t/8G5555pn7vv53f/d3+af/9J9y48YNLly4wL/6V/+KT33qU9/DPT7GMT4ciNMMrTWWFIRJTtDe58SL38CZTAgrVTLbwc5Savu7BKMBNx57ekZy7hIcR5Uque1gZSn+aEjeqLG/foGdrS4PnJDU75EeuBtCzCIx7xaFqPRe5Oho+fDRyS/LJZlbN00RdxNwbFTtEdStDXKWkFYACLQALSVWlmAFNpQEajQCv4XnhCAV2NaU2FhEsUU4TomFT6olE2xyGWBumXL6UNiWRa7BKiZEIcAqbqsKtEAJCWg82wOdgdCIPERnIZFTQ6mjBwe5ynDsgPwwdQVaK5RKcafmfwgJQgE2StokUpIhERONlgqt5WHX6qPnJs9BCAtJGSEMo5RCT9NdhhRYSmPbOa4D3rwkqJlITCHctW0HlVtkQwuVZNPeCQG2LbFtc+61NufNtqc6Gt9FOxrGHXS7jU4ztOcRSWNWo4YTE10ajRHzC4f7W8BdaiE+8QTizdex9newwgjHFjhnF/CffgTvdOstBnpKWXTzEDmcGGGyY6GUIk7voYB+FxgENTbdMuLWFo0Hz77VPbjdxVo/g5h/iwL/fcEH2ej3w44PnOD8l//yX/j0pz/Nb//2b/NDP/RD/NZv/Raf/OQnefPNN1lcXHzL67/yla/wt//23+Yzn/kMP//zP8/v/M7v8Au/8As8//zzPProox/AERzjGB8cPMdGCKO5yfOcxauXcMIJw2brcIbIXI9R06XSbbN4/TKjI/8rBMeF0LIQVA4Wlkkef4zq6RMMJgmb+0NqJe+Om/l7jaKs138XC998Y5P0L79JvLlHEkPmBGRBndiOSNIRaWIUyoEa4ccdhOMi55bQYpfMTdCnLhBHmuRgn9SfQ2szMZSzhDTrkUmP2PbZjgS5O4cSLhobhYWRLEuUNqTHKshHQUqQKAFoNSVAU22NBp1HSGnhS0GqM5QGKS2UFthWkSsRKKWQ0zSUZTkmanOY+ipYpJp+XgF9h0gXZukpQ0AlVmDhtXv4cR/XzfFEiqNjrDQFy0JaNu5CC++hVZxpytC2j7rwSlicpW8K3H152Pb0fZUqesWHbz2HlcW4NR9L5NhkprpJDUFK7HEL74FT95mgF9BPtd7xZD6JMyxL4LsOcZKRZTm5eg86EwnBzvoDBC8/h7x0ndrJ5Q/MPTjf2JwZ/L3D3lh/lfCB96L6oR/6IT72sY/xb//tvwWMpuDkyZP82q/9Gv/oH/2jt7z+F3/xFxmPx/zBH/zB4XMf//jHeeKJJ/jt3/7td/SZx72ojvGDAqUUf/HiTXrjCLfX4fxffpHY9Uldj06+RKJnbMHKUuw0YfPCIySlOycngcYNjaBS2Q6iVkNKSaNiJts0V5xZauC7d4ZmtFYwGKIT01mZWhVx38qg++PoxFhMxHf/7+6f6mCf/GvfQIcT02xQK3QUISZjU7k0GKIHA5MukBIqJayFFqJcQu/uI1dXsH78x0h2D4i/+jzJJEWVa2S2TzaOyTd3EK6DWlrh+t6I4STFL5VQwkUpC7TLjvAhlkjbBctHSw8tXHJpo6eWb0ZiI4BpqZiwIOmZ51SMFg57kx71yiqO401TVApxR9n49Bwe0hhjBIjKgBwhJb4tsHSKW/Mp173DsvuiCq0gkIfEZ3sD9cqrqDAylV+WCb8I28aqlnAeuoA91zgsP7/7/e+U6x72iLpxk+SP/8T8EfggLU5nl3HCIcL1kCdWAd6zxpC9UcSrN/ZwHEl3GDEOE5Lsu4veHEWlc8DJW1e5YBmHQuE4yLW175l7cL6xSfq
HR12lPYhiVLuDrFZwfkBL1b9velElScJzzz3Hr//6rx8+J6XkZ37mZ/jqV796z/d89atf5dOf/vQdz33yk5/ks5/97H0/J45j4iMJ/sFg8N3t+DGO8QHhXuHoB062+NblHUScILOMtGRIiC0yjjYEV7aNjENslZIemZy0Bo04FFRaAnwp0VqTK3BsiUoVWX6nQFO126hL007NWQ62hWzNIR+4gHyrQc63P653u8zSiuyVq6hxDqUGeq+DnhjbYy0Eghy5uAK1ZTNW1QpUyog0Q2+PEME81vpjyLGEyiL6ox/DunQZDjpY2QgsF10vk+OgXJ+5OZtJ2mccKWxX4GiIpCRIE5AuCYo8i0FqhJVjadPBXEg5rbvWRxiBBstF5gle0kWrFIfSNIWUIoRAKU2uQmzLBQF5npn0kU5MRVUyhjyEaTeq1aU5yp5ExBFObQlZUlglD2nJQ4IihWk6mU0N9lhYRT9VRd28jRoOzUlwXGStil5bIXdrMHqX5+UeKA5bhw2y+oPmHCUJZBqlU+yGi9WaM+Lcbhf3MsjB/YntvZ671//CxGLQ9RACogTCyCLN8iMvPLqX99DdCPP8HS898ppOsELv4jLOfMBSyUEEHnKuaQz3Bt/dvr/dT60U6deeIx9MsE6socU0dlgKkMHqcan6FB8owTk4OCDPc5aWlu54fmlpiTfeeOOe79nZ2bnn63eKxjX3wGc+8xn++T//59/9Dh/jGB8g7heOXn7maZ48v8xrvS65tLCzlMz1qMk2NdqH77eTGFeEDOdOYNXu3/HPEmBbFo4tWZ4DKQVxmnNxvU4lmO1L+oXp6nH+6OrxCvLV1+67evxu9QJFO4Vku0M4eI2sZJNtvckksWmXTpJJj0xJk67as9EXLpIHGWo0QYWQY6FKZ6ExD5tl8lvFllvgN1HzCVmiTEVVSZF2R6QHgly4ZFVJkmu0FqaHlADcQo8Ds9SU+WlTJI+OQAAqBQRCK6RTIteSql1jGHaplOZQSqHQJoIjJIdVVWBEx5YPTgPIQOVocobKJhpMcDV4mz1ceYBT9XCW5rGbFeyoj7W9gTXoILMEyxY4c1W8i2exLj4Mw1kUTtSq38af514nRRkN1Nu8X5chvdqHIEAg0LlCWGuG2CDQcYyWDrntorJ7fM67ho3ObPZ6oRlTLVH6vZ3sBbBVbSBrdUiB3fd08/eF7g9Jb9iI4BHEyERZhdCcr+4ghDguVZ/iA9fgfC/w67/+63dEfQaDASdP3seF7BjH+BDi7nC0mBKK/PpN9EGb5U99ktInHmHv+htUNzboOndWRaE1/mhIf2GZsFr/9p+lIc9yNJr+OEYKwVKzQtmfVgUpZYjWcIQ8sTrT5ZQCtL9GtLFL/NWXkJ9cQWnT6TnPIdnaI3n5DdK9DnmqULaHnmshV5chKKNsF1Wu3mG3f/Sh1JEqrIFDNnrERAJYJarUCMU07WaBCkDFKWpfopbPoOyMLNXkGGM63ZPk7dk2zUMy7Yg5q0DCRU+rljQSfVSDkhdhpyORmSNL/PtmcIREZBFe0ia3PCJ/CQdBSbgkWYRjB7iWe2R7AkualFSOMO05pQLlgMzRQjPOQUoPLGkk0LlC9lLsXoIoTZBRhlTzSHsRx1a4KsbdmeAe7OOdcwjm60ZADNiTmeamSEUVf9/9U+5toZ//BuL6NXQUI3wPeXYd54c+9haCq1WVZDMlv37lzuuGqTi3v4V19gzux6rvil8dPV9H/9Ya2tmEkRiazKDWvDVDNdVE3fHm6fMceV6/9T3F08KzqDQ8qsFd1859fn6n/zv6U40jEjVElDyQMWioOZPZRo5L1YEPmODMz89jWRa7u3fS3t3dXZaXl+/5nuXl5Xf1egDP8/A8777/P8YxPsz4doTiaDi68Qs/j/zZH2Xwe39IvdthXKrcURUVByV2Tl8kzy20EiYScfhTHv7N9GdsWYzaAtsSeFmZS7Epu9X9Iek1BxE8hhia79U490iVWSHn9hnyaxn
iL0KYan30cIC6somKfURwFgLb9CO61IM3LiFKZXA9ZK2CXFtFVO/MrRcTrZSGjGjLQiFJI0Xm1smFy4QSGaalgEagXNCpAz2Fkt4dJerFz4LcHC2lPhx3bbxeTF8qaarDhTYRJ6XQJORZ8SabaXiGO0377gEhEdIhdpsE2WAqiLFxpIWDh1JFGsVsR0+rscRUxJzrHEtnHKa9hIXGlHVLYUr8pdRoXFKVISYxGhukD0pAopGijNB1ZJwgL2vYMVGiYpwPXYTtmYanqI7zvOn/xwN47TqyL7HVKSxS7EGGtT/AvvI89o/4WEcqiWxbIk99ArbAujnEbZaxfQs7nSA7bZzvQpx7d3pJCBhNEoaTCM8x6dYkU8i3nJa3MqNg2MdOTduS8B7+UUdhSXD9jO1+m2q19Y4qDd8LKEcSBx2ESBHBcan6/fCBEhzXdXnqqaf4/Oc/zy/8wi8ARjT5+c9/nl/91V+953ueffZZPv/5z/MP/+E/PHzuj//4j3n22We/B3t8jGN876EP2qjNLRO5mXp7bIQtY48PELTQ10PsbwwJ/RPsnfvrODdvIHtdVJihpUXYqNFbXiWiBr139rlKaMqejZCCcZgyV51GFFLT9VJ4zuE8Pkp9pNBIobAcgRNOcNwYu1FGCoW+/iak+9iLDYQYwWRCMtwnz4y7rFYhorJAOthGxT144EGyUuOOMvHioRRkWYmcNVJrghKeITu45JgaZY0yldRIY9w2vdMVXjBFo0+YGcMdimGnhKcgVIVouni9EJBExk/HsB7FLNoip7/PiE5unHDuHFvpIVEoZU89czgUIEspyLLYVE0JMeUxxfsVmcrIhUBOt2pjWjUA5FpM6ViOJ1MsYlSU4gaW6U0lNK5M0VlOnDtkCLIsQxOQ4d5BAouKqTw35frF82ZsFHpvAuMlhL0EUhoSSI6Vx7i9GOfLXfyHmriuPOJkvYA8/RPIzdtY3Q5WnmDbJZy5czgXz2KNWnCXOuEov7CsWQTpXlGlo47ZgzAhznI8R5JkCktKhFKzwNtdqHQOjlQT3t8/6iia1YB6xWcw/t5UGh5irolo1FHXbyFOrCLKpcPP/SBK1T+s+MBTVJ/+9Kf55V/+ZZ5++mmeeeYZfuu3fovxeMzf/bt/F4Bf+qVfYm1tjc985jMA/IN/8A/48R//cf71v/7X/NzP/Rz/+T//Z775zW/yH//jf/wgD+MYx3jfoMPIaG58Ey1RWhOOFTofIywL7TiQ5egkoZNAWK2RPvYYg90DSFOUbZGWSwhL44j00MwMoZFSG2Gs0HeYnLm2xHNsludshNDE6YATpwMqgYvag/hbGwi7jygFaA3LfvcwgpOFCamXouclWQmS9oiwMyIrzZMlPmHuMO4G5DTJAw/Q5LlAjRooaZMPM9QrGcwZP5ejEZZZ2bNEVGroSTxtHS1xSPCIjGuuUEjHQWGhqyVSFCrJyXMFuQTbOiQu9hHyAwqpcqTIsexpCAPJeDwjWOKwKgqzhM+nO6WnTEkUjRTuTFlNXzQ9EBeVh0zspjEIvKM6CizLRaNJkzG25WJJZ6prkdMyco1SOVrHWLY1LVsHgcIRGY5Upsu6csmFJMo9HAElRrhZl3I8pKw1tk6RKOz1RxCrporpsPN4eqThZTojl3kOIkuR4QiLCEtKEBJPh0idoYQkV5p0MEF1Y0L/zgiDUnVUrYoIUoTKsR2BXfGwexJrOKvYOsoTiueKsvOjj/sFfDpDi/5eBcsSZJkCYXx+sjxHC4WQ5iEtTWXQZuWNV/DDIXE5IHe+jX/UFL5j0awECCEo+Tb9ScQ4SqkE7r136D3CoRZvaxu1uwvbWzDXQp4+iXSd73mp+ocZHzjB+cVf/EX29/f5Z//sn7Gzs8MTTzzBH/3RHx0KiW/duoU8cpI+8YlP8Du/8zv8k3/yT/jH//gfc+HCBT772c8ee+Ac4wcWIvARrgNRjM5S9OY264MhKNPYUQQ+olohP/0U/VHEnG1xMJhg2wq
NhQU4TN72cwpoJbC0SxJJ+j0LoSThXsil7QP8cpnMnyP0P06y1SOvz5Fr04sIQKPRgxGyUcfqmmiR6iny0EMIB5HDIHXJkwRtGZ2Q0oIJPpYyK33H1dhZDzsog+ORhUb4i2UhHAfLNqkUu1lCxDlZf0CSOSTCI5U+iROgpYutMizPQmqFNWijYmUiOkLiuAKnXqLU8LGsKXkZh+h+HxmOGKoyXarGbM+yEdNwRnErUkoaIqMVwtLoLDfVUoc5EGHiOlpNV9YF2dEcCoalO92GBlH46BRvF+QaSk4J0gEI12h+LJ9cmHSYlBaWFUyVOSCnUaNMO6hMYVkKhwxPj3BEjNI2USKY6GWwVxAIAj2inuxRu7WJXa3gNGssLppWD1rDaAT9vmmaeVQnku90ia/dIPdK5MJD5XBy9BpeNjJCbi2Nt49w4MxDd3QIL8Yvz707tFZFU857VdQdujBLky7zvFnEZnZOZlqtPIckdUnGPjkKRI4ltYlKSUGeZTMhtNY0r/fpJfNEtXVzngpdd03jj4foSzGj8xUQAkuA61go32LY9bBsk7aMU81gqHCnREzw3hvw3aHFW1pCNJuoW7dR7Q6q14NTJ7EunP+elap/2PGB++B8EDj2wTnG9xO0UiSf/X2y1944rHY5bBudpuhOFznXJP3lX+bVzMFzLLbaQyZxhlagconKLZSSqFyic/MTpjqU3GhwVC7RSiCwsKWZ0Kt5gnOwjzUe40mBtG1krWLC49s7MAlxPIHlSiyhYRIifB/90EPIegPV6yGvXoIbV7HJcGxTrp6GClWpEQufSe4zUiXy+jzCthA6R0ZjnJV57PEAORqQZoLYKhF7deKgCa6JZskswu7sIrMYJWwyyyPFJs8EQgrcmoc3blPKhziBTS494kwajYJr459ahlKF9nZIZzcjPXQpnrZEmJIG17fIbY84lodRHCHAlia8kShrOjFb05YKAqVypLSxinwXcIcgOQ9NVZRWII6a9omp47HFYR1WFpnJNR2S5ymJN49le1hWMWEa0zwHZXRKWpJPy9MtMlwyKrqNm4RkdhlHpKBNmip1K6S4YLtY9QqVqqTZNL3ACpRKsLgIy8vm98nXX2Ln//xDBvWTDKkTRbAQ3TYleEJOGUaMdfIU9k/9+B22AQVZKdyRpTQRoskExmPzuJsQHV6r6q3C8+LnUZiIj6Y3mjDJQoTMsF01JUQKRU6uDJl3JiGL166QOj6Z7SFQlOmbPmOAzFKcNGHrwiNkpTK2Y2FLSbXk4dhm/HOlSDPF6alX1P0sFKyLMwuFYhzuJ+C+01hxdh/Ir9+8Q4untUaPx6jNbazTp3H/v/8fpP2Bxy7eN3zf+OAc4xjHeHsIKbGe/ijJ158n7sekzUUyq0SSWyQppHPnISihvrzH/olz5FrSGwiSxJAXpQToGaF5y/bvSAVoHFdjWRmVZMTS1hVkNIRqnXKtDFmIGh6ghxJpg5jsIjqmUkP4PtbJE1gfeYCs1iDa7TJ+7TKjyCIrXyDNBNqyEWmITYiV2Tg2yCymFkDmRcTKJUxtsjyArQipLYSzgBWAyDPsyQCRRKj5JTIrQNsueW0ORn2CdICrB7hWjmxVSOdXyHfbCN3HqgVoJCWRYAuLA2p0w4Ds/8/enwRZkp33nejvHJ/9zvfGjTlyHmrKAmoECZAERxHi0Gx7arW9Vsv6cUNpLWkhySQtJC1kMtNCZlpo2MhMT1L3Qk8meyaJjw0SJAVOmGuuHCKHyJjjzqPPft7ixI0bmZVVKABJoADE38wtMu/o97j7OX//vv/3/e9IcDJUzLGTNkiRYqkYX/UJE4cJBUaZhTBS3VDPNfF9vbCOxxYpBkhFlqbkeQRCF4gLIY7FwACzpn2n01bHflKnS4ZUfhzkMZAqIhfW8f8dSMZkZhEpI+zJFuZSE1PUCGMgE6Q4pOSYeUJBBljEGIZgalaJIkmQLyFNhUWGryasxvdxjITYX2KiCuRxiizZpIbL0dE8omI
YUCpBrwcPHhzv5uQSqvwKpfCI9fgdapNdTFtoETM6rRqZRRLhEd9/l6T5U5pQDEbkcUxu2yRPKCc3DB09EkITKc/T28zXKkn0fk0mOro0neptllJ7lOgIHM/msJswCSEY6cdQOmqoMsgRWJGNFOtkwkTmORYxJdnBFWNMEWNbEdXgiKy8yHRBUPIdKgUHx06P9WAwGCUsuh5rSybx7hHBt75KMomhVNNNJ+OE/KiFGo4wXn0Z2WiglCZyM33Tt8NM3C/9G3BcGr7ht3GNBFEsIjbWUIM+dHvwY1wafhpnBOcMZ/gBYObVM5vgTm+Pi14B1KBMUnqJ3Aogjkkik465DCUbCiWElKijmNiTTIRBEttkSrs0G2aOkClCKqTMOQ7OaL2NMXOU1oJchcAQJpYUNPe3MMIhWbVG2XewDMAwSFOTaOuQ0PKJl54nyB2SREGcwsTB3rMxxzlsHqIisCsedhST93ukiSA2PZLMI0gshBIYZhHllFCJgxIKIxphZDkKk9QrkyNBZRimhbQMCtMhdpRirzVhfx81GaPyDAyBdB2cjVXU4hLWKCCZdkgdj2HiM05dxolHpsRxZVSOVCmGMCioDo4NBSchCCVHYZFDVjQvVClWHuJlE+qix8g7z2BUJElmx0hi2ynTVJELgRS6WMIRxqlIzGyUtQBZqAyB1CaejxMfKdEJDomXDghlQUcTDBuSEbnlI0trNG0oTe+SITgwlgiVQ5abpMpmkFlII6fgSIoVi3LWItneoy+bRMJhICqMvRvYIsfLcurWgIpsU1urYzZdOh3odB4VXrdaOlWVpmAYPsXSNaaxoCvLUHtRV4cpWAy2WJWblBYLFNc86L2LGdbIN+890sMpXdkgeeFV4uoy0ykn4znbxmO9PY4ZIXfdeURp5lp+GjrtZdHqedzZGrDXiolDCbmOUColmQQJhozxeqNjl3m9JLbyDUQOhhQUTMlRMSGJrzLYEYyjKX0jx3QzlO+CmVIo2CwsFXFshXvnq1TDexgXVhGiO7+GG4p85z7GYYD9k792kq6akaQZSXvS3zwHdSzuV5alte1wkhoGzkrDn4AzgnOGTzR+GIzklIIkzon2u0TjmMRwSfwqSSpPTCK/20TwbDKXWYRpxdhrdZxkShwrTGViOBIpp5BniLSL3Fhmz7IYTCeESUJ2nILKU4MsNXQJ+EmZtMBQukpKHrstC/RfLwqwRgNEuYLv+XSzMlFso5RC9HOE1URJAwMT25HHGSNJPhxhtHYxvDXSyQi7YOJaIZiK/WyFOEzJkwxlZJCkICwyrwSmo7vwTvo4yRA/HeGYKR4Ss1bhSGgBrBQKywxQgw5ieoiVBVAoMZUF3VsnCBlvtQiiEpOpJIjXSHNH+0IpSY7AlDllc0zVmpAEKcWFCtPdIV21SGtSJckhR2GoGDuPaKRHVON9dovX2VNrREMLDF0bNavQjSJdWWQKkyieYlseZNoCYU5wUpiJgfPw5N/6YBwTISEhj7TwWFikWBSjQ6ZGmcz0wCrg5ANM6dEZegRZiQZtnrXfZewscpg2mOYOMSY5JqOpYDzN6FCgTJVi2uNqtk3slmib54mlxzS1GSdNZF7HuVmi2oZaDVZXdTSl2YThUEdver3ZDksyp8kgDgmmisTwMVRKJe0g7Qij4NJaWUMkDqp/iPXbmxTlmEqjht00IIwwtu5hdY+o/MovY1z6oF4kyyAIdIQmCDS5On0tBYHeut0PvPWkd4/vQ7Xo8NOvNOlOxuy1R0yjUEfXjvVSre6U5fF7+O0ug+oyqXJIhEduFvC8Cmo0Jq83yWyJvb1H2h+QpRm5FKReAbW8gr1Y4+5ti83JhHRzGemcgyMLS2YsOMN5lNSrYz0YUnx/gLtcOzEwtSz996OQ147F/ZYW938A34fS8B+G+fg0zgjOGT6x+EEZyaXpkyMrySnfg+/GskCcukG37eOJzcyxJ12sNMQuOcjmkyeM/EgQfaODIIayR5SbONmUKLcIM4s
4yskNC2mXqLkeUWAyDgLiNJvvq1LYwQQjSzBdh+bGErWKi2nMqoMkKIkUkB61kIbEKPtICWGckgkD0hgjHOKYKXYyoWgmjO0FTJkjVUZYVoSTLsmoSp4qYs8lSY69qm0Hy3IwM13pZE7GeDVwky3sSR8x7INhIqtlVL9PwYrw+x3SwCNbNRCeh0Axji0mkxRck7C+SpYbpJlknHuMpUs2TVA7CZlbRGFjklO0Qxr2EMvICTIHRyQEEYxVkaNujSgvoXKtEfbTMXYWsDF9H0+G3HFf4sh/mVBokakixyOgsOAzHs9I7KxEPMa2PLJkiimP9TyPpKFyHY1KQzKrwLxjXHZChHT5eA4iJzGLKJVTCbYYWAtk7gK5WacYH5CZAVOjwh4lkmiblXiHxXqXLk0OghJBZhHJAmkOMRYdc5UeCX2alLI+jek2a82YiVlhv+cz8Rok0qDV0tEaw9AEodE4JgpVOH9ea3EcBx488Llr1Bnd3iPPAv1TLItW4Sr7dgkV2LjBlEYwoZGHpGuXGORibv9QuYjX3aHyx+9Q+0srumrtFAwDikW9PQl5rknPjOhMp49GgGbXbb8/Oz4lChQxVEqeKywhqFVMVssx7STG/uPfY3W8S1qp4pYKVMw+5vB9ZKOI8UqJ7Bv/H/J4jFivExkWYZCT9gLSYY30mZ8i8JoE+yEhEzLbJgWS/JjEzg6zaRElBlk3ZfThzcQfsWY4EVWbDcTiRYztezjrLvJUU5/vR2n4D6Ox5xnBOcMnEt+ucy8f00hu1sPjSdsM30105cQTptPG/OZXMYMBTq2A7QrsZITRuYv5EZYFs9+Y/ul8wohti3xlnfRTr5LUVghDPYFnGai8QVp8ifyghVioI06VH+vKpS5yaRFRKVEQkmsbddaWphwNhgzGAUanS/HhHuY4xhHgFlzsqH9CwuzHJlunXCBxLLx8jO8J1v0OrhGTDwZMd7aYuA0meAzURUbxXI0qVIaTdSk7CbEZkuS6YYkrE5rOCEukurVLFKGMAOsnb+BYOe4f/Q5+ckB2/hLDzGQ4lIxkiU5pmXFkkfQMhCghEIhgjEpjYnuJKCmQZgaj1CXNdfM/aYKfDlhYs6j7O2SdPmNnCVMoosykFxWYphZRIkiFjcDCNiLsLKBmjTg/+BpJItj2rrNtVOgaTdRxMz2HFD8fgWEyHjlkx5VfhqGdv9PMIEnHyBM2awHqOCqjuxPLNEActyPU5OfYIfyY5ORIrHxKZrjk5KSmS+CtUMv2iaOQsbtOz16hlh2yxD5dscCRuYHKDK6N77K6ZNJ0B/S6gn25SpwLAgrEhonKBEMqjIwyHRY56A4pmAmXSwc0f7LGyJLs7OhITRzPNS5K6ShDraaJjm1DuQzP/1SDivN13M23GVXX2crOs5cXCTNQKkJNQtpylZZTQnVtkFCyQmqWjqBNC4tMtgNa3xghKpWTyEu5rLePimpIqYmX7z/5+RnJOR0FSlPxiGGsToE5FFZuMPncEmrzLqLXIxjm9E0Df/EyhU9dwbn5BtZwgrmhxb0e2i9UVS3ynW2MnT/F/p9/DbWUE916D1EsPTHKoqZTcm+MvARpRUf+4nj+90ldi2fzAEjy9dfIDgzUg2N/NcuEJEWNx0jvWczll/C25Qkpmv2d9X76bvG05uPvN84Izhk+cfiwzr2555GsnCPa7ZB9+SbqZ1ZIUkkUfXBi+E5w+sKfRVYe3x7P78/2M373j8niLeT5VYQ4DvE4Jqqw8ojhHULv52yymmy3CP70PfJAIorPQFkLEdXmGLH7Dsar9qOGlUJiXL8KoxFWdx+v6uC6AjsZk3RHxOU6yYuXiI4jBWkqcGSBjZrPQnhIunMbEWcYzRqG4yBTLXp0J23KP/saxUtLuO685DZNSvTvGwy3uvSsDbqUQIFKamSWiYpSZLmI5+VINTlpOuikExxzirNWJB/a5Ic7iKJuUGiKjKIVUDQC7MEW6sIlJoWQ9p/cZPBgipArsJkw8Gw
C+zwqSTFsA8eaIIIh07hBIm2ywCS2l0lkjSSxQeXYMmXRHVC3xxTNCWlvxMisMV28SDK4T2dgEhhlEgzixCRDYIqcUs3Crxicqylq995h0Fc8NC8zMgoMrEVSLKSKkcLEFxMsFRMKh4ASZALPn2kkJIZhk2YjsixFSQlWUZOWWX+cPNM6FTKQJvlMUCwywEDmCbmhqaudTlB5zNSqgbDJDIdElWmEB5SNlH33Ej2WyBhwgze5LZ5lYDT4Rlrl0rjNmtulaAypqj7DQpOjrEmcGQSYJEhNxLBpsUhXwYB1iu87VCrwyiv6fO92dSTn8FCTgyzjRJsDusqqXpc4zuew3RrFUZdGbci1whFuOiZr9+h7Ze6yStspEuf691okhJnNQWaTZoo4TvG2oSa0mDmOod3W2+lrtFjUpKdQ+PDeN49f146jt2r1ya9JklkESOC6S4QrTRiOyCPtq5WUS/SGI5KHCuG9cNK5+0pp/7iS7jHfp4UGcm1VVzp5T7CjOI6y2CsNXPnh0anTOK3Xi5oNgsKnCN66TdgaoiYZwjCQi4vIa1eg3jiJaH27sTk9Rh9FiD5uJ/VPorHnGcE5wycOj3fuDTKLncm8yZbyK6jdAGtH3/U9jtn1Z5rzi/b09rQqKJ/UYfgoqjBJXTIlUW4DdS/A+tpj+zlzwp6G82iMAMOzcAo+dmsXf/drlF//C0hDEobHd9J+gwkvk9y+Q9TuQi8Do4xcuIi8dgVZaJyEwk+EmI6ivP8uNtv4V+oYsj/fjYYi3G4Rvu3RKjYJI3mKIEry9VfJDr6JavWgVCQ2PKLMJ6OCo0a41TqmBJOYghHPI0nLi1TWKxTr57G+9C7q8CaiWGDsLtCPSnTbGVgXkfk58i+9Df0hSIEo+BSzHsGki5I+E1ElTSAw6kTSIpo6JFggG9h2SNUYUXBTKvaUqhUQphYHUZX2pIhSDqOpzTDyoHSdIA0JE4k4Ti+5vqCxXuDCcw6OA/1+la3JZ+hOB/RjvfiLPMYAfAIsCbaKGaiSTlUBpWJONBOAKigUTDzPo5WGmLJEptRxFZV2kFJCYGZj/KRDZFZQWao1Okr35jFVQoyJEgaR4WKKDDufEBsFYlkAmZCaPcpliWce8mDaYJIVuKOu87Pid/gKP8WAMnen6/SnLlfLJo3wkFrZpJqmjBKHSeqS5oJx7DBMPUIspGkznJoMJprUtNtatLu2Bq+9Nte59Hqa7PR6+jdHEezuApSR3ktU5AGFdgujk1Myp5Sa16hdbvDpN34fu3yE8D1yJQhTg4OozmFYhUQhRYxhm4xGMDM1T1MdmSkU9KYUJ8+fvs5NU+uEvl2058Mw64g8rzaWwPxazTKY3BkzUB0ib41QQdF8TMR7StwrpcR8/VU9N+zsIRs1PQlF0XfdgG+WprLtY0LUWES9uPChWpgZIXo8OvRhEaJvR4jUYER6X1uzyImNLRNq9piiGX3ijT3PCM4ZPnF4vHOvJTJcM0YAlkyxrAgz2KPYGOFernxPodenuZ85gmF8Kl5umTDJIIlPSl1dF+xRFzX6FnK5hPBjlIJ+UmCaOkwzj2lhg+7DAOubHyRwot7A/EztpB+OV7TwV8oUihLP++CdbX7UIercRixoA0Ol4P5kiSw/ngy9hiaLh/PvmlVt5IUG8qWXkXe1vsjOxtiGgThXRk0khAGuk1L0M4pZH9ntIOtFrF98mWRB0rll0MsukY4sVCuCrAt0wHGR5RLmN/6UatrG2VigNw7p5SW2zctMDJ8gNYmNAonhkiYKOw8oMsare9SvLuDt7lFt3WboXmA3aNKLSihgnNj0xyUs1yAOHMYTyHMXr2pTlwnVQsziYpH6ho+Ukl4P9vf1At45cojiMoYRYaURRp7gEGKqBJUKevYysXRwjIiSkzPKGqSpHvOZR5NSLqVCkyjKUSoFkevOOnlGKg28bAKmQ6qyObk5tnpIj32klDBJhYnNFFtFxHiAIjYLDJ1zVIwhjpFzzdnh3nS
ZEUV+T/1Ffj3+v3iP53hg3qBLg2/mq5zLb7GRDDhfOmIQF+jHBaapTcUOqYVDprHJqLjB5LgkPI7n+pvBQAuLfR+efx5ef10/1mrpMu2jIz1uYQjSshjKZfqygRQKt2hRX7I5ikFaMcX9bbxmkbozoWqPuWQdcbFwSL6zh3H9AsnPvczunj4WWabHM0m0fkZraPS5WyppwmNZ84X8SdGeQkGTlmLx40V7PgxS5PhqjJkdIsIAcXwz8wgeE/ca62vwK78816t0e7pB5cULT60Bn5DyQ8nEaUL0cfBhhCiKjp9PYlSSQdkizwVhbnOUVygWj/QLPsHVW2cE5wyfOJzu3IvvYcqcDb9z8ryaTlFOilN2fmDk5kn7aQjF5dL+STNbNZ2ivDHONZCL8/dl/YAoSeCYGMW5STucN6xSpqnTGcfE6HQ/EJ0qe/Qu86PwOAlLldTkRgAohG2QjTPyKD6p6XkkwuU2YLGGORlSMgPKDQt3tU6+t388gd8nbWUMjRrDhdfIr1xDjhvkWx2yr7+p72o3NjCCMaXtdyhN9hF2mUHxefojm31W4NCh75xnP18mkw6ZsDDMBC8dUa3kFKZtyjWD+s+8QL2cs7ubse1eYlc4cJgQOdDLqzgqJA5zJrJIKopYoaRY1ItisylZWHCwjxsETiY61dLtzhbpHGMa4jJFODaOMSWPYmrhAYfOOaaySiZNakYP24C+XCZNtdZBSn1sZgRBKRPLysjzmCwzSfIIS5qQJ4h8Qmb5WOTkCgQZOaYWcBvHx12YWFJBahBKBVkf7AWktEmRtIKcteqI2Ha4IvbZChcJMpf/Yvw/+ZXJf2R1w+UN6zWGkcv98BKDzgGXjTFFK+JKeZ92WKYX+8hxTLVRIjpnMA10ZGY8nlWE6TEaDjUpHw7hW9/SqZ7XX4dnntFk5+AAgv0OnfcO6fQkg6wEQpFEJrthDdN3QF6iKAz8/QF7hWVcJ6WkRliTPs3SMo3XXsEpSa5fh+vX56feYAA7OzpqNOuV+HjpuGlqXVCx+GiZ+eOvm0V7Ztqej+O9PBPVZjt75Edt1NY2cmkRY30VcRzy+TBxr7G+hlxd+aGoOPp2hOjx6i2lTgLFGp9gY88zgnOGTxw+bh77B20k96T9PCE3H7GfjxMjx0hZ9zvYRoIh1IcSo+9qHz0XLIvxRDAxq4zSY+GjAhCoOEMaBtLRs5sQegGY3SnroZdAFaiilF74emqNyXMrqLXjzsrW8VSSpKh+j8L9tygl9/AvNshUTPtWh569TLdyEYKQqGMxlBUSr0SaKFJcbBnjxH18M6HIED/ssjBNKCwWeLjxOW79UZt8OIY8J5MWPdnAthIIIqY59KgiTQO37FCrWqytzXUbM8TxPPLQ6+nUn2FA0UlJR1NsU1FwJvjFhCxM2TafIcDFUBnr8T2SSvNYdGwxm88dZ54SnEz0Auv7OrmVZRlZZpGmEs+KsMjI0hRTCTLTJQWy4zdnCAwrQeAjnCqGU8R1FI4jGI0sokgS5QIDj+FwTL0wYWp5XMl32Jo0CKTPfy//H3x+8lV+9fyX06Z7rgAA0vRJREFU+ar4LNu4dLoLjA+LbBR7ZCVFQ7SoTQZ0iyuML15D2JJqTR/zMNTEot/XYyWEjqj0enOC0WppQrexAS+t7mHc+m3G3YC2d57ILjLKC3RHFoMkJhGLuGWXiTjHqD/ECALMaUxZgF1d5mB1hephmcJYE6lmc65LqVT09vzzs2tKk9Ld3XmPnjTlpOprBtfVlV7Fov49s9RMkjyqIZqd70+K9jwuquWZa+S3buvu3aMR8tpVhGN/ZNrpo6IsP0x40jx30o/7EzQfPwlnBOcMnziIP4c89idpP580YXimLuv6XieMMNQ6heEQkiQn75uk8jpqr49Ydk5bJeHKiML0AZWLC3ivlR5vKgvoxaHX0wveBwXcElmtYA1blO5+lcLRveN2shn5UQt5/hxCCPa7BcbTCcIxEULQdlYIE0tXEqkM21Q
sxNsUmx7FyT614QPyOOWBeY23yz8JxQbqvQOyKGVsN5C2wMpDwil0RJ3c9fFcRdURLKxarK3JD2ithOCkOqjb1ZoDw5jrO2SWURYjir7gemmXNwYXOYybBIZDSU64aG2zGzUYyibStHBdvfC7rr7zzXP92Vk2F7aORpJaTRt15jmUaj7l88/ROQiRKobYxTMlYSRJc4lheKyt+ezt6dLzesk8+R22rRd2aRikZoFepvDDHbzkkHiacVF22fOv0lc1/iD8HJ/efJPPL/wn3n/uN3hn02fUMXkwNhlNJRtOSrm5zOWf2WBUqNLv632fVUnNSsMnE/34jCRIqcdtPNb7M5nk3Pkj8EYv80z6Npc7f4yUgqTcoN14luloh9BcZ9D8FAPbY2Q6SBJUntFWBpZvIQPJZHdOara3NalxXb0fCwuc9BoSQv9/4ZTnZZ5rwrq7q4nZrOro4cNHj73vw8qK/uzpdK71eXJDwRy+skVhYFFeO4drZhi+h3j2GbKdXdThIdmtO8gL555q2umTih+W+fhJOPOiOvOi+sTikb4LSYKwLOTa2iduQvlu9vPRO8QPThgfVV4O+s51RmQeN0KEeW8e1e1iTIcUOluU5ATv4oomTk/4rjzXi0Svx6kuvY9CynmpsGU94U7XdVBHbdJ330NUyhjXrtKLC/TudUl93YN/rHyyMKVgRTjxiAVnSHFySHrtBveMK7TGPmo01mXDzz7D9NY22WCMW7IYJAW6SZEsNzBFgpWG+AXBlc8tU6nKR1KWMyPGBw/0OLXbeqxmrtSWNdfPWHnA+t6fUSkmfGV4g05SJsslTXfAxcIh7/dXmWQOVsnDL1okyVycGkU6KjCLBlWrmgjMHLDH47loN8tg2AmxJgMGgY0gJ8Qhxca0Jecvmmxv6/fXanOvpmJRk8zBQC/qvp/jmQnnglswGZNX6zhGxn5Q5iCoY8qMi+ouv/zcNt2f+DW+8lU42ktJU0WxACvnLNbXJYWCjsTs7s4JzXSqv3tG2IJAk4hZyfWsZDwLI/LpFD8fYklwCNnIt7g2/QY1a4JcXyNVBoOf/8uMzRpxPI8OzSJCtj0/12YVPDObNdDEpFyep5cWFvR3f+T1mMHesZ5nPH7yeTyLVK6uQr0+T8fFMeT9AcmX/1j3XTrOZZWtCUveUPs+dbqo4Qj7N34N4/rVT+TC/ueBT8p8fOZFdYYfCfyw5LFn+5kftVD7BygEcmUJ+RHh6Y8rRMzz+eT7YZP1DFLqSdsf7WO989u6N0ajDmsOea1Cfq9HdmcTMRgiymWijWuMrrxMMFlE3fzg581Kc2d39Y/jw8pHVbHAuLRKK22gdn1Eo0FuBJDlSBQr8SaN+BBr/RzBboe7wQU69ouIySJCClQQknplxOo5rCAiGU7pyUWiiYtnhNgixbYjlt0ea8YehCGm/CmE0LqkYiEnOhpw/6FkFFl0xi5xLE8W1JmWKc/1gtpswqdedPiz/3Odb7ZKjCnjGikbpQ7r3hFv9C4yzSw8O8OrGIShfl+9rhds3V9Fj0m1qsctjnXEoN+ff0+tBodbU2S/j5EFSFnXaYxUABkyjklHEtv2CcOcKMgpWjFSGJimRaUiGY1m5pQSkUv2giprRTClIlOCdb+HLVMeTpvcVZf5zzeL/K+f6/AX/2KTN98wef+thNEYks2EyURHu4JAL/Rra7C1pQnbTNi7uKjPvXJZk8PDQ/18MM3JowQjS0gMj0CYWPhIQ9EqLlGKOyy2+1wqHbJSGmOcr5HnmmTOiNNwqLd+n5NWD7OKwVkzzDzX/5+dj6fLxOt1vUnxaHddudBgY0OysTE/V+MYdndy9janBJMMZZgMco/BYD6XCKE/b9mZUFSbRJULjLOcUeqdRD6FEFCrosIQ4fufuLnozxM/LPPxaZwRnDN8ovHDkseei24/fpfPxyeMUHoMrTqjsSS79eFkZhZyn+kGPuA4/CdfJRs/1kNoYZle6SL9h2Oo1jFe/TSyUtEddtX
8brpW05/7ceasx8vk01xyf7Kk02wlFzUcoSYBfjOhLvdxDx6AgiC3eb/8Cv3uRag4qNFI94eJQnJpIisVxOIqnbDEYDfGzqoU/ZiqEdJwhiy7A3wzYpJ6qMxGjaeoOGZpEbqbLd77vQ6TQUonqZBKC8wAWSxSqjszA3ZMU4/dtWt6Ufu/vyg5DC8QkFClz5XCPtKUvNG+yDS3qJpjzHqNMNbVao2GTnXNmsklCSeC8E5H/51pRGbO0Emck3aHWFmE9BxELDFEhpQCmQsEGdNuRKkC49wgDnJEsEOORbdborkoWKp4HHRtMCQqU4zzAhMR0zTGBJmDQHG9fIgtMu5NljhIF/h//xeX//WX9vjU7tcoDA3enl5loCocDiyivkd9zSfL9D5euXJcaXdfk7V+X/+2pSVN5ma6lvZezGjYJxIQoRs9SjL6VJBCEdo2/aTOTnCJ6rs1LktNoBYX9aaU/uyjI9hYz5kcjen3FOPIYpLrhkxpqsdy5txerWqyPxjo8+/oCEpZF/P+beh2qOctqk6Asf7B68442mX1ja+zfOr6jJbOcXjudY6S5olOp9OB9qRCmn4WcWQjbIumM8CSp9qYf4JFtX/e+GGZj2c4IzhnOMP3iO+0y2eSzNNLUSRR6qPLPWei349T9vk46ciV4O5oef6Cuo0KAqSU1BvypCvtd4MPVmgZx2RJsLBoUp48gEEfOTjPeDzhHftV+uYCyjDBcxFBgDUZYlRLJBevk1seg8BiEDiorqRchqWFHC9s82xpn436hG5UpBOVmCRamCHSlFWnRdez+Ob/GDK+1aITFslMm8w0dZlv3MeedEn9NVTBx/O05cALL8Dbb8Mf/dFMeGqxtpryjNin08q5HZ8jxmLRn6AqNcLcw/M0CZxO9bjN9DymqaMgg4GOTqyt6WhFkuiFudmEcBCRhimurU1NpcgxhC4RN0QOShCFOcXwIVa2TITDOPcp5EPMYIrZ3sMtLuGxQih8EsvEAg6nFRr2hJo9whQ5RTPgUqmFkUfcnazQ60v+w/8p+J/sFlcWXRaNO7zbXuHesElnRxFEkjB0WVqCO3f077t6VRO3x4nO4qI+d/1oymhnl2HuMMgiMmmRCZMUE4OYEVUEMYFhEQ592l/R53C9rj+7Wj0m0xOd9hhu93HDKrlhkdaajCrXmJoVRiMdxRFC74MQsxQd2EGPzs37JJFEehv0CysU8yHy9ghn9y2WfllSvbbyodenvX2X8/1Drpy6Pkcj2Nn22NstknSHYJm0wgqtUEcHFQo5GrK0IrngNPhOBA4/bD5OPwo4IzhnOMP3gI/q8om7ymB7wPQP7hK9uoKuRvoghNARk1JJkxnP++7bqj+pLNwyUiyZUbUmeP4UFR3gLN3AWPx4peaz3/mByfmxajDXSLha3j95zzC6zG1VYHjo654wUkGucPIRXtJmUFgiNl0ie5le2CQayJOUmOPAuXPw3LM2+X/bYuduyG1z7cSiwjVjFp0uD9sG3yh9jni/ROtOCxUWSG0fKRRFI8Y1InKZk0Q5ZqdF8/w6N140KJfhv/5XXX0zGunIxPIyvP66xze/+RL3hxmZAetLKalcYhroyE21Oi9ZHo3mqZVmU5Oc6XSeAktT/Zxp6nTVwWaKIMcwIVIS34hwjIRx6mmCIyRxLHHDFrZbIxE2ibQphh0C5XForrEaHZD4PoeZRR5nCClJs4xbo1VerG5hmxlB5lC0ppxL2ph1k3tjwTDz+c/p/4Nf5RssO0NeX3lIrRDz7sECo57gUNpkmWQ41Pvd68GFC7psOwx16moWdUkSaCwYGNaEghVRH7QYJw5Tq4ZJykT5jFURISF2K6QDiWlyosHZ39cEZdnpsPHul7AmPUqNOqWlGMIRk/Y27WmL7OXPwPnGSXp2Mplrm4JpTrg3QgUuuV9iwR4iURypRVRhETEc0f/DA0rJIvnXtnH6RRbWG/jmcS7xQ7rwlkrw7HOSa+USyX//E/LRmGF5jf18hda
0gJpG5LbDYfUyra/Pr2fL0sd4fX0uij6NH0Yfpx8FnBGcM5zhe8DjEZPDsMIw8efmen4DdTDGGsy9dp5WE7In4XHSYcuMC4V5Da2aRvAdhtc/bHI2Xn35A9Vgo8TlzmiFUeKSD3rkcgpZgEdIXbXo2UtMVYGRaDAwlxm7y7hBQMGIKTddKhV4+WWdAooiHUHICi+S5W+j9g9pLBlUCwkPBjX+dHed2PLpWufgMCUJcoTl4BoxBTPCSAKiaUae5BSTIWuDezy3Z7NX/EW+eLdJu60X3YUFnaq6fh1+7/fg8FCipOTCFYgii+lIE65yWS/M7TYnlVGz1FSlosmSUnqhG4/nz8njBovCMjBJkUqRKRdLpjgyBRRSKLJMt0S2CSmJMRNKpLlJKLTWQ+Q59riFkzkULcFA1omRlGRGGgtu9Ze5VtmnoMaM21PKpRzzsgvfusu2uc4wK/Ff917jV1a+zorf5xnrDpXiNrcmG+ypl2m3nZMy93JZi7MdZ050JpP5OdHrlkjLTcq9B8TNMs5gSi0YMKZILqFCl6C8QlD2TsrNx2NN9tJUE5T2XsD96fOUagbnRZtl0Uf6HoUNF29nE+MwhVd+jaOWPPnu6fTYJ2sQMAnGpJaPUIJOVGYkEyp2gIGiUI4JexG77/bI9k0c7wrDzhhLRdScgGY1/sguvKc1cpXdPcrJLs9YFvLZNYzXXqHnltndnacpk0RXbp2u3nJdHdVbVruI3/3h83H6UcAZwTnDGb4HnI6YKMW8k7EAWyYUiwGF8AHF5RsY5z9+xOS7xdPuIfTt0m/GK5+mfxBy516DqdNAGQZkGWo6wo8HNCa3iRMJrktsFtl3zjM0G5Bl+OmIWnpEwY65ftnl2mddBDn9Bz3efz8nH09035Ful+VkC3t4yIPJFd7xrpDYBbrOMrJaJjc88jDAIcJzBRf8I4ZTg6OxhZlFrMsDrlq3cKaH/P79/4X9BzEDJ8X2TNbWdKpqaQl++7f1giUlXL48Lx92HE1GFxd1JGO2YA8Gc91KmurogutqUrO3pwlQoTAvuU4MV5dGTwcIu4AhFVLmSKEQKGSWgkrJHY+GaNFTdXIUE7PKyvQuI7PMnnuZdfZJRYEwd0iUzdQvYxMTxIrDrk3DNqktLRJev0LFTViT+0jf5zDKaYVlvrj3Ap9Pf5u14W2aWUox/iYLpYRb5VeZTH0ePtQkbTTSf2/dmpdYzxr8gSR/YZXuVycwDags2CSZixkK6uEBA3cZub5AtaJFzGGoU7JCHIuGkxRrmqDsGpPApBuX8Y2QT9e2qDmTE+LhDDtsbGjikWWaRBYKkNlTpre2GBXXiHOLRBkkuUmSGaQCDOlDNkBEEeW4hR1NGIUOIR49BJnXo7lqYxS9D+3C+1Gi2iY6andynWTzcvXhcC6Yvnc3586tEXn/BWS5SCGMWBddmt4Aue5+on2cfhRwRnDOcIbvAacjJsL3HknRwHE3Y1t83wSJT7NnxYdWSXkeg8Ur3N3xmP5ZAbH6BZTaR43G+IxZt48ImoskBxNiqwgq5sBcp2uukAsoZgOcLKAR7fLc9G1K5xs4V/939t8+ovfNe+SdLmo6xeh3WGcH88I57i29xEGhRDIK6MkFZG0B4ZdIUoltgW0KNkYPSIwiw9QnHfVoJAEX7B3WxR7tpMzvVf4PenKZae5Qygcsrtd4/XVJksB/+296YbIsTVgGA01KZvYa58/D3bt6XNJUp2qk1KRHCE1olNIpCtAL3qwSaFZNFccSq1nHPBhBGCIchTRyJDlkKRggM0WKhQEUGDHFIcUkF2CqlEwIbBI8GVFRY3pZBZWk2Ctl4qDIkayz9ExEWC1SLEqSsE/JCRHGIaancPMJ+5MSf5h/npddl2fy93FVwLPjP8MVIVsrP8nRpMThoSYSM+NF0F2LL1+eN+DrdBogILt9h34rwcxiSlaEWl/AOn8Ba6FIu63HZxblimNNIvNUkWU2cWZhk6LtKhze6p+nZk8oGFN
W4vewTxEPw9BpxOVlyKqw/40+rnTBdciBYeIxzVxskZDEgogyMs0IJwlGFiG8Aq7MKeRT+mOLwX0DUS9iiEuspj5V9cHU8JNEtU9K1xqGZGVFH+sZ0hT23hvw8Jstpn4FEEwSl1vJKsPY09/lNbAfDGje71G52HjqEd0fd5wRnDOc4XvAJ7Hr8tPywjmdfgNBPy5wb7zI6Fjgqxytlq6UV7n42WsEBwMmw5TQfklHeQ7+iMgo4lhj/GTIyKjj52Ouh2+wnDyEKCKVDg8LP0N+B7JvaGuHUk2yMLpFkoy45z5H62iDvFKiJxooX0EQEgwSvCI4BU0+HMdmOq0xbcfgZjSmO1wx7mIJeEe9wPviGn2riRIOC/aIC3KLlz59nQftBl/9qo7UlEpw6ZJegGfN7BxHP7a9rcdk5uqc55r4rKzM+6fMzCE3N+fRm1kPm+FQP2aUCtjeBvLBGOIxIh0j8jqYFqZtorKEOJEUZYJNQphbGFnIxKiyNr3F1Kpy6K2zwiGZEgS4TJXLdKRYWrUYDODdBw6f+YzeJ5wyhcUFirsHqJqF6ES4scV9+xm+wWcZJT6v199FXrnIpd2bNHPJ9gs/z+07Oi10+7bukxNFOpW3uamJysqKjkw1Ptfg4FIN8+EIFcUMYhenVqBYkOT5nAC2Wsf7g/68sJ8xGMQEmU2ERaoMJrkgU4I0F3Qzm4P0UyzulmnYOsJ2evGXzQbNcz71+28j6quMMh8poKoCcqWYDqck5Q2sYIAhFF1zCSFtpFCkhkvqOUwiG681pXh9ncO4xuEt/dmFgo7OuE+4J/lOtDSmCWuVMQvGt5ArOkITZQaduESqTKLMQpkmYWKwv5dzeFyoNauSrFb1ufODtKP5YccZwTnDGb4HfFK7fD6NnhX5NKQb+vS8CzCS5HBCbopWyKXSLmFryNQ+T7dXAqeGWNDRC7IBpm0gCBCOxdr4AZ9ufxlpakX1NPfYt6+A72Nduoq6c5eFaJvqokk0DLk9WadVuUAmTQZpgXxqYxYSRrGLJwV+2Gal6rJ0qch0Ct2uJF+8RGnyDhfG36Ay3SSoNPlK/hO0siodcwlXKqrukOuFba5MvsV7t67zzrYmM42GJkrttk6hWBYnTfAOD+duzKBfL6WuBspz/byUOnozi1LMGtfNmv7NhORZBk69hAwLGNSwnBhj3yOXBpYFcRgRJR7FaETJHhJiIBQEskguLCpigCFL+GqCk4wpOUXAIMwLdLt6YUxTeOMNePVVyJVkdOFFGoMW6vAu+ThH2os8l7/Fe+IFbrufZuJf5pfkLVSjRrl1hxd/5lP4hSa3b2uyt7+vF1rX1b8XdITr/HkdmVlekSwtV9jbA3ns9t3v6+e0AamOuig1rzqzii6LlZhosEdil+glZcgV/cRliI2bjqjUPA6nJQ7f06mfUkkTq1lvN+PKZfL7W6i79yitLlP2tZHWqBXSKp6jfNkjfececdMn6WWIaETRmjKWNWJloVSErYDlZcYTHckrl3Xkbqb5EUKL3uu1HPX+u6Rf/H3yMEKuLiM999tqaT5oy5Kx6vVPnlfTKak3JVyWjMz5uXN6H2b7USzq88n3v4cihB+zSq4zgnOGM3yP+H64B383+G56Vsx6gXQ6kPfLJKKJiLTGaMEeseG1iZRNOyrTmhRQQmLZNkodRyiOuwRTK5GvLLLaegMnGSOKDkpZdLMqXaMJ5GCamFcuc+FZH/Hmn5CPRoSHGX8mf4o8TRmkRcrmlEV5xL3kMt5wgpcPqKRd1sJNxP1rdOWniJwqUsK5F4qcv1Qi+7LgXus895IbtKxlJk6VkpGwUJjyfGWHFXb5yuBV7m6WCdJ5k7tWa95krlzWi/LMgDLP9eMzMrO4qEnRgwf6Oc/ThOjWrbmFg+vqyMN4rBcuw5ibPOa5BM/FqLoYXciPOyPHhSKJKmEEhzRHd+nZLxBLl1xljI0K5egB3vQ+ofRYMtvkbp04VcSGIMv0d0THzuB378LFCzn
CMGlvvMRy+sfkBy3tum5U+bT3Ld4xXmOIwf9v/9P80sI3IOlhJCHPPafJ2sOHcO+eXmxv3pyn4EolrUeyLLh4UY/J2pr+3ocP9UIMmgj5vn6dEHo8dJM/ySBZIQseYgcdHGOMmoxoZwskGGSGRTK24V7Mwqp70mV7fx/qooOzdZPl/nuYQYA6bo8sKhVkpUz58hr111+ALGf4zkPa9evYTobqDlgfvsuQCkOzQe55TGSFo6iGGOjjNetdNJnoY+W60L7T4ej2HdJbtzGmBgt2SindxlxbQZTLT6zGOrkGP0aE17p4gcLlGguneMYsDdrvz6vxRiO9nXz2cePDavXJlVuP48exkuuM4JzhDE8BP4xdPmfI87mz9unmgqJSYnHNorT7NkH1IgdhA1QJ0P1AstEEZ6WOKJdA6MV15hdUr0vy0jniXp14c8hBtMS0+oxe3cIIVwasn8/wf+Ma+d5Dxg8P2PKeQXk+MUWMbMT6+H32/GtkMqeQdvCMlPPWQ6SRE1pFBl0LEd5n8dXzXD0XYr35NaKdQ74xfo5WyaYtVjBsi7Kbc77Y4tnyHpZI+P37z7Ijz5NkBhcu6HTEjNzMutlWq3pc+n29y5Y1N3S0bb2YDwazyqmctXpIU455t11DYGDbEsvSRCBNNekwzTnBUUqP18xOQkr93NSyyBvLiOER1mAH4UAuTZx0wsSqocJ7yMGA4cIay8uS4njMpLCAs2jQauv9PX9e62XauwHVnduUxrsQhGxnPudED1ldxqxWmRglPmfc4c3eBcaxw2/vvcTP23+Ae6wXW1ubN368eVN/9t6eHhvH0Wm3hQW4fSunafWoWlOE53L+XINcSe7f1yQJ9FgVCvNzq9mEcrnMpLJK+6v3yA4PkFnCkhyRGQZTd4FJUic/6tFSVUxf9yCqmkP27zxExRbt4osUmzHV0gGN1vs6mvrTn8N88QWElORHLVxHsSF3EUseadNATq/gpykrliRXIaNhRG8NOLbc6HZ1GvKkk3Knw+Rr75AOp9hpDqUqh6LG4Tjm/J2HeFc3NMn5kGqs7zbCa5of9N2K4znpyXXBHYPBvPEh6P2uVvW4n3ZM/057df2o4IzgnOEMTwk/TF0+81wvzo+baM7usnWqRbI9fInWEajtEFGKELaJk00I+gmG56KuXEcIeeK1dNrgMm2u8eAzf4XUfotsexumU2ppm4Y3wXruWaxf+jmy5gq3/+smsXWNrrtBarg01RG71jI5ApklRJnJ1ekbWLUCubQ5jBeg6FBY8Dg3eY+lB++ibkccDmzeM3+Sjlml63jY0RA/6HHda3G9MmI8kfz+4TO01AK5V+TyZcnysjbhnE7n/WxmRo+zkl/b1nfOYajH58oVHb15912QWYQdjrE377J9C9LU0B49rk2p5J6kpuJYpxgsSxMeIeYdqGel07Obe2WaqDgitxwsS5CRYuU5oSjSL6yzEGxTnz4kGS9TL0CyUaYdSnw/JxhnHG6n1N2Azl7EzbzKS+lNClGHIHXYSle5eOdP4QULUXUIMouXavd5d7DOdCr5ovkFftluMKv383147jm9j4eHekzGY3jvPR3NGe30kA/uEveP2M0yLngHOOuLmK+/ypUraySJjgDNiM7MlyuK9P/rpRhzeJMoDekUL6IME4McK+1QiXokTpHe0CAyHG0R0RujwgrNcsg4mDBsZewnNWrZZdzxhJX/7x9Qr9cwz218IHpiSgFFXeWolIKdPaoXL7D4qjaaDUNdCTWd6utjOMxpvdEin3gUixZydIgyTCLh4XgBapSQ7e5jlErgOB9ZjfU0Iry2Pe8EPUMYzj3KZlHUbldvJ98vcwpfv0lhGOBsPNqr66OiTz8KOCM4ZzjDjwlmZbazyXAGIXQ6plLRC+3Bgd4AKC9gvPoyzr13iY/6JGNBYBjIpUWM61dYv9GgVHr0ewYD/X6lQNQbWL/wec7ZHQqDvROfrqzWZPOBJNsZcDSwiMtXqQZ7tL3zdMQyliU
YqxoXRm/jxX0cVxCLAmmYgW2wUk+4unAf4Wck793i7dJP06lcohOWiHMTz8mpmVOudL7Opb3b7GUv85X0M/RlHTyf689ZLC3pNMsscrO0pEnHwoJOhUTR3Feq09HPLS1pHcjhIXQPE9R4woZ8wIVGj9/tv4qQBl4ywOmlVC9WCcMaQcAjruCzMZ+RQdOcC2ilBMKIdByh/AJ1o0+LJkJYRJlNkBWxVUQSKrrOMiuvXmSpWGbyzhg1HBKObUIy4vgASzkIqUXWn7LfpOROGFll7ufPcPmdP6DzqV9EFZaJQsGz8Zvcda4RlFb4nf9b8tM/Pa8IEkL3wWk2dXTg1i09Ztt3A6q9fVbSDpPiIlU3ZCsp4m8estb+HfiVX8ZaX3ukWeDMNTwMwbZyxl+/RT6dUi/luFaLDIMeNeqyzTB0mBgGq9kDaBbo9g2mYUqKy1EnhtCmFI+pqD5du4FylugOhtT+r7ep/oLL+qea31H0xHV1c8nZtXJ0u087vAdVjzC3aZlrkBssGS0Q8LDwLGKa0BhY1K3RR9o3/HlFeF13XlkG+poLAk16Zo7paW9Eey+i4z2HGDkg4HzhCFtmH9kL6EcBZwTnDGf4EcasP8fpMDboRWsm2FRKE5+bjxluuq5eDwY0iGufQ+3tY0ymVJo2y6+uY9rz6UMpTWpOf49l6QXDsiToziEkCWzeA9XXd5nTQ6hmA4aVJqQW1WCfh9411uU2F0QLP91ngkdkFBBpyrlCh2srOX5Vx9+Hkc3X+WnGxnmmUQmFwFEB5+LbXBq/TT3e4ZbxLG8mP8XYbWAVHK5ckayuao3KdKrNGht+gBqlrKxBt1ei15Mn0ZZOR5OQGRGMYzg6yrHjAVKFeFWXnqiRZCZCAqaLmx5R2G8xLr1KnmujzzSdp6VmNhww/+yZgJcsI1UmieWzxAEjKiBzbKGYGE3G1gbOpMPCxTKy0cDqdPD2HhIEBRq2QSup0xUNNibvceScwzYUe2KNNfYpuWNG1Sr3hi9w5eHXYeXTKFkhW1rh2esLbE98jo7gy1/WDRevXJkfz3p9bhmys52z9+aAYWAz9G+wJrskKkNZNtQ3uN3usvEnb1P9X3RUwHXnzQK3t4/1LoMRUWeEYRiMzTICQYMWq+xSEGNaVoN+NCS2FxjlKc1ySpI/ZByZjLMCsfRI/BWsKIUsx01G5JbJQVDh6M+69EQDv7BG/XO/QunmV1B7Hz96Yhiw6E+osImorzDKfJzuGGPUZ83v0BNNhrKEyhXdwKPblcilqxSnDZaCJ2tivh8R3lkF1mlz3PTBkK7aY+RfJNDOILo8X2b6BR8RffphxxnBOcMZfkjwcSsg0lRHGE4LEkFP2isr87vofl/fjZ+O5hiGXsg6HX2XHYZahyA2b7Pafw87nSAeWmQHq4jXXyVfWmN7e15hBJo0raw8WukxS1XM2v1PJjoioMaS1HSpOxOOGleojh/w3PSbuNmEyPCZNM/BaEx1tcDKUg7+PMS+OVpiq+fRMyCXBTwzxs1GrAze4WLwPo4nec/9HG/mn2Kau7jRkGeecVnaqHD79jG5iSdUx9vED6ZsiF26d8vsmJdxlqoUa0V2dvQYSJnz7IWAq5Uh7z6s0G9bEAsuF/o8U97ly61nAYUrEyyZUy3G0GnjpmPGWZk01WRxNiaGoYnCTBh8OoKjpElumKRpjmeF1GmR4jASkAuTvmxwXdwnKPv0ujml23c4l+8RlW8wzWzcLCIC2tYqi8F9Os4aUZ4R5CnKc3GdnNDz2a68yNVfeo4jZ4OhqJJmknMNvThvbcE3v6nPoZdemp9/Rq/DVS/EdXNK+R3uupdJge1pg4ljc7HQZpy6+KUi2w8Djr4x4uorlZPfVyjoZoHDIey0YiwV65RcrhCGYEiFi9wHYFG2WAgGjGvP0F+FNMnZjwIKiaKW7DExioRGidAsoBAYUYrKTKTr408O6O6u0/ULHNnL1K//Oua5EWuVMX7V/ljRk1kFlIgiKr6
ktBaQ39lGDWMW/YhmnqOSkMkooFO+jLh2hTCUbG0dv/9Ej/aDLfWWvkvJTSiLfUTpCczrR9g89IzgnOEMPwT4dhUQSaJJzXj86PtME5aXcvxAE6Og63F7t06uTlV6HE/EYagXtJmYVgioZwcU3/nvqLEWJ+KWIYwY3TviYOc95CsusqF7/Cwvz6tnZjhNbIZD/fmLi3pRTxKorxc4uLsI/T1WmhNEpUkpMxhEqwgpkaMxF9wDLDMGfw0hBHFu8M3OZV1SHOe4Rh9hxJStmKXRe1ycfousXOMN8SLvqueJpU3RjLnKJgsdmzv56wSBxIgnlFt3yKKMc7URobHI3rCJmIzw9w9pxc/S6/lYMmYhP6Jy9y6bNy0OsjV8YaJUjOUK+kmJSepgGTmOkWCJDNcGxhlGFp8IjGe/eVZt5jiaZJ2O4EgJmWUjfI98OEIZObZMqdAjYoOhKhMlgsnCRYzVFQq9EXmnC6UaXpYSpLDEPtvZAqH0SKVDMR2QmB5RYuCMBuSeiTAtEulxqJZYe66OPJgLqptNvW83b2rzzX4ffubqo+ffYhLjtwLcDcVDcZFe7BNlFrthjTW3RyYsojTBDGNu39af2TjVCqpchmeuw/7Xp7RjG8II5bkUxZyVqzRFpAm19SLLr5UYP2yT5/vkcUhfVbCsnGZyj57ZQClJJW0ztJsIQzCOPMw0wXN1JPHgUAIVxrJCIYdSos/XmQ7qSXhcwyPLJbh6hXx3j3wwhMkEUS5TfXaV5meex1hvEMf6+hmP55HR2fVUKOhz/7T49/uBT2Kvru8XzgjOGc7wCceHVUAE93Zp7xnEL81JBsyN/3xfv3f6O99i82FKGBtgGshGHXntKgtXG9i2JkazSRj0+1ZXtTgx/i9fIRvPOxl3oiLdtISqKlS7C3c2ufhSDc9/9G74NLGZOacvLurFPAx1qmMwgPFEsvTiMtnX92j0btPxLzC0qggjZWl6j1JdYFx5jeyNt8g373G08Bw3w4sMIo8syii5CYbjUg82WbEnLA6/ReDXeVd8mlvqOokyqZpDrtTHVFXErb06ITFmwaY03kbFCetLCYny2Z82yKWFV87pDkv0DkIs10CNJzTkHuvNAW9Mn2Ey0aVRF6c3uVHZ5Xb0KVQucY2IJLeoOCElBqSGQ5RrCw/H0eQlSfQmhD5O8GgDO9uGSWJgnltD3upi9LsUiyaRUUSQINKIqVUjuHGJ9ZpBrxMzTGwKlsVzhYfcHq0SjcdU0pyetUTLPcfG9CahWcExMqLEgWlIvRIy8JoMEw+/B0uLOXm3T7+bkZs2pUqJGzckb70Fh1tTfvtrXX5WbGEs1BB5jup08UaHXHj4JeTFX2ChWGNrukiWSe5PFliVB7hmSoyDiz6/2m1dUn7iXl+v0WhIKu3btFWJXlCl75TpyypLyQ7F7iGyXsf6/E8hpMQXEVeK+8TxiJ1JgzTv0shbuNkIMgVSYNNG5TaB5ZEYFmE4jy7W6zpy2O/r87DX08dgaemDxByeXAElCj7y3Dpi7wCxvob1iz+H8cJzJ9GgWYUdzKucDg/nvW3u3z/+7FNi/m8X3flee9d8Unt1fT9wRnDOcIZPMB63S8iUwV5YJ0qtE5Ihb2/i/kyNlVV5kvvPMth544jeH76HChSiVEGULAr5iEr7TdrjLm0+c0KMhNAT8yx9BZAfPWokOkw8upFWFHtmwsrSADnawRlfAl9rC04Tm9liMrtrnU71hB4EWn9Tr+vvWVxs0DZeovt+CdXp4k9HrDpdZLMICrL33ieZRrw9vUpvUmLgCCwzxnENXJnTDB+y3voq/k6XHlXea77CoVpF5CmL9LhYHVNxct7vX2CaKhyRUrEzGA9YqITkuAwTjyC1KVghUikmokgUS0QacU3d5frakJujq7SiCkUzQFgCI4TebsDW4gKmTDFFjpQpTbeHGIzIm2tMReHE7kBKHaWZLbizhX4WRZj10xmPQdWXkC/kRHcnOP0WPVkCM8f1wV7
dYFhewHVB2DaWmUOcMDSKLIoWe1FK0wwJU59Y2AzMBSp5n7ZcZCW7Q46gbyywviFplUvsv9dB7X6V2uEDoqDCSFYR9Trutau8/FKNb31xSB45/K7zi3z+4X/FGvZQWQq5QrbbXLT/B4PnPkvBjLk7XiJXcDRycRsLrCwUUUqfD1Lqc6NQgBV2yb72de011u/TiFvUTYPD9AITWeYgqyAXfoLzv3qDwrkNfY56LrJSxqmUuXDrNmo4RpgGNXFA5JXY8Z/DAkQ0xFsrIS95xMm8oqjT0efhzILi8FA/PhjMtUXr649GWD60AurZZ75tBZQQmjjNyFMUaT3czNB0JuafNfFrNk+Rv2M8rd41n9ReXX/eOCM4ZzjDJxiPu5X34wJRqm/9XSNlcXGCPf4mTukSwm3S6eg7ZZXnpN+6hwpC7MUKq16PSebSCStMai+cEKP6F2osLcsn3kWeNhIFcGRC0x1QsaYIASqzyXsJKggfITYzArO4qO9SRyPdT0UIPcE3m3NvItvWj1FdwPzJOhfLHcwkJB/0Sf/0q6jxhFF5jTern2fiSqZTRUlMyRtL+OMjluJdrqyOkM2L7Nxb5f3gEt1kgRxJ0+6yXuhTMhJudi8zSU1sI6RcFdgipph3kXYZVEY3KlKwAurWmAeTRQaqjM2ERt6hVBWAYJrYxLkF2FwsHvCSvcPDbZdgmiNtA1MorDzE6HcRvot99Twk87tiz9OLW3as7ZyJjWcEJ8vmXWqjCIorK6SXfw1/uo+5K9ioSfbTRSZT4yQlaTVKOAtlosMetmOzbByQpAkH3kUW6HCgVkiEjZ1MmBoVjryLNOMdhF/gcP01ztkH3P/6u+wEKRcWa6w0DeRQMThqkQ5HGM9c40X1Pu/Lq0xHIb+Xfp6fcb6Eb6Yo04SjFmpnl1rjJqXVDUQc0u0b7BtrGEtNdvfkSSooTTXRGz3s0P/6eyzFLcpLi1Crkj/chnaH5eABLC1ysPYZkuc+xUG9yeGtYzuO06mWF2+gbt5CRTHC93BsmyvDN8ktl1b1J4iuX0GaEs/U0cgs02Q7ijTBnAm+63V9vu7t6XGfTOYdqBcXNSl7WhVQjqM7Y8O8q3O7/cEmfrNqPX+wS/rbT693zQ9zr67vFmcE5wxn+ATjcZJRNAMq1hTruAJCZZJh1+bBbRCnel8wGrE0vElp2UT4MbkSdCY6+mLJnJUZMTIuIcSTKzs+2GY+xTHS+QuiiNT0eHhYhkAvHu22XhhmHk2zpmMHB/MeHpalF52ZGBOOK7pKoNqQxznZG2+TDcdsVl/nIKzSj4qYZka5EiHGivrhOzTdEecvCYRwuDM6z92lRXo9RR4lNGixxhGFoyPeN28wNSKsfEip4ePXPYpkSEtBknCQL7Hi9UAptqaLDBMPTwRkSOr0ubFwyM50iVHmUpRT3ZhPQFpbZHewiGUJvHzAOCmwYI7xlisY1y8jGw3Ujl5QPU9vcZSTTyPyMEeOU5QqYRh6gZn5W0k570YcRgYrV9YpeFCsgPMQRuO50/mNG5K7167C4G1Uu0vbrSBFm0Lax89yJmaZ0CvRY4Vi0iaWPiE+rm2CynnwPx6yktxnf+F5HuYNLokDlqshwinT3w/JN+9hpgHPJt/gTnaJyCrwDfFTvMrX8AuCfEnCwSH5zi6mlFyxj9g9d4Pa+Sp3O2WcSJOHUuk4kqFyjM2bpEHI4cKzHGWCS5UDzBtl1GSK2tlDXjzP5f/tZ1HC5MEDHRV88ACEkJx76TVku0M+GiMuXiRvt1H9AQxH4DjYN57jwi+9grHeoNvV5HlWkl+v63NvNNLpKdBRHdfV++f7+v9pqp/vdjXZWF2FUunpVkAJcezldZxZDgK9r0Ggydjebk76Z9vkvSaV5XMsOCNMmX/PvWt+mHp1PQ38wAjOgwcP+Mf/+B/zpS99iYODA1ZXV/mrf/Wv8vf+3t/DfjxOdwo/+7M/yx/+4R8+8thf/+t/nX/1r/7Vn/cun+EM33c8TjL
cY4IRZhb7QY0kSFEUsSwbIXR0pFaD/OGQKB2Cq5uZCBRrfgff1PkRlUnyfvKRpaEfJU6MM8n9wypycZHcKdHe09+9uqoX3plvTq+nF4/lZT2pX7igF7wZufF9fVeb7+4S/95x+Hw0ZrLX5e3az5MIj9Q08M0IhcA2cha8PssPvkL92VWEqLMfVLkzXKEf+0grpBk8ZKV3E99NuVX+DFNRxErGFLIxThzhxX3c1SrpQp3WXopTicmQHIQ1otQgyQ1UprhaPOCcarE/LNPLdRveSe5xwTvk0/a7HLR8uixgNBsYXgMnUJy7VsZZLJIreWLKKaVeYK1RB/PWDnHLI88h694hdy8jVlYRonwiPp5pdUxzHtUpl/W4JomOMLiuTq30emAsNPBeu0F2e5Ok02Y9vYuK19mvPsuCHXE4tYicAp4dY2Ux/dJ5vGiA+2d/Sh7ldBsXWfKGHAUV7o2WuVreZ8kbQt2h3xmikgx31OGql7ItLqGAB1xglT2qVkJeqyKaC9g//3nk2iqXFxpMphJ757ijcntO2hbsEVlL54QUAqG0v1nVniKKBdS5NVS/D90exmKTy5cfTXtuRauIG/8T67t/itzfQZbLUCwiGw3MV196RA9Tr+ttOtWl6TMvMN+fPz7zHgN9rEwjp2aOiccJ+20XfI8g0GXunqfP75l26mnC83SUarYfrTt9jrpdRLHIKHEYJT4XiodYMv+R713zNPEDIzg3b94kz3P+9b/+11y5coV33nmH3/qt32IymfDP/tk/+8j3/tZv/Rb/6B/9o5P/+6eL/s9whh8hnCYZubPOUVRjmupojkKhRmOqGyXWXilhnLqaHydGQnBCboCPVRr6JHFiYvlsDerkwzGpW6RXf5ZFqfvKTCZ6gS6V5v+u1fRnra3pO+PTIsuZ4PRxEfW2Wmez4CISgdvdY2HR5VCuY8mUZbfPeXMbEbdRcp1parM5WmYcGsh4RDPcYWV8CzfpcrPwWYLUwWGEb2W4CxWcZITz8A7u5ddorV8naT+gPDwi8GqEqUEnKePmExr2kMK1ddbDLe5vTtm16kilqDBA9DrI4Xt0sufJSynWoEdiVHEqPm4TEGBbOiUi5fGCOOyT3vom1jRH2ldQwsL0HVS3jwgCRPFZlHIf0erM/maZJjszU9BWS9/pz4SyGxuwoxoYn62R90f0ulfw3nufRjQhng4YZxk9axkzTVk39xhXlmg7DZa776EmU8Jmg1xNaLgj0nweDVgsTcn7KUOzQtiWFP0J59liRBGDlLZaIIoSFmsW4th6WwUhtDsUFhpcuyZPzCrv3NHnxW4HSolFsWIigJIV4JvR/KR7Qk8Wy9I9dKJIR3JUbYHt6q9iXx6yUR1hFD461eL7+v1pyknZvy7912OXZZosdh8OUXt7ROM+bjbBs0yKDZdRfJ2OXcVx5uf1LPry51H+LSUsuBNKbCIbKwS5yzD1UJz6sh/h3jVPEz8wgvOFL3yBL3zhCyf/v3TpErdu3eJf/st/+W0Jju/7LM9aN57hDD/CUEi6V36C9raP2goRJcDK8PIJzfFdnLqH9fmXMcxHJ/enVRo6EycGf/Yt7m5JVJqRGjHt8jWWnm+ytlQmCPSkXyjodEySzH2HZr5O9+/P++2cLic/LaLOV9d5c3CBYWoijBYN0UXEEa3BeaqLI6qqx4X8IUzHZIbBIPZ5q32F7tSB6YTFZI8VdnDVlJuVnyQwyyyON4m8KpZQmJMpxUKO33vIcPcZhqLGyms58Z2MewcVxpmkwIjM9mk84/Hqz5d5+OZP0Nm6RzYNmIgS58Itnh/9ETvWOjvFZ3BsQTnYp7OvWLqaI4RWaSeJJiGgq9Gie7s40xRnoYw5FmS5wPJMRLlEPpwg1JDctzFNeRK5ma3Xp5snziqBZiRnOJwLU5WSiEqFtFLh0nNFbv32Jp1eRIMWqXSInAoD36HkZYS5oO+vUe6/C+MpbVlmo9DGtZP5l0URS94YeflZugeHjKeKmts
GQ5HnOWmUMnKaRM4qG4dvEv/ul0Aajwhhr19fo9XS+7i9DdPYIqdGGiiqXsIo8RilHpeKBxhCfSTxdhxNVKZTePhQEntV7kZVihasfQyiYZo6gjgr3+5255YRXtjh8u4f0xsZ9N01RkYZkaaIgyMYvMfiK9eh0uDwUJPO8VhHf4TQJOlp32OfvkHxffnozQn8SPeueZr4RGlwBoMB9VlpxUfgP/yH/8C///f/nuXlZX7913+df/AP/sFHRnGiKCKK5ncJw+HwqezvGc7w54VeT+fklQLsJYxXX0Zs3map/z5uNNZ+R5c2PrQC4mmVhiYJ3JusoZ5fQayOODqE5iKcq5eIYkmW6fD67C54Jhz2PD3xb2/Pq1hcV4fhT9/1zkTUw/I52pNFRomHZSeUnSHhRDJy1mkOH7AY96mHe2RpCtOAQ3uDO+01eiUfognLyTYb9gHECTfdTzE1KqxEDyjnXdI0Yuo0KY8PcIOQobdA6xBWr0OaNrhbrpESoyaCDMnVZwxW1vRvyysNDus+0KXSP4AwomgGHJSrhOYCpgWxKbDGAcvDfQQvA3PRthDANCAdTEmLFRwBUigEClNkSAHKc2EaIJwMw5An/XIsSxPGWfXZrOv0zPxzVnG1u6uJT7erHwsCOJRr1F8TJEdfZVpYpIJFK/PBzonzFAEEdhnPLlIaHRCUi2xPFrhcOkAK9QgJXv8LN2B7l+7dDr2kSCPcomuuYBRdUq+E2t/lrnWdqw2J9N0PCGGb62sn5dCTusudvWXy4R6BqNJ0hlgy595ombozpNrZPSHeH1Ye7fu6WeBopH/7eKwbVlarc8uCj4IQcz3YeAw7Ozn57TuMxwKnWeWq3WIYFximLkNrFTUck723T+kzNcplSaEwH3fLmqcTi0WtJfuo3jofFz/OvWueJj4xBGdzc5N/8S/+xbeN3vyVv/JXOH/+PKurq7z11lv87b/9t7l16xb/+T//5w99zz/5J/+Ef/gP/+HT3uUznOGpYjLRIfTHfaJWVqD8TAP1k59Bta987AqI76U09LTuIc/h6EjSaFRYq+tQf5ppvjQrBy8U5j09Ll3Si/Lt2/PfcOHCo+W3s8UrvfuAO/1FWGogBVwt7TFKPI5K58niHsuTTTZ6b+L4hrYBGI14YF1jy3qW/tTDivZpZgecs/ZZSo/4kvo8U8NjObpH1RyDKQlyl7rqYHomk7HFoWyw2IA0zdm/F6ImOd2Bi1uQLDUltg3PP68Xzb09kAWPSbTAerjNjUaHjvfTHEYXceOMghES5A6WA95gDzW8irdUOUmBZBnILMHJpiSyjFIBC86AIHMxZ63yTQNDZQiVn3Q4nkV/QP/70iWdVpmNdRTNXxMEeny73fljkwlcXbfo+Dap6bDgBPhZh8OogiGm1O0RvYlDp3Ae1+1S7j5gVFjlrmpyxXrwCAmWpsnar76E+i9fYdBT9L0bLBcnHKUNeLiNQiLX17iX+1wWB8gnCGFtW3L9OmxtSdzXPe7+SYFkEHHk+9S9ACeb0m4ndP1nufrKc+R7+9+2PLpU0kSn19MVZTOn7cdduD8KxSJcq3cYj95mt3KJDMEwLnCpdIBAcRhW6QqLdh/i7QBRKJzopGZeXb2ePjblsiZdUmryNEvPfjf4ce5d8zTx1AnO3/k7f4d/+k//6Ue+5v333+eZZ545+f/u7i5f+MIX+Mt/+S/zW7/1Wx/53r/21/7ayb9v3LjBysoKv/ALv8Ddu3e5fPnyE9/zd//u3+Vv/s2/efL/4XDIxqxe7wxn+AEijvWd4KkA4yMVFqejHd9NBcR3Whp6mtjMQvm1mr4zzvN5qS9oEuO683SU9p2avx/0RP94UHbW22O83WV3WCHr9jDimPKKR08tcxhWKPkBRUtx7t23kckE8gJqOOBW8TUOSlcZmE28oENzss256XvUvSnvFj8DtsNysEUzPSA1fJQQVNIWqBIpFgOjiMcYOeoT3LpH96hEPyviiyFZ4lJeMvn0p8s
cHemx6Hb1724UE0RX0lix2RxXmE5dLJlStgJGoU/VGWPGEWkc4zh60XMcvfD5JQPDShinkFkSS2TkMsEUOQggzTCEQpg68lMo6PfPxjcIdApEynkn6Ciae2T1+zqi4Lr6+yoVHU3bTxosrZuo+3t0zfMkuUnNnJAog2HqUYoOCRZWaa0+h+y9j9PdJxybPLRKnL9YeYQEG+trrP/Pn0H+4W16+wFHA5dlNtmTHsbaIlbBIVOwNWlysXj0RCHsjOiOGg2EgP672+y0HVQQYhkei8s+8uoV7u5A+Y0/oB7ufqzy6FpNb60WJ20S2m1NQGYu5h8FFYSYScDFxS4IySDxkSh9c+H1WbYzhuGQdu0iI7tAp6PfVyzqiFmxqK/V6LhibOYef3ioj8/6uj423yl+XHvXPE08dYLzt/7W3+I3f/M3P/I1ly5dOvn33t4eP/dzP8dnP/tZ/s2/+Tff8fd95jOfAXQE6MMIjuM4ON/v/thnOMOHIMt0dcnjXlHl8tzR+mni4xCjx4lNp6MXh8XFY1dwoSfuWTWMYcx1B7O71Z0dHTkAHYW4ePGDIsyZoHi7XyQsXIAlk4Voh84gYy9zGNRdFssDGu6YZrpHikKsraKE4P30OoeFS4xFhRIjFp0OG8kDyqLH+6XP029epyoCFkcHhJaLSFOuRO8yEFXCdIV22iSySzRlC958i3v5dRJpIy2TJIeL8SaVrYji6BJ3p2snvVGCAJoLgmcKd+iPfBIlNTnBoJ8UsUhZlkdERgHTtgmC+TgA+AsFxEKR/n5E4kpyJKbIMEQGSukFtlBDWgZpOl8MBwOd5gtDrbWBeUXW7HjMUlJ7e9oY886d+WvHY8mVzz3P0eF7OMMjyiWLgaoyiV2MeErslmF1TffRWX8dMx9hxTGZZTO5UqLeeJQEG+trrP9vK5i3+vTaGa3RmPV3fof9ykUyJfHMkKp1KvT0IULYUgmu/0SDO40a5d6Iu/dXwTM4Krk6QvhnX6U7sukvvMIF9wjrY5ZHN5s6cjMzfd3f19v6+qMNLB/Hab2L8HVF1yOIIkpOwsJ1yOv6hmQwmFsySKkJzszrTUp9DI6ONFmdda6uVPT1/Z0EXX4ce9c8TTx1gtNsNmk2P95d5u7uLj/3cz/HK6+8wr/9t/8W+V0ctDfeeAOAlVm88Axn+ARCKX1X2e0+moJyHF1h9BGdEf5ccZrYAHQ6OSVG1FUMExuzUSLN5In2I8/nqaZiEVZXFYfthG++CYYUuLbJhQviiXesKs+Z/tm3uN9tIhbqCASrXpfdpefo5ilxlLE8eJ+NRY9CPiLf2UMYJnJthc5OwNBbIhUOVXos0Oa8uIdHn/cXfoZuVsY3Ys65HQZOFYHi0uhbEMdIN2cgqvT9VVb8IVkn5bZ5EbNo0wnL2CJlyR9hWkWejb7KvT8wiF5cYTDQOpxGA0TBo3muyK3bksNCBd+MEChaYRmZJ/j9HdLmGqJUJI4fi8hJiXHtKl7vDml3ROYXsW0wwzFqMEQ4HmazhggkSTK3dQiC+WI4HGpiU6noY+W6c43OrImelHohVWquyWlba1z4VcnWl7fYPTRxVZ+mVBwVL2Ceq7N4qUyrBWEo8VYrNJua3B61oFD8oG+SkJKVZ+vIQ+g+sNmT5zgnd9hR5whSF89IOOESHyGENQx45hnJ4WEFsza/NnbuTLCOEpoVjxzBg/ES64U2npF8rPLoWUp3eflY1DzVxFsIHWF8osv3d6B3MeW8nPt0amxmdVKp6O8oFPQNS5bp55JEnxODwam0c/mD+zK7Th4nNPKsFPy7wg9Mg7O7u8vP/uzPcv78ef7ZP/tntE6Z4cwqpHZ3d/mFX/gF/t2/+3e8/vrr3L17l//4H/8jv/Irv0Kj0eCtt97ib/yNv8HP/MzP8OKLL/6gfsoZfszwnXjDDAb6jvI0qZFSk5pZaucHgceJTb8PXtSlcu82eaeLkU2JDJ+8UUdcvQq1BoYx79Ny+TL0xyFf+uOISRi
TKyiVU1bLJlFWwuWDi9rRnT6HDySi5FG2QlwjYXfa4IAqxcUejfYW57tfx9pZRpWKyIvnwbE4zBbZFQapdDjPfZY5wGOKnU1533mJXuUSbm+X84O3GFoXwXW42PkawrJIFlZoLb1G2zrHcnGMPJzSFg2U5zFMCjgyQQpF2Qp5trKLShtER3327wdAgTjWEaoXX5Rko9fg4bsM++B6UGJMPhJUgl388D4Dw6f4xh8yPP8pYC7+jCIoNhq4n5JEm9tkgxFGMET6A2T9BqysYxtFRKiPy2yMo2hOcIJAL9ppqqND5bJevEejeUqr3daL+NbWvLdLvw/Lz6xgpkustMeoJKY99ig6HkpIRiNN4Ho9He2QUvd62dvTlW/Xrj054rC0BCov0W7UeXiYceHcAVvTJTpDmwSDRW+I6vYwLl38SCHsaR+oSgU231S4icNO2uCie0iUORgin7/hY5ZHzwhNlunxiGP993R7gpPXfpd6l1lqLI7h4UN9XNptfc3btiaZpqnPn9kx3N3V5HRWJff4Dc7TsmY4g8YPjOB88YtfZHNzk83NTdbX1x95Th3PukmScOvWLabHV6tt2/zu7/4u//yf/3MmkwkbGxv8pb/0l/j7f//vf9/3/ww/nvg4E9BsIktPNf2dVW58L8LDp4HHic1wqO84y1mH7I1v4oQ9wmKT2C4j4oT8qIUYjnQV10KD8+f1BH1rM+L21oQ0zfFcydJaglKK7ihgEsVcW29QKWiSk2WwuQnZUQZpxkZ1gGdlbI0bHIRVmu6AshWzuihRD5axfv5nMS5dgHqNO//xKxw8iJiaiuV8l0vGA0wylFK8n1ylU1zH8+Gi6DGoXYHxhIvmNoZQ5IbFzupPcGSfY0H0cHpHTIwih+55MmwKZkiaS5bcIUUrYNEdcmewTpCGjIYKvLl+qNmEO/1Vps96FO+0iHsTWhOQTFl2e0zPvYyQLubOPZJWjv/MqyRe9cRcE8BbrhE6FcR4iuMFFJ55FeNmAzKJEc6Pzyz9lGXzFN90qonA0ZFOi8xSmbOKqpkmZxY8D8N5FKfVgsuXJXcpc3AAzfNz7dcs7WiaepHt9+e/t9XSKa/r1598Li2vSNQrF2n//ogHdyLOxV/ifnKObpoRpX3WCn3kz/30t02nzMq/79+HZ65mbO8Iokhwj2UKcsKqE4MEyzS+4/Jow9AC7TTV532e679S6sdnerLvRe9i2zo9ONOsdTqaMB4c6Ofrdf0bZ5qdmQnnZKLng1m0rxIfUv3T30GNx4i6NjVlMiV9/zZ5q439q184IznfIX5gBOc3f/M3v61W58KFCydkB2BjY+MDXYzPcIbvFz7M1Tu7v0Xc6tN+5VeIio+Gkms1vVj8oFPmjxOb8VhPuuUyoHLc+28zDEPCxhoCgRA5ueMgHBvV7rKw/U2aP/kLTKaSmzcV20chaZqzcS7HcXO0WtagXJAMJzG7rRFl32E0EuztKcI4RRhw0XmIFVc4zJYJc5sVr8ey16dsBeSTCCxTf6eCO5uSg9oLRIf3WB6/xeXgbWSlhMpzbiZX6TirOCWHK8l79Jafw/zMq1ysdLHSG+SDPre+MWHYiikMdjFlQNpY4K58Eau1T5wJLJlRsSKEgBvVh0xSBxWnHOUL5MIkCnV048aNeXl2O6nhXCtTvvkNHuYlpOtSaE7Ict24buxfYnKQYW9vIp5/GdOcl4wbBuS5RHlFzKUixhLI24/2vElTvWDOyItt60X4dFXVbFGepalmuqkk0e9fXDz29zrGjPjMnLPDUEd+lpf1c5alF+HDQ62rShK9AM9Ey9vbcw+lx7Hy4hJ5Z432F4+4P13iUvYuD/znCQorbNmrXPrGG8ilpScuzHme0x4EREmKY5mcP+9y6DsEjYTp/j22squMMDkYX+WKu6mb3/U7uFcvf8fl0aapo1FxrIlUnmvSbdta+Pw0PKdOl5/PuicHASeiZM/TJNVx5iacaaqJkCFzos0HtHpNROEcSw/fwR8
eQp6BNMg6HRLbQf6//sqZ/uY7wCemTPwMZ/gk43FXbyEEuRK05DKDykVtXvn2PcyfaFAoSlZW5gvRDxKPE5uZmeBMdFkqwWh3xPBgiigWcYwMW8aMEx8BlO2AxeU+6dGUW19/HcoVwjhFWQHnV3J9V30KQgh816Q/CXnvVkoUKg56Y0x/gu0neG6J3kMPe8nGMjlpP58PhmTv3wLDIPi9/8F9LnPgXcI+t8K5z53jws13yN5Q5K02t0qv0y6cxy0YXM/epWOvY1y7wsVLBq6rCebO1gbJhTbJcJtG0kZlsNUrY/t9BkYZO5mC7eKZCReLR9gy48G4yXAQEXhNTN+mdEpEvb+vx1JKiAYRxjRFuC51P2LRGbIf1DFlRpg7BK6H6g6Rkwn+Qgkp50Jtw3hUnwOAyhFhgIoMskmOVXExDEmazq0FhkNNSoWY98OZNf0LAr14dto5u+8NWK+MORyU6aoS1Zqk39eL7MWLunQ/ijS56fc56bkzmWjyc3ioF+e1Nf2aw8O5K/wslfTorucs9O6QVBXD1cts5Re5UjjgvjpHBtzudLn+BFHwTmvAza024zBBoU9OU0p8z8I9bxLtJ6yN3uDQvYZhGdyfXKLb2eNyYUD1xgs43+Uib9s6WhQEOq0Ux3pMZpYhT8uradY9Oc/1uTMY6Gja/r5+fkY4i0U9T6jBiEF7ylCsUtzfJkuq2G6Fc8aOZkGjEem3voX5yqcxX3zhe96/Hxd8AqbgM5zhk4/HXb0fThaIMm1KIwCz4rE8fIdK49InQhD4OLGZtfaf6X5KJf3YaAR5FEOWcbHWxTKhHxfIVMyq10UKxcFkiUEQYUYxpoSN8xmT7RjDeLIyOs8MDnctQj/gaDDBrQzIckEwlLTLr1Fv30bt7bKxEmDkRfJWh+ymbpqTPfM8W84zHIzLOINDmg+2uPbSC8if+N9J336Jd/9Hh87AwZUJz9p3aVeew7h2hfOfbpwIm7td7Vjd+sY2y+kOhYbHkVpgOqoQ9nNsOUWSUw0O8EzYcA9pDRzydpeRuUZeq5NEkmoNnntOf+ZgoKMijgPZNKGd1MAzWHL62Mf+YIvugL1pncy0mQQO1iSjdpx9j2O9uFqWjorMoIYDsq191HCMilZIBgNkP8G2rxCqAkGgj9lwqLdGY+6fVChoghIE4KZD4r09BsE9mmITW64TVFdxP3seWKHV0u8tab9VRiN9PszEuKDJW7Go/x4e6scuXND2CAcHmkQ9LjqeXRfLiyUMmTOIC/SsZa66B9wbL5EWi9y+H/Bcq4OxpK+LOzsd3nlwSJLmSCkQQKYgTjPiNMWrVHFejhG3WiwON8kSj66xRr98gT9uvMT1xOVlpR4RA3+n8DxNQHSjPz2et25pHdDMN+1pYKa3W1vTx29/f96JGvTxKJXAEyFJPsQLj8iSkL63giNiEnWErRJy14Ven+Rr33zEb+sMH40zgnOGM3wMPO7qnSsBApbdHiUrRGUZ+f7kB+4N8zixmQlVZ9UjhYJ+blairkWXAt7YQcQlMD2q9oSqPWGa2uxOG6goAiNl45ygdB7GgURKQZblyMciOP2OyWgEWZ7Qj3p41QDLlIwGBuOhSW7bcHmZ8t6b9Pd7pK02/qCHdByi6zfYlZc4CKuU/SnNesZK5xbpVyOs3/g1bpsv0Hk2p5BOeWFjwMH4RcxyiZVVeULcggCODnP23mqxmOxQW7XpxSW2BssYRobwTKxogONJpFfmufRPyfZGdNUVRvUNBu5l3Ip/QpZmruhK6c92HGguCjq3TESeYZsptsy4UDwkV3rRcbKAyDDJ5dyVcWaSedqoMdvZJbu5iwoVtm+BslGmjdHZoWBIhv41xmPnJNo2mej9abf1wuz7+vOMeIKxdZM8zojKZaL6eVbiMZuHLXZ+Z0Tlpw0m3iL9vhYQ37ql3+c4esGtVOYu1uWyfqxW0489eKCrhra2dGrn+vXHOlGfui4W5RDXSPANHaK6VDzkoaoRjDNu3YZ
nmjCcBLy31SJJFbapq/PSLD85X9NMMQ5i4lqD/LUasjMiHBQwXIdevIzjwjtvKYw05VM3rO+ZiBSLullgvz8vLx8MvrNmgR8X5bLeZp5Y06mOrI1GYIQFyrmBOR5TclIM0YckZWta4/JoSx+cLCN75x3Sd97FevHG0925H1GcEZwznOFj4HHzygvF1qMv+AF7wzxObOJ4XqUBejE8LSKFeX8QldeJT5XJ5kjujZdBaUPP4mSf1StF7A2tuC24FhXfpTsKKBckQgiyDA53bRQQRgml6pQwi7EMSWvfIUn1vvnVCalZYHP5JylOhiwGI9bf+RZq5RI9Y4ODoErNGbHiDmi6QxQ1sp1d7nx1xOG0guNKXnyxyN5eEVHRof5ZM7dZxczhgymN4CGlmsUgKbE5XsE3QsaZiyUVwnWwkz5rn7tM7fKvsfUgx8xdkqCEaEnCcN4lF/SCP2ukl2UwyIuYfo/KdJ8lpweAJXMOwhJJLigEbahtkPreiWnmrLeRZR0TBKVTnkR1RKWBJSPENNManKJFYdhHjccM+hbnzsmTYzr7nNlxXV3JGb+1SxgJrEqZHoK9UHCllCKbdfJWl+rm15i88Bc5OJBUq3MBcber029ZpsnTZKL3b2VF/+ZZxKbVmr/n9u1HRcePXxfl031wgA1jlz3bI7Jtbt9W9LMOcZJhHqft4lPkZgal0K8xJHGtiKrAsGcjrB5J6pHFNru7MB3Dyy9/d030HsdMEzNrEjjbTvumPS3MPLFAE5xWC8Yjn5a7Qa5MKkZMIRlS6G/jJQMwjl1XfQ81DUi/+PvIev1McPwxcBbnOsMZPgZmvTLyTvcR4TvMe2XItbXvuzdMksDNmzmb3xyQHbWIO0OSJD8RpzqOXpym0zm5WVrSi/csMjArk5WlIodbIXfbNVSWQRxxof8tVqrhI2WyQgjWmiVsS9Idhhy1U7YfmqRZTprHLG+EmLYCJTncc06qiIqNEYZ5XPIrBONihYnr01IN7gUNFIrnyg+5KLdoRNuoyYTcsrkVnGN/X/+WGzfmOoZqVaddZrhzRxORghlhZRGBWWScOtSsIdPYhCTBzAOKdoStQq6vjMnrTaLyEp2kQqsl8bxHHdCTZF4dM9O/tNuSrNZksTDBO7iPmk5RWcZobDDphFi+SfGqtieYVdLNMgqmeSwKHo7Id/cQBR8VRRitfVQYkI9GiJ1tvKANUcB0kJz0twmCuZZnlmpiOESOhgytBRQSECilwxrn/DaiVOThQ4Xb3iZvtejf61Cv6WNQqej96vf1Qp4k+t9xrPU9lqWjGtOpJscza47d3fmYf5zrYu28Tf18iTBOebhlaqsPpYjSD5KbGXIFcZqTH//eYn2MUwhRRoBVmhBGiiCAP/5jTWqfFhYW9LUxIzUHB3Dz5gebcj4tNBr6+268KDn/QplFDhmHJrvxAlvWdQ68S2yZV0ksD0pFRLFIHkakX/0GKtfHUeU5+VGLbGub/Kh18vgZziI4ZzjDx8IPwhvmo/rtzCI2WbtDfvsOcWeISjMsUyEadeSzV1h+boG9vXm+v1bT5OZJiBtr3H/+N8hv30F1uiwHe5ScBHnp3IeWyRpSsr9jE6c5qIBqI2KxYdOsFtjcmtBv2aRZju3FOIXHlbUaB+MKStRJJwEb+R3KgzZyOiXLM5Q02PQ/zaFdx/cNbtyY9xQqFB41Vtzc1GMSx1ArmxTsCd2ozDCUMO6TJQI3m2ATo0x41r+N9K9z5+68gSEcWyv48yjF9vbcniLLNLGQEsyij3Pt0xgH45Oy4jwvElYu4Vxbwl6o4adzYvOB0yKJUXGCIoPegHK4g/SvY5gSIU3McAh5RDQMyXMH39dEIwz14tvrQRjk2Ic9RBxhuBmuEWKJHM/UY+0YKSpNSQ47NL78nxjKazw0Da5fSlh54XX2WaHX0+fEaKRJjFL69xYKWnPUbM7dzK9e1SRyNJobq37c62JpWRKkCcmDjOmkSKE6QZoffyEWAgwnwbV
SkmEFrzzGMquEoWRzU2uGPv3pp9cwc3lZj8vMzHN396ObBX6vcBy4/ovnCXckB2+/Tz+wmZh1Du11hGMSWouYSUizFFBfLZ40O8zj+KxvzkfgjOCc4QwfE99Pb5gP67ejXn6NrWhVm2B2OsRff4MsiLFLLtg+IklYar/F4VdH7GQvIxsNCgWdjnqSXmHWFyRNQTYaVH6pxor90WWyg0nI25s9WgcWFV+SZDl2uU+UprSHKQf7BuOJJM0y/MoUw8qe+BvH3SLKhEqhyPXWn+AcBkRZhlMuIByfO9kVDqYVHDXi+XND2u0iSunIwumy5aMjTWyOjnTUZWW5xPZ7Fzh4mFEO77LPBsISGNLEzCPqoy3K6TbjfnwSnTk6mle0gB4vpfTntts6NTR7rZQ6wrH2QgP7J35dm4aOQ6xWGTssERmS/DjNNYtQnK6oM00QM0FOu4dKtUO2L0IkCgwD5VgIpUgnEUmS4/uSyUQTi0YDOpsdhu/sU+3eZaW7S+I+SxwqklKFwD7WiQ1HLGzf5jBdoF24gFsqEQWK4d1dSu3fRr3wG9TqjZMuyLMFfTCYl5TPnMrTVJPIGcmZpbBs++NfF7YXYbgTiBwm/cJHnhsfOE+Px1FKhVfrMwgSEFCvFRn1bUYj+PKXtUnqx3EU/zgQQp8Hea6jRFE0bxb4uHnsU/k+KbH/ws+z1PtPLLz/PtPCCgfOJRJhQpIhLItufYNu4mAFR5y/+xDx9ptPbFvxJM+uH0ecEZwznOE7wPfDG+ZJ/XaSacr9OxKx/S7Gqw5UayS37iKDKcaCruxa89rsBQ0O7OdR7S7G5m2uvP4ZTOvJ+zbTGYCetC9fBtOUwIdXgSmlePPdgE7PxHNNvEJEqMakmcIxTToHPqYBSZrh10dPJFVKwaRb0ukWCeqyg70dYE/HxMUyeabYTa9wmDdxnIxn7bv0v+GRvLqMkJLTlnPj8bz8dm1NRxz2DyT9xevUN79IKy4T+T4uAY4KMNLo/9/en4fHdZd3//jrc5bZF0mjfbElW7azOYmTYCehUCAphtBQKD9aWiDsvUpZHvbledjCUwiU0hby0PLQUigXBQrPFyhQIE1T1uybszm2ZVuyrF2akWZfz/n8/jg6Gu2WHcmSpc/ruuYaaXTOzGfmjObcc9/v+31zsfckBIP03z0A17TgVuqLReektWuXc99u5sttD29ocDQoUjo/O1b7TltxSgNRgpDplHkqlYWDHufocSJhtNoonMqCvwZd2BhUEEiklFhliebTsctl7Kk03lrnzlIpaKgMYj10GE/OqZnVFIcYzFeYKMaoKSexTQs7KLEGBgmnRxhv3Ukm2MzO4Cgn7WZGvZcQTD5M2+D9DNW+iGLREWlPTEwHT9NGda6nSzbrvM4NDU4Gbds2p8X65Mmq6PhM/xdSSvpHp8As4Y9Y5FMBcskA/kgew1PhbLAlaL4stmkznpK0NoSZnHBSN0895bwXLr989Wa6aZrTYl+pOEJry3Kuq/8zq/M44LyOnt+7gcLIKMFymp35x7A1AxEJY7Q2k/DZJFIVSpqf44cyyFQDzW1NRD3TzQ0rmNm1lVABjkJxlqyWV8ZizPfbqUiDvmwjSBAxiTUxieg5jnbRbkRiAi0SoiM4gUezOJF2vroKAdsbM5ipAbTJbpi31kLB6Y5xaW1dei7ObEolOHykwmSygtej0dxaIpHJUynaCOkhnfChazYVUSRQl130PqQtyCQc8Y9uOhmeSsqk6A+C0DHKJU7Szbiox/TCrqYkFW8H2dE0nnSa3c+qRg3lMgyctomfztHkyWPmTBLxCKWShuEzSHg7SOphApUkXisLus4efx9mayeTBT/20BBTPROMVeqJRrWZMtK2bc51IlEdpuiOqfB6nddh/kktkXDW4/c7gZI7KmE2rqGfYTiBpL5nD+KxfqhYCLuI38hg2CUoFrA8deheE7tiUxoeJ2RKkBGsClQeeAhZ0AjU16EJQSq2CzGqo9kWgcIEpQm
NhKwQHR1FC4WobfSQwvFtMjSLiq1TrGnCN3IK0ZkmGI5SKDglTCmrLsqlUlXA607rBohGbOrEFBNjFkdTHvZcE3b+J5b5v8jmS0wknRfF8FgEarLkpoLkU358oQKmr3zmN+Ds9xFQsApEolNMZQza201yOcHkpHMsfv1ruOKKhZPsnwmG4QS/rlmglE5WyzSdjM5qBVT6ZZdgXLsf60gPWkMdhmlCIIAQgphMUVsYolTfyukJiQiFGCt4GStAd3gYIVjRzK6tggpwFIoNxHy/nUQhNN3NBLqQEA5iT8TRJqdo4zTBujqEpmFLCJkFaswsfqOEtHTsqfKctnUpnQ/mUsn5PRRysh4rabV1LegtW2J6KzQ22eRKNvlimWLWT6VoYlk2RjCLEIvrbeyKRnbK6eeercsxyiVsXWeirYOEtQ1ZsZBGmMbgaSalgV0Jo1spuhtTCBGdeS49D8TJHu7DMzWJbeUJ+JJMRLsZq72EiL9MToTQImHaPJNo0gTNoMHnwRoaZjTbRqlYIG/1YIdKlOwogYbwTHbInQbu6k9Ms+oN09i4UMtk284+NTWOfqVQqA6+dK9nl6sMA/TObYiaFKIk0HMFTDuPnywiFEDzhfFkp7AqXrJPnKSu7yEsz5Xk80HswSEiNdvJCEHJ1gkFTRobbKypMrJiMJUx8Gg2UX8Abc8uGqMWQSuBqdl0BCbozTQxaLeyozxKVyzFyVJ0JoBLJJwuKvd4t7Y6z2lw0Cn95IYT9N17jK70ISbyTZR0L/09Adp/d9ey5ZBUvkTJshFMv5cNm2BthuxkiELGh7QFnkDpzG/EeeSLZRprk4SifsCDz+ccJ9uGRx91sk6XXba6TuKuWWCh4JSsymWnbOf3O8HxM21dF5qGeeBZEE9gpzOIWC3YNnKWril4SRc7f3M3oq6F8XItRctE4nhyASue2bXZ2bq5K4ViA+L6ijDttxMy8phaGQGOz4pp0MIgu5tTBLy2054LaAJa/JP4jemTxLy29UTC8T8plaqp9aV0ObOxbWc/124+1lhE82UYncwykcwxNuQlkxYUyxXMcBKpLX6SqpT0meDGFyrMER1XTA+WbjBWascyPegBg721R/F7BIlSmHwuzw7/IFqg2g985N44xQcPURxP4QvobOuoMGG2MTQkiPU+SGaqTEH4qNWS+AM63qCXbu8p5PETDKUiCE2j7I8Q15sIZ4cxT5/ATqVm2ncHpg1k3aCkttZ5DQsFp9NmdvnJ3aZQcLI3pukEOe5rOzPvSHdeTzfAEfUxtIZ68HoxdjtDKfWWZkRtHSI5RSAXRxiCbE0bZsiHmRwn88RJilM5asPO6xcy83QE43Q3JjHqahirv5RKpA7tkoth+3aEx+N0IU0Ljw3NRtMkslShpAcxQr4ZP5183sl4uJkbKZ3nbBhO0DPZP0X50FNURsbpNfawY3sF4feTPJ0m8ePfYPWfXrabR8w5A4OmS4J1GQCKOS+FzNmJWoQQlCoWuWKFUNimu9sJZFpaqvqn8XG455616YLy+ZxAx9WD5fPO/8rQEEt2h60Uvb0N86aD6F3bkekM9vAIMp1B7+p0bu/chvCYiGKRJl+SbcEJtNn/y+tsW7FRUBkchWID4fqKyHyRUa2FdLnastHsnyRUTiC9FURLM9os75rZrq5ue67e1UkpHOPU0eoHbkvLvJPzMp1aqZTzYQ3OCTDWVODYQBJbQrksyE2FQFao2GUCtVkqS3yol/ImxazzQeuP5jDmCUvz4Sgnwvvwp6aQHkGLf5wKYRLlOnTNpjX5BPLS7pkW/IEBm8qRHiayAZqbbNoDCQZyTWR1L/UNFUbHw0wNVohEoTN3lBKtBI0ixql+KkWLXKSefEGQDDZj+EykEYFMkm3pJ5D2dVi2NnNyF8IpS9m2013letHMDgxTKec6GHRKU0JUfYcKBSdAcHU8ljUrwNE09O3taOle9OQUQtexdA9ydByZswkFAkx6fWQqGrYvQLDOy+RYmVR
G0FDMAE2UbA+akHh1C4FAolPrL2K3tDNZ3kNs+MkF748O/yi9YyYDzZdxaX2MDuGcmGeLZmtqnPJcoeBcdM3GGOjFKGSIh7to8qQYL9Wws36SE94mhgeLmF//Nzw1AShXFnTzhP0ePKZByZobAGuaJBRLk4mHKRc8SFvDH5nrpbMU1qwAytA1DMNpuR4YqB6PRMIJOh94wAnod++ezqgt874/W4JB53GTSUf/47pO19VVJ4mfC8vpmqRtr+j//3zbVmw0VICjUGwgRH2MibqLmDydQdT7EECjP0nUzDkfXCPOB5fW2LBse64IhRjsuI7SKedDe7H0+VKdWvqzrqG/3DZTympthXBY8vSpNKWyjV+EmZqyKVdsDH8e07+0fqKQ8VIuOALQYG0GTV8YBWUSEcpNECik6Jh6DMw2xuw6RKVEZ+4wlt+LdeUVCE0jmXRmZw2PG7TUpKn1VCjaJpYUVGwdS+rkPEG0fIbtF/spHatBTiRoiQxTSabo810K+QIYIZJ6PbXWJAIBfi/bkg8jJ3YzmJ+rWfD7bQaP57HzNvW1UB8LMjv5nUg418GgUx7RtKrHDFSvhZg7QBNAq6lF261jFIbhZBErk8XOZPGGooiwCWUoVQTZshe/USERCpHORYkNjUDTDpDVAxoy85STWURTDUk7grH9ChoyfQveH3p8Ei1wMdqubsoVDY+nOnm8UnHeK1NTThkumXSe07ZoikBymBFfGy3eFOmKE7A2eFO0Wv2czpXoLUbYE8MZVTKvmyfU1kpzbYiTwwnm900J4XgkZRNhKiWD3FSQQM3iGq7Z2BI0IQj7PQR9VYvo9nYn0Ozvd0pUk5PO7wMDzs+XNQxhPvbgqrdWR6POxTXvSyScS1NT1VvpbFlK17QethUXIlv72SsUG4iJCTh6TCPdeTnC76Nu8jjdxgkiWhqZyzkfZLM+uJZKY6fbLqb3sj+gFG5ACNixw7Hbnx/clH96B1bvKUQ4jNbWggiHyZ0c5unvH6YwHEcIR1QZiUC2UGYqWyAdD5JNm2gCfJH0ssFNbiowE9yE6tKLBzdxp5uqFA2R29tIPNbGSKYGI5WitXwSq7WRyeuejdHRTrHofEOeSlg02CNoXoMGX4qomSNb8REyCpRsjSIeIqQwwgH0a65i2w4dmUpTyNlIaZP11FHW/Zi5JCIxgYzH6Ug+iZ1MIfMF8nkne1Eug7c0hffxB5g6PEj6+DDhJ35L8Fc/xhqout0Vi06WQ9OczI07+dtapAPaspzjMFukrEUieH//xXguvxixoxstHELXbMzECJ5cHG1qgsLpUcjnELqBFYyi+bz4EgPIYpFiWSBzOXakHsXw6oyEd1OuaGixGNrBxcscO2++FC0Wo7fXWYObaTAMJ1sVicwVzcbHbXx2huZgmngpTEAvEvVkAIlvpJdoeRxhejhhdzkn5YAfrb0VO52h8sDDICU722rxeWbNqpiFEBCc7rqzKhqZeHjR7eZjGhq722ML5lK5XkZCOMFFY6NTUkoPp7j7+6P0PZV19FE1UQiFsHpPOf8Ps47rueKa97lBzeioYxboZvpWizOWsbZ4izioDI5Cse4kEo4Pi0vD7hh12y+h8kDujH47s9PYxVSRUwlnep8mtCW/OS42GR1gRGsmVeNHTiSInHqctut+d+YbYL5gMzrow9Al2XyJQF2acsUpEZTnncjntIHPOnHNxw1uNN0mWJslIxsY2d5JxC7R3VgkH7mIhCdAXSSI32PS0+MEEsGwAabGDv0U4HdmV5l5xvJBRnNhInaCPdoR8sYuvA21RK97EdbRnRz//lNomo4vnWHMaqTLOM6Y0YaQNtsSDyOFYGIgDzGnFGWnUtD7NPlikqA3TFH4MXw+7L4eyvG5PiO5nHMCDQarAuVSVQ4FVDM4UA1wZl4XoaGHQ5RSGWQ2i0Rg+n3omo4tPdiZLP5CEhmtxfYFMX7v+dQfOU1/f554rkizL0Ntdxua2ElFhma8a8aNNlpftni
ZQ0yXLstlpwTZ2lotSWpadbhnPA6ZrBefFsGolAmbeSq2TrIUosEaRabSNOgaGS0Gmk6q7IxsmN/NY4QieD06xXKFir0w2HUzOZlEyOm2i4eXfO+AI+fZ1VZHTXhx5z0hnCDHzajE6mx46DDpRJYTNDGR9nCx9QTeiBfR2oKdSq9qa3VTkxNYDQ05GqChIeeybZsTgLk8k3LZ+bCtuJBRAY5CsU64NXuXmhrnQ9H5QD+LDy6hMVBoIFcBIk6murNzaQHx/E4tKeF4usW5K6ZbzOMDyInLEI0NTE7C4GndETrrOcINBXShUbEEQmhzUhWzgxtNq4pI5zMT3GiSSJ1zQk/FQ0gBWoNOuaWGXEXi9ei0NYQ5fly48wbxxUK0d1rIUwmynk6yZR/xtEE8BWZxnI70gySDEYynj7K9rQOhtZFq2Al1w2RPjFLGRyhQJCPq0BC0MgyajtQ0xh4bhotryKcDyOEhAsUEQ4FuLKnT6ElRFy6hxao+I4Uax0dH16uOxz5ftXMK5mZy3J/NRRIZumZjxaewdC+6rCA1DU3Y2MIAv59g+iSGHKe0swntkkuJXnYp5oNpCuUS3t1OedN/j4bMVUcwGAaItsXLHNu3O3YBfX3VTN3wsLOvm8UpldyRHn7G/J10xR/CjjQxKWsxg5LBbA2NlQpCCLpqhxkLbiNozOrcme7mSSeS9EyWSOWKThfV9GMsJtsK1WXITQWwKjrZxNJBTnNdiEs7zyxyicWc53Lsrn6C/YfRhZecr5602cij9rPZkXySxtwJRHv7qrdWC+F0Ktq2UzIrFJxr1yzQGF+8THw25bK1tK240FEBjkJxnkmn587zCYedb8/zP8RX8sE1P0jq6jqzw+r8yeguPqNERyA+p8W8t9fJQPg8Bq2tNsPJHIZuIHC6WCRypvXXtgXZaY8bw1NZUiw6O7gJ1mWwJeQmnbYXX7CI4bMolHQaokHaGsJMjvuQ0tFP1NdDa6uGP3wVpfg4A6cFJT2LlShRFq3UWVOIUAitvp76icNUfnYY+6orGXqihD02hjc9xqS3k+bSIKORXQhh0ZV+FKEJCkaISk8P6YkKZS1IKDOEUe+naHmxENSYOeo8mTmZifjJJHhqCQad0lQo5Hw7dwOeOZO35dIZHClBy2eR6Qw0NiESo5DPY/hKlIWHkm0QrEziN9IUa64gk9UcnU9N1Nl3+jzf3e2Y3Y2MVN8HpdLiIwzcQMyyqsHZtm0LZzv5fJDozyCLJcZTBo0jD2P5mxgNdtLos4mUdQJ+gd7WQltgau7OxSIVodM7lWfK1ECCoTuBcblsYS0hTA/U5Min/FRKBpl4mFBdGjErtveYGmH/yruuDN1mx9Cv6S9OQV0rPjFFjhJF3Usq1EoqX8v2iRE8keCatFZrmhPQuCaBlQqceCiO/fBhOkqDeOsjyol4DVB5LIXiPJHNOrV4N7hxdQIr9aKZTbnsdL64wU1Dg1P3X4l9/JwJ0Ey3jYeH6QhM94IXi1iGl2MjEYpF5++7dwtam3xomqA8XZPSNYFtyRndhBvceAPFFQc3AOl42HFCDlRobzGoC/u5eFsDF2+vp1LwUSg4Oob6euebeCTipOb79v4BNDaQS2QYtpuJyCn2Bo4hW9vQaqLUbItgj45x+rv3YPWfpmyGSYbbqdFSyHKFwNQw27JPI/x+QDAo26aFNH7QDbRCnnw8Q8BK0uBNUefLVI+T14ssl8mmKkjpBAfFYrXF2h1k6g7XdPdzG39mC49dNKuEtGz0aBCtpZnWUJL60hBmOUepomHXNeEP6RSNwIyew52L5A71dP150mlHOGxZcwPg+bgtzv391fsTwlmfpjmBUXYwQf3J+wkWxknV7SLT0IUmoHbyJLl4lqHYFciamlkTQB2cbp4EyUgdmUCYcMBE0wS2LZ3s1hne9P5IHo/fqfNlEmFsu7p9xO+lWKmQLazMIFBOxJHxOG3GCK2VfgQQJMfVPIBXlMDrpTfbxGQlvKat1br
uBKE7umzsnh7sXIH+mivotTuxhb5Au6QGZz4zVAZHoXiGnKmGns87JxC3ZOHzOd+Uz6VM7k5zzkxXfkzTydqczX25E6Bnt5hqM5kESWq8yFj9lRiRMJGIk10CqAn5iQa9FMsWFctGCIHQBHbBIJ90Tgq+cB7Tu7j1fiYemh7PUA1u3IBHNy3CNSUQPuojQRpqAuTzgvFx52Ttlu7ctSQSIOpiJJo8jA1GCOg2u2pLpL170WyTHaERZCpNaXCElL0Du5xG19Pk7Wai3hRaMEokl2ObbxThMbEKRaTXS8HyktdCBAMF/J4K/XYH3lyKnY156r2zzFSmfUYwPRQKTmDg8zklCK+3GmgGAlWzQKhmcNxWZTuVRWYsrPEKutdA6BpaqYjwB/Dv3kZdIkQyG6Ti2UYhIDBSXtCNGafkujpH/5NIOHoPIZyTaCjkvCcmJ5d32HW1IKWSszZddzySjh+fDsakjdnfg8wXMOpa8FkeJuRl7Go+SbLopZiUUOuhT9axY+Dogm4eKxBkvHs3Ab8HKcFj6pRKFcqVlc2g8gaLCE1SzHrJJkIEa7Pouk2hVKFcsZnK5An5zzxhU+YLSKEhaiIEkhN0GSVyIowOdNBPFi+DpQiJ4G7SiRi761fXHHA+RjLOzvSjlBrrOC1jWLbgdLaeztC4ciJeRVSAo1A8A5ZqtTb2X0OloY2+vmpg80wt3eeXtrZvP7fJxsu1mA6Oech5t6Pv7qa9Q5vJRAAEfSb1kSCJdA6fx0ACySmNRFpg6CXMYAbdXPwbZyYRQkqB0CSh6eAmOxmcFhlLgtEctq1Rrti01YexLEF/v3PidZ/j7t3OdbnsiLLzedBlmQoGdZEynbEMZTtH0TYR6RTW00fpl9tB17B9QdJ6DXXJIfR8Gru2jka/I+glCyPBXVAqI0M1YBr4TNCCfio5A08xT4M1AjhGha7PiLV9JyISpjIdbAYCjpg1Gq1matwAx+NxMjwzupzp9411vAar4qHYdxSClzlDODNppNdDWZoEQwKvZVDAIDtVQWtsRQT8M/fjHh93dpT7vjh50lmL+14rFKpjF+bT3u60UJ8+Pa0LMWbeDvhKSUrxOJVwLSGjRMHyYJUskpafmD8DRoSRTIj2KxpJTwki4yfmiOJLl11GvmQQ0jUEEPCaWJakYjtBsFveXA6Pv4QQkkLGR34qSG1DEV0DLR5nNDtJeEcrkW2tywprhd+H5vWAtx47X0BLpwj5y0hdB8sikE7QLcYY3H0Q0Dh2bOUjTM4Ft0zsbdDYpQ1TsExK9qzTsXIiXhVUgKNQnCOLDcWkUCR/cpCBgQDa1T6nVVdzWrXPdShfpQInTlRPjrGYU5J6JsyfAG3Fp+hlJ6Ihhr67m90HYgvWK4SgrSFMtliiULIo5fxUCgYhn020Pke2CF7DpFixKFWqgU52uitGCCe4EUAh7ce2NKfLqtaJELymgc+jo2naTAbB9Y3p6qpWNE6cqAYQQ4kAtUaaA4GjgBdTszFEAWtwiEIJLM2DpZsYooLAxuMDOyUgmSQSjkOljLQkWdNA8+iIaBSfXsHQbfLRFvylIoFcHDIZpN83x2dkatc1ILQZI8CZQZpUW8VdMbEbMBiGMwW+/GvnfYOvCaEFwR9CjI9h2/XousSaSFCqL+Mz81ApU85ZlCI1RK/oYnxKo1yeW/qa7Zy7bZsT4ExOOuJhy3IC49lDSmfjBkmFQvV5bN/uDBbNpSpQsfB4oZwrEJ0cJJgeIyeCTGgCfEUatEly+g60K26ktmYfeqmaySwXK2i9o1iWjWnoRAJeCsUKpYpAn04b2rbzRKSUeExnbHuxbDnBr5jOSvnLmIakmAliD1rERh+hITWEblnk7/fh292FcWBpYe6crGX3TuyhYWQqDbYFmg6mB3PvZey5cRuptFPWGxpygsQdO575+IUF65ldJg748ellfPqscptyIl4VVICjUJwDi7VaV2yNPqsVuwbkRAJ6jrPz6lq8vnPPdQ8NVf0
z3PLBaqXO3RbT3ECC/lMSw+PB1ximq2vpB4gGfexuj/HIYwXS2TK2rNDYWiDg85POaYR8HiTOjKBMocTEiA8h7ekWYCe4Ked8WGUDbbotWOI40cYifixbcvw4+D3O83Y7y9ySj+tSOzXlvDahOg+7RRZ9bAIZnG55z+WRqTQDwYshmSYgsxRzgksyj5LUY2S1CLHcAFhZsG2SZj3C76emNYBuath2Bk3AadGCJ5Sks/wosiiQwyNz2vXT2SaQTgCWyznZGlfD4raKu9qY6rgGG/toz8z7Rps0oaIhfH7MWBTGQIsEsH1hilOnCNoTUG6kEo6hXb2Nmu4YgcNOAJXPO4/pBk9uickNsvx+5+d43CZmprGM1JLdeG6L+OCgo8txzQqnkh4CnhK5VBo7nkQvFyj4avBqNiVbJzp5kimzEVHIYcsYvakG9uyp3m/QZxIN+Eik80SCGl6PQW3YR7FiYds2tnSOvWvUZ9kSy7Lxmk7AWyhblMoWugaegCSWH0I7PkK+bJKPNhPwl8kXigRP9CLjSwtz52QtU2m0be1g2ZDLYaezaPV1mL/3fITmiLdDIadM52rd5rd2P1MWKxO7KCfi1UMFOArFOTC/1TpZDjCWd2YgCAHbGrN40o9gpnaA7+zTLdmsUzJwWe0PWJfxCY1Erh6twRmmWFOz/PZSwsiAj5Y6L3XhCts6LUwjSsBrcKQ/Pn0i8+AxfRSSEcK+CimriLdmCiQU8x5KeRMhIFw/XduRjmGbEJBNeQiGBPG4k6ny+6tePrmcoz3KZJyTuW1DKKzReeVOyj89PlNus4slUmU/CBuPZtGaPkLa14BfKzDki4FtU1sZA92HtvcSJs1L0ZIpfCGddBFwmn0Q0gbbomn/TszfuRaKpbl2+UeqGQ/Lcl4bN6hwtTaF6QqDG08YhSx2PIFVE6VctqhYFrJcwkqlEXYA/CHI5dH3XQOBPUTCGfQnarEzGthZ/Nlx/P4Yk5OOq3Mg4LxOQ0PVLjNwRnIMD0OibwrZf5py+jQJOUjIV1m0DTkSce4jm61mhlpaIDkVplDbhnz6aTQbZCCIFJIwE0xoMaa8zTTYoyQmBknUtFHfoDE87OwLc7N+qWyJgM/A7zMJ+U0y+TJeQyMWCUwHxpJSxSKdLVEb8rN3RwO9I0me7h93pmRLaOp7ikApzmDdPhK6hs0UPu8UdrQWe2xsWR+b+VlLWS4jTBPj4j0L/aX0uR46/f3Oa+1Omn+mKCfi84MKcBSKc2Bhq7UEAR2BCXx6GWlpC6Z5rwTLcr45zh7yOH9y9WogpVOCcB9nx47FW4nnr62nx/nZ6xVcdJEJVA1dZp/IXH0NAkL1aUCnUjQp55ypx6HatPOtVYLQBF6PTjolEBU/VtkgFnNOstu3V9fb318tWZ086Qhsn/1s0I25Jy7SGUb1VoTfRwfHEDlJTXmc8UAnAHVlZyy4MHQq4Tr03ZfCo48wOlRChIpgGmTyBmZ2ikiggnntNejNcw+CZQHSphTP4KGIbnspmSG8Po1w2Ak2wCY/nMROVZwIR9ZSLmSRuQJj3gpafIzSlAezXKFS6cHyFpE+g4pRRC9XKAcb0Mwc+uApKpMmpVNHKR8aQAvto1x7CalUZGawJDhCYzfA2bkTBo+mGO0ZZYd1CjsSZtR7MWGjb8k25OZmp718eLgq5m5q1hhuaUF/4iGKwo+0HX+elOWnOd/DqG8Hidjl6IkJYp406bQT5NfWVjU/btZvcDxNMlfAtiVe00BKgWloeE0dGydzUyhahAIeulpr0HWd5roQY1MZdE3gTU5Rl55CNkTY5h1hsNhKohShzZNE07QVCXPP1hgvFnMyWSdOOAH2kSMrs2JYCQsCrmUMPRXnhgpwFIpzYH4NPWrmiZqzWqPPoYY+MuKUXsA5uXd3n7sgeTmKRWYs+t221TNpDMpl50MenA989xv6bGaXrwrFMlLaxFpyZPMGhvAyPmlgmja+SBqhCTRNTAdYAiyDfMZPU40
fv99ZTHd39b6PHXOuCwVn7eEwXHzxrNLPrBPX8EAZ4/4eQhMn0XJAfQyZy5MUNVApU5cbgEgU7aJd9E1G0E0PHS/eS99v+qnEp9AyGSbkdrw1XnY9L4re3rzgucaPjFJ5oBcrnqZSKaDrOplIG+Zl7YS3xYgfj2P39JCOx7HLNjlvjlyog6QnQkSCN5XEOz7K9sok4/7t5E0ftq4j83kqlSSFiUnyZYvckZ9hTu3AY7ZTCTdQ8mSdid0TJ7AatsGe2IIWdACPaSOHhjBLWfTmWibLIerIOm3I/qpR4exsR02N8x5MpZzj6445GA4GKNU0E7STZPMV/FaaJjmCGfUTjzVie3zUJnuYLJXw1zrBX19fdVSC+96IBLxkC2Uqlo2ha1iWzeDEdNBTrKBpgrqwn7aGMNGg838T9JnUBP1OZtCqYFgVyh4vhrBp9w6QKpp4TQPT0EFbmTD3bI3xDMN5Lu7/Z2+v81o1L3xbnDXKiXhtUQGOQnEOrGYN3R0M6NLR4Vj+rwWzx0LU11e/8S9HPl81f2tsdDInsHh7fHy0Wr7a3mWhaxFODKTo7YVQQOCNpClVBFoihVYsUDE9yFgMUailNRbAEB40YdMRSMBAHtvvY1KLIaVGOs1Me3Qo5PgHzUZoGrK+gXQC9Mu9NN/3GJWBDKKuFmJ+jLKP2uwAoj6GfvEeCAaxT9vo5RJTvlaMa5sxMymsfBkzHkCP+Km/ZOGJxhoYZPyup7BzRcxQCDy1aOUyhXgW66FHwKjHemgCf3GSQqgBwib5ikZhcAifNoHw+/GfGgTbRkQaEZqBVYJSuUxFQtGCyWN9aBo0pMeo1O5BSEkJL3lPDUZ9EDlSxjp6HHldLULTME0nCHVLZnIiTiQ3TDIQI1uRmNMjLiu2hqHZS2Y7GhqckszoaPUE3rFN0BsIUAzUoktB0bLwhCJoQT87SHF8wmBcNLOtVTI8kSQ5aRGt1Tl5IszO7urrJ4RY0NIdCc4NeoI+c87/0pwSl60R0Q0oFbE8XsoVm7BpEwn4nbb7NRbmNjc7AV9vrxPoJJNOpuxcmwdclBPx2qECHIXiHFiNGrptO1kRV6+xVGZktTh5sjofqbNz6bbh2cx2Sm5vr3bcLNYe3x+9HGvnRej1Ma7Y65SvymWw81EMI00omqe+MEng8FOYo6PYJUcDMdZ0gOCl9WTLHsxSnMipQ2jxYxRLZSqmj+Hw5dg7d+GtcYZDxmJw3XWLr7evb3qte2MY4edjjYwgc3mEJujUetDqQ2htF6FFwgxP+kAXNLdqjE7LgWxbkM7pGDpEIwszW6643MppiPo6BIKibeD1avjq/MjJcaZ+/lvKRjuhRj+5kkYxVwRbJ+sN4s+mKVPBrlQAiSyVsU0bq1LGsDLYAY18tIZAahKkJBMKY1UKlKwCOUMjZ3mJmAX8IT+F+CTF4Ti+tgbq6pygJJl0TsIyX6CLExwymhgtBtgRGKMtEMfQptM8S7Qhx2JOgDM1VQ1wQh11aPV12CPjxFoNJktR+umkS4yBlNTn+pgIdDL8m+NoiTECJZ2EFiXa4GHS2k7tnqXf1IsFPfOZKXGZBrlYPb6hQazGJvxek0jAg9djnDdhrtfrGGr29zvB9vHjTlAYU1rgDYkKcBSKc+RMNXSttQV7bHzR1PP4uCNghGkn4VX4JrgUlYrzQew+1q5dK+vEmr3G2bqDxdrjT03WUBxKoE09wp4/vARow7KcAC7g9XDVZUGyfcN47v4tejZLJVqD2RAgY9XjnZwi9eBRwruTcPI4NaWjCPd+4/XI0XHKyQLHa/cTbQpw6aWLv1alknMRwhHNyssuxb52P9aRY1Bfj+YxIeCfnr8lSU3aaE2NGPW12E/E0Y8foTSeZKLYjK7ZdGQTWC2XztFCyIk41sAQInwR2vS8KV04QYMp8+TQmBrNMdUokVPjxCsGQky3RAsoef34MgmyXj8SgcgnkVYdnlKGVLCOQjCI1ygRTMaRUlDQNISdR5clRKV
MruwlFkwT8EYo5DwkJyr42pzgeHTUOV61tU4JNeIrQ7mCqRsIAclykIBRnQC6VLajrs7J9I2POydvoWns/N1tHP9RmvhwAS1cpGwYFDJlzMkxanTBRDZDOZ2htb7CsKwnVKpQGkkw8PM0Qb+NZ9sz05NEgz4iXV4yNz4X+4470bIpjGAMoTuT1M+3MHfbtmrmdXwcJibWrqSsOHdUgKNQPAOWqqHbQ8OUfvjjBQaA5SuexUC5dWb/trYFDverSipVnRB9Nhmi06errc7d3dWAYrH2+FPZBsqmgVYfYEfyYSoP5KG5hZ7jzommqQlqoh5Kvz1BSdjYu7rQNI2sHaZSjFIyDAKJESoPDbMzMIi2rQ0hBIO5OoTXS8EIMjlhozNOaGcHzc2Ln8BcXVFnp3MtNA3jwLOQ8YTjOROrBdtGFoukxksI/zaiV3Yx8OQk1kOPUMnnIRxB9/jRKmVCg0co/3RgjhhX5guki16oManzZAibOU5mmimVLfL5IlqlTF76kUKgUyJAClOUSdv12JbEFhpBWaHi8ZKN1AAQslJk9AYyRgzLMrErNpbuGCnq5TLCyBMkjmV5qdgGPr2M18oxKUKkK36aqAasblu6W0L1Hk1SDBkULR3wgf/MJdTGRifAicerfkv+rla8BzyUnj5OXfI4E+ko/UaEXZ1+SKXojJ+ir24fIwg6ghMMiHqKtQGMqVGO/fcAl97yzCd0CyEId3di+W6a+VJhT66fMNcdtXLihPO69/Q4/1/R6HlbguIMqABHoXiGzK+hL5bhsPMljvdI7NNPoV/jJdoZW3TA5rmw1KiI2UHK2eh6enqcstli2Z757fGpsp+SZQCSbv0E0jSoHD/BiYeSaDW11NU5GQV7LI49NITZWI/wmpRtnYl8FEsKPHoFvB62DfwG/dJdCCHIVTzkKl7Kto6OTdxoojY/zrXdAWChXiGTYWYm1OwOl6WybGMNV6Hv6qb50lqO/usDyEIBUR8jWQ6g2VATKqFHF4pxhd/HlB6DUploJIsuJAJJqgDSlgRlGkv3oE3780aNSQBSVgwLgWlVkKZGxh/Dn06SqY3NvAk0ZzYCRrFILlqDLQVtU8fx1NSQs8MUK5ozgVtKSGcg3EK5LLFOnXaOu3C0Ss44DKeE2j18H09OeRnzB2kPTVLOFtAS8TNmO2pqnDJVIlHVXO18Vj090TqSqYsJUaSIl0xtieAd/w+zvoaYniFRDGOhETQLgI9UoIHQRJzRY1M0X1S3sjfgGdhIwly3GcD9IjE87GR0du5cfXNAxdmjAhyFYhVZLMORKIaIW2FkTCLjCbYN30vwupsQ4pl/IC+mhZGtbZxqeTaiLnZW3VizW8c1rToaYc4289rjg0aBcGmc+vGnqKTSYFU4zi70Bx8l/DuX0Tg9P8DdD68HslkqJYFdjmB5AniEpNGbwKgUZhZatM1pJ1ubp1PtRH0ZLikdRhQX75l3DQC7uhb+bf4JsWL6MJLOCXH8eAI7niAYNcgjKNlO23tXcGzRmUCiPka5tgk5Oo5WbwOCimWRrRgENQgW4sRDTWilMrbPnh46BZYlkeiYxTyFmjCTOzvpfOJhQpNxCqEwlmHiKecxS3lKHh+Z1mYqlkGp+CS1U0OkA7XkNQ27UMAeGEJoXVAsUf7tvRTFcYTHJFK/i8lt+0ilYkSjzvNufum1HP5RmlKqgCxPMebx0raCbEdTkxPgjI1VAxxdd3yHMiJKpN4py4yNjrO9WEZv8BLTMni0CkG9SMgo0lNuIeQrU84JEuMWdSuwIlgpG02YG4k4+rSeHiebc/To2jYLKFaGCnAUilVkfoajJ1WtCTX7k4SbppDDmVUZordYpiiXhYEegRh4hPDvXEnn1St7jNkeN8sZms1vj9fSSepPnUBOD406KS5BFit4kqM0PNiH1eCUd4TfB5Uy9lNPO4MbbQvMCnowSLjBS1hkqBjGjOI6auZIlIIMZ+vx6iXCIk2DL7uoZmR83LmORJYO5Ga
fEE+dBKY9dk4+aEHFIm/UYgMNnhQF6SHitvzPE+MKTUPbvQvSaeyBXiq1DUjLhnIFTzGJ8AsSzbsIDA0SmoxTDIapmCZ6pYxeLFHxeEi3t1GpC9K39xqaeo8RmoyjWWlsHYqBENmGRkStQblgcvLS/bSeOoIxVcIjs1haCaulHs9ECb2QQUYCyFgrWrFAaPAIE2OScfNKovud52p0tOG9zEZL5hGRNop+D55nhc+Y7RDCKZ2m006g4xpAtrU5J++JCacDbzzpYVhrp72Qg4CfsFkVLW8LjnMq4RwU4fFw8iTs2b38YNoLGU1zSlZup+Lp047xo+vlpDj/qABHoVhF5mc4TK2CoVm0+RPOl/kVenWc8XEWyRSNFqKkZABRL2mYPErtqTxy3++f8QQy2xenrq46tHExZrfHC18L9uCwE9yEwwywDTtvoYcDbOvWsAczM+UdWSggp5LYiUlEXS1xswPb9iIyWepzj2OHQmjNTdi5PEJK8paXfNlLohii1pvmquJ9aDsWakakrAqhV6IvkrLaSSYlCI8Hr2lRKpZJ6bXUGDm6AqNzXpzZYtxcDrRYjJrfvRz9RI5Efw6Rn8JXMSnWxRjqbmGy1Eze76FmdJxgIo4nm8YwGpkKN5FvroU6E5MKmbp6MrVOqcool8gTZCrViuG1COpObTEdaeTEVbUUT+vIEkTbI+SmniIkRgjWeSkKD1k7RzQgMDt8yN4ChadOIq+pBg5dXRo9x/xM5CRNep7KaByj6cyBRWurE8yMjFQDHCGcwGZiYlrQHQlTqG2lNPEwng7fnBZvj1YmlB0m27CTgieMNx7nxL8epm3qyQWDaTeTqV1dnRNsnzjhWCyspjmg4uxY19C5s7MTIcScy2c+85ll9ykUCrztbW8jFosRCoV4xStewejo6LL7KBTnizkZDqAzNE57IFGtx6+SV8f8TNF4IUKq5Mxy6AqNUdNgzpRWliObrQY3LS3LBzdQ1XZo4RD2iT7k5KTzyV2xKORtNFNnV1t+jrOsPTZO5aFHEIEAIhohXxBM2VGEprHD6MOeSkIuh/ni30OPhLFODzE4GaQn3URUm+Ky/APokcU1I65/UHPzyjQPY6M2MpkkVh6i/8kkhEP4mqLIdAYTZ9ihm4VwxbhaW9tMYJVIOPdTf3ETnpfdTOE5L8b7O9fD1ZczddHFZCLNlCuSUk2Q4/uu49j+53D8mmczuOtSJrbvoBCKILRZkzGFIB+pIR1rpBgJIzSmB5M6nVm2pYEQlAJBytEQPp9JdiRNJOYhYBQpWB7ylmf6rgRaJIQdj2OPV497C4NYR4+Re6yHyt33MfCdX1L64Y+xBmaNpl/sWIvqeJB0unq7652USkHHNiej1e/ZjT0whMzlkJbldDYNDNFSU0Db3Y03N0nhwcfJDU6RCzSgtbUgwmGs3lOUf3rHGddyoeGaA7pjRnp7q3YLivPHuucGP/nJTzI8PDxzecc73rHs9u9+97v58Y9/zPe+9z1+9atfMTQ0xB/+4R+ep9UqFMvjZjjseMIRg85isRPmuTKjaZnOFPmNIgGzSHd4GFOzndJKeflREZOT1XlX27atvPtDb2/DvOkgWksTslhCFgpQLNIaStHdZaFFptvCptdgD486wVhHO/ruXQzVXIosV9iWfgJRKiHq6xE1NRjbtmHedJBT9c/idCqKrzBF1EpQ312HucgQxXK5OrX7TDO0YNqg72cPUP7N3QTu+H8Uf3U31v0PkqndhvT78U+NIMr5OSfo+WJcV7Tt8TjBnhWqQdQ3UNveiG5oziwnQJiVmeAlVdtIKRCcCcA014tmHkKTCE062iN9VoCDE2z4vBphXZArmZgBAw3QNYs6T2bmPqIBZ+pmJlGaec7Wz+9ATiUxvDqyrpaMp37FgUV7u3M9OG8z12RxZNhGMw1EVxf5WBt2KoM9PIJMZ9C7OjFvOkj3NbXYx3oQ+RzE6hiWrUihOa7K7a3YaSfTJ+3FX5cLmaYmZwwKOB5
FR48671vF+WHdS1ThcJjmFXpeJ5NJvvrVr/Ktb32LF7zgBQB87Wtf4+KLL+a+++7j2muvXculKhRn5HwN0ZuvhQkZjrBzhjNkimaPhVjJHKr56O1teG46iIwnwOtFC4emHWVnpVHcNUwb2gmfF03z0+E3IQ8+uxthGkivBzkyiswXyNR0ULy8hYxeojZY5MCzS2gNi5dTTp50rt228OWwBgZJ/vhX2MkwgRoPU/7diIyX8HgPqVSaTNtFRNODNKeexh5OL9l67A6inP17uQwBr0kkAtlxE5DopjWzjW27a3cC3jkZnFkITSKERNoCd4tqgCPxmQYFNEzDhIKTUokaeTxa9bFqZZxJvZbJYpDIrDJmS1OFkbxJ0gpT580iY23Yg4PLDqcER1fi8zkjMrLZqmg2HAaZiJM/2kNb6ilO5xsZNHzsboyiX7QbvXP7jL5GjI1Tk+xlMtJMGQMPFmOFGpr9U4sKuTcbHo9jDjgw4HT7nTixchdxxTNj3TM4n/nMZ4jFYuzbt4/Pfe5zVFwjh0V4+OGHKZfL3HjjjTO3XXTRRWzbto177713yf2KxSKpVGrORaFYK9wMh961HZle+I12NfQGzyRT1NdXDW527Tr3zhatsQF9Z9f0GX5ucDNnDS3Nc8p2QbNMMKKj1UQQwYCTxTFNbI+PoSHoOa5R1+LjWS+Iojc1LHrydadeuyfg5XD1SgOpMKK+jtbaLJPlCMLrpdzQjswX8E+Noh94FrV/8vt4X/FyvH/8/8Pzst+fc6zcb95u2cYduFmZSpEfGsWTGSdgeJ3J17OCIDdIYfq2pUppzn4SKQVIZyM5vW9dVMfQdcYtHVFXhx1PAAuPu5aIo8ViFHyROWXMrpCjxC5UnKndE0kvmCbWiZPYY+PLvn4dHc717On21sAgrY//FHt0nEFzB5FmH8IfYGzExjr0OLJUmjluMl+gzhpHeE08wsKnF4l5Z30GryDbuBlob68KjicmnGyO62IubduZMXbqtGMOugmzWevBumZw3vnOd3LVVVdRV1fHPffcw4c//GGGh4f5m7/5m0W3HxkZwePxUDMvH93U1MTIyMiSj3Pbbbdx6623rubSFYplWWuvjnPJFM1uAxfCaQN/Jl4dK12D1tiworldJ5IxTp1y2m1jseXLTu7JdufOM69TTsQpD4wgQrsRQmBPBw+GZlGwvFghDWNqDDOfRlsmg+Dqb9y26WTPCJX7+qhMZDFKGdqsHgrhfWRqmxCxavghZwKcxTM3cxBOiUpOZ33k9FqDQY1KQSOdsyl3dqGVD+NLDJAPNlIqg1muuvnqe7oBbY7g3aNZBIwiRjGNPTjMZC5PXcGZ3VH66R14XnjDkoG3rjMz6yqfB5/XCRjN7CRa/Q5AUOPJkKnESHq7qJt6BDHPO0h4TLr0fnqtTgqWMyxzhjWeIbWR8PsdbU5vryPS7umBRkYIHr5/gSnoZhNfrwernsH50Ic+tEA4PP9y5MgRAN7znvfwvOc9j8svv5w///M/5/Of/zy33347xWLxDI9ydnz4wx8mmUzOXE7P/iqiUKwRQtOcLMf2DrTGxTMRz4SzyRTZdjW4Mc25k57Xeg1zhMmLCFG1cIjkngPkchqZjBMjXXnl0o/pdk2FQiv098kXGCzEwGPS7o87Ga9intriILJYIK8FwLJoDmeWvR836xUMOhmMxH89hD06ji9gIGK12MEgnskEjad6iCarIl/b0kBKvPks/vQU/tQUyKXLVEhtJrBxyZVzWLZEIrFraxEHD1KzvQaZz5Mazsx5zWt3OFm7vPDPyZzVW6P4x05hp/NOxiwQRHi92MOjZ9TjuJmH/n7mZYacya1jxShN/ikEgtPBi+cI3N1so0hMUO9NAnAy6/gZraYu7UJBCKcs3NYGdjzO4B1PcKxHQCi86cXX55tVz+C8973v5fWvf/2y2+xwVVfzOHDgAJVKhb6+Pvbs2bPg783NzZRKJaampuZkcUZHR5fV8Xi9XryqR0+
xCVlJpmj2LKpweOEU7vOxhuXmdnHV1cTzTRw/7mRunvWspYMvKau+Nyt9HsLvo6gFEKUyPjGBNTjM9lSaPm0nllbC5/GCz0OgZvnPCDcmcUteuWx14KYGGKEQRTuKb2qCwMljpK+6DkuCdypNbHAUfzaNsC1aT/SSqY0x2rWbTN1cIYaYzvLYlpjzmGVZolAoYWpedE1QrG2j9hUtjD+cdjyFu8TMa15XcoKxSeponNXSH4sfZrBciydgUEYjno/SVOtB29mJPTi8rB7HMJxyoG1DPllETGeGDM2m0T9FUC9iaDajhRoqupdCTuCZ5R3kZvoiYz1M+PdiGQbJFISSC4XcW4VQ0KZr5Lf0FDRELMYJq55WO0EwoKH5F7poK86eVQ9wGhoaaGg4N6HYoUOH0DRtxv10PldffTWmaXLXXXfxile8AoCjR4/S39/PdUuNF1YoNjnLuboWCtUJ22spbFyJs+xSgdCxHo2TJx3vkIYG53opXMfixsaVZ6ASIoYWqyMy8CRWps/Rh/h8WMJPuQLGZAIz4nW6wZbA1Up4vdUMhghdhMARBBsU0AwvmibQAoJgIu5kakplAkdH0EoWZb+PojdECT+R8VH8mRR9e6+ZE+S4AY60Ncc3aTrAsW1JoVghFPHh8xhksxCJaIholLIAbdZHpqupyuVnlRBP9OJLjEOwkbINFPOkvQ20tpURmoZcgdC3q8sRyJ6eDLN9lsA96poiAtuDY/TFowywjUtnlZxmB7hdA0c5kW5jRPezq6sL88BVW7IUIyfiMDTE7qYwScNkvBBhKFfHrsjwlhBfnw/WTYNz7733cv/99/P85z+fcDjMvffey7vf/W5e85rXUDttHjA4OMgNN9zAN77xDfbv3080GuVNb3oT73nPe6irqyMSifCOd7yD6667TnVQKRTzSKer7b1rPdRzpcwPhEZGnHWWSk5gs3fv0vtWKtU2bVcHsxIm4hrarp3UHf2p47kTDJJKaUiZpCJ9GFg0ZXoo/7eJdsurF/22nExWH1dOFbCLZYia2HaZoD1MsWwxZnuQUuLz2hhZi/DEGO1PPMJEsRVLNzCLBTweD1rIIlMbIzQZp7G3Z848KleILO250ZsELFsS8BmAmHkdZgdBs3FvdwOL8h13YQ0MIjwlpKZTEyoTavYjIn5nB++ZDShNc/p+w2EqLR3op04u0FSZooKZmaTS2EbSiFE7a383wDUn4nRMlBlJBzkVDXNR+9bMTszWSNVoWcJmjlxlVhZxBcdEsTzr9s7yer185zvf4Xd/93e59NJL+dSnPsW73/1uvvKVr8xsUy6XOXr0KLlcbua2v/3bv+X3f//3ecUrXsFzn/tcmpub+f73v78eT0Gh2LDE49XgZvv2jRHczKdYdLx43OzNgQPLZ2VOnHCulxojsRiuT44vaKLV1iAiEUhMMlUOggCv10ZEwph2icrDh6g8+dSi9+MKjCMRp+RVMMNU8gWswiR2OUfAKGNpAQxdI2yn8WKz8+TTRJIJLI+XiseL1HXMYoGasSE8hTyFUJjw5AT+dHLmcdznL6WYyea4aDqkckVyxdJMRsmNxeYHOcGgY2iY6xlAeDwYL/499B2dNLUaaB1tGM0NBLUcMpud1iStTOjb1QUIjYHW65bUVG2vmULb3c3omLZgXa4urfaSVoy6KKAxObnsQ25a5puC6kLOGXWxlcTXa8W6ZXCuuuoq7rvvvmW36ezsXNAC6/P5+NKXvsSXvvSltVyeQnHBMjTkuMyC02Vkmuu7nqXo7XW0QTU1jhNxKLT0tvl8tfvLbdNeCa7TcVskjWXoThtLKERrMEtCD5ExQvhFAWFGkOMTVB56FOOySxdkcVz3CiGA+hiZWBfFE3EiNVm8Xh2BoGj70YQknJuiUq6AlEiPBzwedKDB6gPNRiuUCU4lmGpsQbcqGOXSnMdyu6h0o4JtG9NDRyHgMbFtSTyZJ+A1AUE47OhtCgXnqYEjgA7fe4jEKcGInaTFP4X
W2oJWX09oYIhRy0s8n6emdAQ0HREOgWFgXHLxGYW+Hs/0a1AXgxceRH9koabK2H81jb4YExNOkO2aBc6nu9tplR4dXX6O2GZl9tiT5boLt4r4ei1Yd6M/hUKxepw4UfVr2b27+g1/o9Hf7wRhtu2c2C69dPntT51yrlfSFu5i29WgyAz7nC6kZBItEsJjGpQIA4JGRsC2EX4fMh5foHlwv2O5r6XQNCbbL6Z06gFqJnuwa+uQHg+iXELLZrF0A0vXKXr9eAs5DEBqgpBeRkqJ8Du3+/JZpGlizzIimp3BEroNZafN3OOHQMTAquhkCyUKpQrgmAtOTTmvpd9fHcBqpjMI/15y3gjCsLD6+p0oaHQEqdU4UWIwCMUS9vAIWsCPtr19RWLW7dsdXdfpchvdL1tcXF6P4/WSyTjBobHImUYIJ/gZGHAC3UX6SjY158sUdCujXjmFYhMgZdUGXgjnZLFRPxdzOefE19fnZG3OJJ9zSxh+/+InyqUYGnKu29qmvy3X1UGhgJxOFYRJomFjSMfgRUSjSE1boHnITHePz9b9WDW1pHZfRLm9HT2XxZwYRxSKWHUxRndfjK3rlEMhrGCQ2vwA9XIMQ9fQhED3mBhIgrkMhVgDemM9prnwYGm6jRBgCJOmmhDhsIamaSAkli2x7WrWJp1eZACrzws4IxFEW8u0OaBGtFaClKRzuhPYtTQ75oGnBlZkMOfzOe8xywJbLm2F4JYS3dLiYoRC08LtWd1xW4nzYQq6lVEZHIXiAseyHMMwcE4+KxldsF5I6WRvenqcQYTt7VX7/6W2d2fpno32BqqBiVP60jCedRWVxx6DqSQyHKJGjxOxJiCfRXi8aNOlgPmaB9d3p3aWYlbXBFZtDfHLn4c/k0YUi2QnY5R8fkp2LzWGiWFZlBsaqR04jUiXsH0+NE1DFIoYlQq5cJjMJZchNB2fITCFRmE6PtCAYEDDIz00RL2Eg4JC3sa2BZrmPH4uVy3rWdbCAaxhI0+67MeWIPIFpGUhDJ3GbQEyhQ5yso7a4CQE/JDPn1XHTkeHcxxPnarOWppPIFAVO88e8zCfzk4nOI/HnXLlRi2prhVrbQq6lVGvoEJxAeO6oYJzctjIwQ04guJk0skuadriZYnZtvWDT02CtKmvX1lbuLtv/MkhZDJJNFLNSOiXXYKx70rnDFosQSaDViwgamrQuncgS+VFDedc31FXI1Iqgc9jEKsxyJVsyrW1FBpbsEIhDJ9NPhQhUxvDn0lBKERx2zasSAStXMbIZdHzOWRrM6kX3IBsaaY25KOhJkhjbZBIwIvXoxPwe2hvDFAT8qEL54xvemyKJZugzzPTKu5gY08lsU72IdMZpNcpecW8KTqCE2gCZLk6AkezKzREi7Q0lJxRGUKc9bgEVwdVKlVb6Beju9u5Xs5bVYiqkeBy2Z7NzFqbgm5VVAZHobhAyeWqItqmprkZho2EtG3kRJzkRJlCMsip02FiMY3rr1+4rTUwOGMGWClaJNiBFquj9vc6gOXT9bP3Hcy2g6FT21XGOnDNjKOy+XsvQJZK2BMJtHDQOVPrGjIxtaTmYf6ATUfALdixzc/gZI5UtoSwPUjA46mQL0kmd19MbSmHMTqKVVNDcdt29HQabXKSUiBI7Z/8IZ2trQyOp0nmCpQrNpomqAn70Ct+NF+GUiWLbduUy1CuWOQKFQwjSG0ogNsqbg0Moj3QR2EsSaFwGIaHELkcdG7HjIQxcQI8YVY/6oVhUOvJModz6Nhx9TOnTy8dWOu608GXTjslqKUs0vx+J8OTzTrWASucv6xQLIsKcBSKC5BkEoaHnZ/b25fvQFpP3KCjMjDM6Vwbx+ydRKI67R0x/P76BduWf3qHoyGJ1dHv344oVmiaeILyTw/DMpqE2fuWaxsRvlr0ShG77zjleHxmX729Dc9LXlR1VE6llpwcDtU289nmg64nTkuDj2AwxuB4mtODFvlihUDUImJ4kcEWsrXPJfjUk5j
jY4hKBdswSG3vgquuItTdhRCCSMBLtlCmYtkYusaYz6S3IKitF/giNolRm3JGEoha1IX9BOwwQZ8HKaE4Eqf85B0EkgbFQAeFpt0EClns8QmsUgl270KLOP4A0u9DTKegpN/P7GTYuXbsuO+5QsERdC+VdGhtrZagYrGlt+vogCNHHNF07bTeVqF4JqgAR6G4wBgfr+pCuro27olgdtDR69/LVDiKJy/Qk5Nsf+JurI5qwDJfIFuyTWRFR/h0wvW1y9rWz993MNuMEBodtWm0uoWW92ejeZg/YBPmtoxHgz4iAS92rkIxKNm9O4pt2xwbjBPXa8k/53fxplPIfJ4sBlp9Hbu3Ncy0BAshCPmrXVSJ6U/kgNfDnu31lFIVbCnZ3QVBn8mxY8Lp6pI21tEe7HSGSHsHU1kvWVsS7twG5bJTsuo9BZdehCiVkPFJ9I42kCAHh5Gr1LHT2uqIuQcHq1PH5yMEtLQ4AXlf39KaHXDez729zuWii85qKQrFAlSAo1BcQJw+XXXz7e4+u66i88nsoCPZuAu74GMoVU+dN8v+1mHs4cycoGO+QPZ0zsnubA+OndG2fv6+9vQkbo9mAYvvu5LRElAVKs8OIueXrIQQmLqJ4YPwtDZld3tspvyU94XQAmGiAR9tDWGiwaXLQIbh3LdtO/fr85gIASH/vA3Taex4Aq2+Dq8+XYZCIiIR9F07oa8fe3IK2dcP4dBMhgpYdB7YYtmrlRCJOAFONrvwdZlNNOqUnkolR9O0VFDu9TrbJpNO+WspDx2FYiVs0I9HhUIxn2PH3BOf43GzGtPA1wo36LDr6okXoxzPNFPrydIZGsNn2AtmH822rQdo8k1RtM3pIIVlbevn77s9NIY9exr3M7C8n3/Sdruo55+g5zv2upmd2eWnoM+cY+a2GLruXFzh7vxRDB6PEyCYdpFSxUJ6vQigPTiBT3MMkEQkgnbJRdDXj/mC56Hv6JyToVrtjp3mZid4GR52MjpL0dXliMz7+pb3vGlpcQKcTMYpEfrnB3cKxQpRUm2FYoPjety4Ooc9ezZ2cAPVoKPP2ka67CNqZjE1m+3BCWeDeV07823rw2aBem+6eofLiGDn7+vRLHx6ZUX7LodrmDj7BJueXlI0unD7+cfELT/VhHyE/J4zBjdQDXBck8L5uGvxBE0wdPLTU2z8enluVqlUQguH0Hd0LujKWe2OnZoa5zqVWnzNLtOmzkhZ1TEthWvoeOrU8vepUCyHCnAUig1MpeIEN1I6XSa7d6/3ilaG8PuY0JqoFCwKtocGX4prY0eqG8wLOlzbejueWDCexRXBLtbC/Uz3XQ7XYDA2azf3xDx7tpf7kKuhhTIMJ4i17arWZzZugKPVRtFidWQSxVV9zueK2x3lehYthdttNTy8fOBimlXdk+tirVCcLSrAUSg2KMWiY2EPzof9UiLOjUg5EiNVs53RpJ+YJ0lHII4+Mxhy4QnYta1faoDjciLYZ7LvcrgBzmyDOrerarb2yZ0FvJxh4UqZXaKqVBaa3s1kk4SGtnsXBV/dqj7nc8UNAqemlt9O06rbuk7TS9HY6GTFCgVmef4oFCtHBTgKxQYkm3U6ScDRJDQ2ru96zpbePo1060U0BLN4J4fxl6bOeAJ+Jrb1a2F5v1iGYbHb3ADnbIaALsXsElWl4pR0oKrJcX8vFECLxbCuPrBhbP7djMuZRi642Z50enmTQKiWqk6fVqUqxdmjRMYKxQZjcnLueILVOHGeTwYGnJOzDNfge9blbB/5LXJofEVdO8/Etn41Le9dMfGsOZgzJ9j5U6/d7MJqiGHdElWlMjfAKZXm3n+lAkgbdAP9qivRL74IfD60YGDdbP4bG522+nh8aUM/l23bHJPKMw3ZNAznfsfGHJdj1xlZoVgJKsBRKDYQIyPVNP+OHXNPsBcC+bzT/TI66gy53L49htd781kFHStt4V7tfWfjam1m+9+45an5AmN3lMNqxBTzS1Qe00Ym0+R
PZvE2mNMlPQ1rIo7d04M9kaAkjiM8JlpbK2L/Nc5AznWipsZ5/yYSc1+7+cyeU5XLLR/E19U5WaFKxREyzzZdVCiWQwU4CsUGoa/PKT0A7Nq1MFOw0ZHSEYROTjqtw8Ggm3VYnaDjfOIa/C3mYDz/BLuapRNNq5aoioNjmMceotwPGZnA70s7YmrfZVg9J9ELGWSoBlHXgigWsXpPISfiyzo+rzVNTU6AMza2fIADTvnp+HEnk3MmU7/ubmfm2tCQ46CsRjUpVoJ6mygU64zbBl4oON9q9+y58IIbcAK0crmahbiQRNHzcVvEZ59InRlUzsT2+axW274Q0yWqyRTF396P3n8S4fdTqW9BhMNUTvZh/fYe5FSSUHMI4fVSwosI+NHaW7HTjoGitO0zP9gaIES1w+xMgmPDqI57mJhYfltdd7RoUB0uq1CcCRXgKBTriG1X28BN88LwuFmMTMYp1YyNORmOrq71XtG542Zk5h+H5TI1q+korQmb8ukhStkyZkczwuuljOkEMXU1eDPj2JUynmljv4LlmV7vXNfm9cI1+xsZOfO2bdOJpomJM2fColEn+JOy2uGmUCyHCnAUinWiXHbcicH51ut2jFxo2LYjLI7HnW/ZkcjGnY+1ElzRsFtikbaNPTaOPT4OqeSc7Iirv1lNIbhMpZ2uqEjNQnNAy8IvCshiCb2QI+ZNEzTy1b/PM1BcD4Sovh7p9Jm3dSeH9/Wd+b537XKuR0fP3IGlUCgNjkKxDhQK1Q/0hoa5ZnIXGj09TpePz+d8w17Orv9CwB1kWltbnYZeGBijkmsm5C1RGjAw9l+D3t62qh44LrJUQlo20nQyM62BBIZwzubCMPAZZbAs8iWdpmhm7s7n6Nq82rS3O8H74OCZ9TU1NU7AUiw676PlhPVCOPc9MHDmDiyFQmVwFIrzTDpdDW7a2i7s4GZqyikZjI87J/nlJkVfKLhCbzHiTEO3ek+R8TchYrVEwmD1nnJuHxicyfasZgZHeDwIXYOSU4IKGkW87uiJQAC/H4RVoSDm9qWvh4PxUmhaVau0EpM+1+HY9X5ajlCoOvLhTNodxdZGBTgKxXkkHne+1YLzoT7b8v9Cw7IcncXEhJO1qam58NraF8PRglSnoWvtrWSMWoTQCIWZI+bNZJxy1WpqcEQkjAiHsNOZBWMYAITPhwgGKaRK6+5gvByuyPz06TNv6/U6GjQpq2Lu5XA1XhMTi4+0UChABTgKxXljcLDq8rpz5+LdOBcSx487ZQW3bdfVUlzIuNmbsJXEHhxCi9UhhKBsO21tQswV85I6g8jkXBAaorUVze9bdAyD3tiAcf21aM2NG8LBeCl0vTpqIp9ffluoBi1nGuEAznHYts35+cSJc1ufYvOjNDgKxXngxIlq6/Hu3Re+j8f4uCMunppyvE82Q2kKqv43NZ4sslRG+By1dMyTomDPSk95vcjEJHaxhL7KXW9CgBaJoF19FfpoDntwaIELtJFtQ+7owhvb+Yxdm9eS7durXjdn0stomiPsTiRgcNCmxVzeHDIQcC65nJNJ3AwBtmJ1UQGOQrGGSOmILaV0Tly7d1+YbeCzKZedUtv4uBPcxGILh0JeqLhdP/6ol6LHhEIRAn7qvFlglpjEFfOuYU1Oi8XwXLeEC/RRnIGbG9xAcfZ09ELhzFnLxkaY6IkzeW8P0fRjaOXijEuzK+yezbZtcOSIE2jX1W2OEqli9dhY4b5CsYmwrKrHjc934XrczOfkSedkVVPjPJ8zzR26kHADUVEfc1yD44kFOhhXzGu3dCAi4VXtoJqP0JwgRt/egdbYMJPFcAOFC2EApVt66utz2u2tU6exx8YXNSO0BgZpevwO7NFx+ozdaG2OweFsYfd8XIHyyZNr+CQUFyQqg6NQrAGlUvUDt6Zm86TPh4acb+PZrJO5uVC9exbDFav6/U5gYey/BjkRd8S7sVpHCVssOp1K4RD5vVeD1NZlGKrf7+haCoXVGfK5lpg
myEQc62gP6fQTmOX8olkZaTvCbn9uAq1+OyAoSC/+gIbmb8UeGKLywMNorS1zylU+n+O9lEo5Ore2jSFBUmwAVAZHoVhlcrlqcNPUtHmCm0LBOYmMjjrBTUPD6nYPrTeuO65r8Ke3tzmi3a7ti4p5CxFndsBqZ3DceVTL4QY1rg/PRsYaGKTt8f/AHh3ntNG9ZFZGTsRnhN1doTEABrL1wJldml3vpXS6KhRXKDbRx5NCcX6Rtr1AH5FKawwPO3/v6Fj9k996IaXj3ZPPQ329U8a5kP17FsMVGM8+Znp7G1pry6I6mNx0985q6z50vSpCd0tm83EDnJV0J60nblbGyE6h1e8EBBUMzEWyMjJfmBF2G5pNzJtGE7PKWNPC7qVcmnfscL5Y9PVtnnKw4pmhAhyF4hxwHW7twSHnQ9ljEo/tIbX9CrRYjK6uC3tcwXz6+52TbbHonFy7u9d7RavPUjOohLb4NHS3pLXaJ1LDqGZwbHvxbI6bOdvwAc6srEyHd4LT2QZylo+olluQlRF+H2KOsPvsXJo9nmoXVn+/08Gl2NqoEpVCcZZYA1WHWxEOo7W1MGh2kejPYj30CF3+wU0V3GSzzol0ZMTREzU1XZjTzpfD1bueTTfYWgl8NWGj5dJURscpDU8sOxl8nYaGrxg3K4PPi0+v0BaIEzFm1dVmzc5aibD7TC7NjY1OwJnPXxjlO8XaojI4CsVZ4KbcXYdbIQTH081IQ6DVS7qmHoaH88iOlg3nSXIuSOk40eZyTmAjhDOjabPhuue6+puVstrZG2tgEPvXx7AHNYpWluyhMbSO2kVbpGHjd1HNz8oEjNLcDWZlZVYi7F6JS/POnXO9d1Spauuybp/Av/zlLxFCLHp58MEHl9zvec973oLt//zP//w8rlyxlZmdcgdBT7oFKQVCSHZFRtDrlxZCXogcP+5kCWzb0YW405w3G+6AzWh0Zdu7gcVqZurczKAYGsDwmcjaOuxgZMkW6QvhxH22WZkzCbtX4tJsGFXrAtU6vrVZtwzO9ddfz7Crxpzmox/9KHfddRfXXHPNsvu+5S1v4ZOf/OTM74H16NNUbElmCyGdGyBgFGkLTCtUzyCEvJBIJqvzplpboaXlwndgXgrXZXqlz2+1p4jPzgyazbvQcyYSsLxBZ/bVIi3ShlFd90blXLIyywm7V0os5sypKpedzqoLeeab4txZtwDH4/HQPKt/tlwu8+///u+84x3vQJzhq0kgEJizr0JxvpidchcBPzvDI2hi1jfTMwghNzKzu8Jsj4/hqRjZnDYT2Kw0u3GhsZS4eDncAGe1vlvNzgwamo0ubIq2QUVqC8W404Jnv985gZdKG9vBV29vg5sOVkX588ZOLJaVWUrYfTZ0d0NPj+ONsxnGoyjOng2jwfnRj35EPB7nDW94wxm3/dd//Ve++c1v0tzczM0338xHP/rRZbM4xWKRYrE483tqJeNqFYpFcFPuVu8pNH8r2qyTopty17s6lxVCbkTmd4WdoBtZG4PtuxDBuk3ZNeWSnZ7AcDbaInef1TLZm50Z9FMiYBTA8lKR02ruRTKDgYCjHcrnN3aAA6uTlTnrx9QdD6qRESfQOdMsLMXmY8MEOF/96lc5ePAg7e3ty273p3/6p2zfvp3W1lYef/xxPvjBD3L06FG+//3vL7nPbbfdxq233rraS1ZsQVZLCLmRcLUfdjqDFqtjUotB2sfoiKQ5/TDN7ZehaS3rvcw1w/W/OZsAx/2+tFqHeXZm0AhobA9OcDzdUg1wFskMzvbCuRCya6uRlTlbamocY0opnXlVNTXn9eEV68yqfwp/6EMfWlI87F6OHDkyZ5+BgQHuuOMO3vSmN53x/v/sz/6MgwcPsnfvXl796lfzjW98gx/84AecOHFiyX0+/OEPk0wmZy6nT59+xs9TsXVZDSHkRmF+V5jlC5IoRcnqUZoaLLRCFv9TDy7bqnyh43rJrGeL+Hwxrlsus2xtyRZpN2uz0b1
w1htXGD8y4mjKFFuHVc/gvPe97+X1r3/9stvs2LFjzu9f+9rXiMVivPSlLz3rxztw4AAAx48fZ+cSg3G8Xi/ezWRMolh31iPlvhbM1n4IIRjIxrCkwNQqaEKwozGNPZiZo/3YbCzlFnwmVrOLabHMoLQtKqUK9uTQoplB9/FLpSXuVAE4Wba2NkeLc/y4KlVtJVY9wGloaKDhLMYLSyn52te+xi233IJ5Nl+hpjl06BAALS2bN4Wu2JisR8p9tZnfFdbsn+LpVBshvUhrIIEQXuzJzdEVthhuqelcumxWew7XAjFubgoMfVkxLmx8L5yNQDjsZOjKZccSYLONGVEszrprcP77v/+b3t5e3vzmNy/42+DgIDfccAPf+MY32L9/PydOnOBb3/oWN910E7FYjMcff5x3v/vdPPe5z+Xyyy9fh9UrFBc2843YshUfIb2IqVcIGkVk7sLtClsJrv7mbAz+3KBoLdwpZmcGjaOgeT14rgkvmRkUQgU4K2XHDjh6FMbHHc3SZhoUq1icdc+nf/WrX+X666/noosuWvC3crnM0aNHyU33ZHo8Hv7rv/6LF77whVx00UW8973v5RWveAU//vGPz/eyFYpNwWztR9kSJIohALYHxldsj38h4zZU+s4iflttD5z5CE1Da2xAa2hARKPLlj0vBLO/jYIQzgBcgGUkm4pNxLrHsN/61reW/FtnZ+cc98uOjg5+9atfnY9lKRRbgtnaj+P9JiJUpD2ahHzugu0KOxvORX/jtoifD3/RM2Vn/H5nPZa1+eaDrQXBoPOa5fMwNubMrlJsXjbnp5ZCoVgxensbk9e+BK2xAW8xiXes/4LtCjsb3GngZ5O9gWqAs9YljpUEXm6reGFzSqTWhG3bnOtEQgm0NzvrnsFRKBTrS6kESU8TxrUN7KyLQ+HqC7Yr7GyYmnKuz3bA5kZithfOWpXMNhtCQGcn9PU5s6oWUUcoNgkqwFEotjjuQMLOLg3dd2F3hZ0NrsA4FDq7/c61rfysHsO2kck0drGEXceSwaYb4Li6IMXK8Pmczqp0GoaGnFlris2HCnAUii2MO+82FDr7Us2FjutduNGEuu7YjEqvgV22KR4aQGtrxdh/zYJyoRvzKLO/s6etDY4ccYTmdXVb7/2/Fdi8+WeFQrEsxaIzMRzgDBNSNh2uePdsdDTStikNj2OPj+MvTq6Ju7M7NsPqPYUZ8CBitYhwGKv3lHP7wOCqP+ZWxvWc7etb12Uo1giVwVEotii9vc51V9f6rmM9cAO7lepv3KxK8nSSSr4Ww5eidDy0aFblXJk/NsPIG5QrGtLvR2tvxR4YovLAw2itLQscjZUXzrnh8TgzyCYnob+/KkBWbA5UBkeh2IIMTicCIhFnVuhWw9XfrGT44uysSt4fQ8RqCUaMVc+qzB+b4dfLANhSQwiBFqvFHhxETsRX5fEUDk1NTpCYyykt02ZDBTgKxRajUHDElbB1xZVue/CZmsTmZ1XyRgQhNLxBw8mqpDNUHnh4VcpV7tgMpsdm1HnS7AiPYGjT9+31IsvlBWMz3KGbKotz7rhjDPv71eu4mVABjkKxxXD1BvNm3m45ViIunp9VqUh9Zt/VzqrMGZsx/Ri6mHW2LS4+NsPtpHJHSCjOHsOA+nrnZ7d0q7jwUQGOQrGF6O93rmtrq9/8txquUd9KylPzsyo1ZpaAMSuDskRW5VyYPTZDzksjLDc2w3VUVp1Uz4z6eieoLJUgk1nv1ShWAxXgKBRbhHze0RgI4egOtipnM2BzflalwZeiLTBZ3WCJrMq54I7N0MIh7IEhZC6HtCxkLoc9MLTk2IzZZn+KZ0Z3t3M9MAC2ZWOPjWOdOo09Nr4mXXOKtUV1USkUWwAp4dQp5+etXppyhaSmeeZt3ayK1XsKzd+KmFXXcrMqelfnqg0j1dvb4KaDju5ncAiZmESYJnpXJ8b+qxft2HKfhwpwnjm67gT/w4fjPH1fDzvSjyJLZYTHXNKLSLF
xUQGOQrEFcIObWGxlJ/bNzNk4Ec8eRmoPDKHFap22s2JxzYaR6u1taK0tyIk4Ml9Y8dgMd7aW4pkRyQ4y+PBh7FyBdH0TkQYLCkWs3lOO1moTz2fbbKgSlUKxyclmnc4pIaBh60xiWBS3e+psxjPo7W3O0NGu7ch0Bnt4ZM2HkQpNQ2tsQN/egdbYsKIASnX/PHPcrrnO0hFEfR1jNGMLHRHwr3rXnGLtURkchWITIyWcPu387LbCbmXORn8zm3PNqpwvlNnf6uB2zRn1tbSYU4zkaxnO19IeSCzomhONW/zbwgWACnAUik2MO0izoeHsxhJsVlwHY1eYezYITduwJzVdVyWq1cDtmhM+L2GtQNHOEDZmiZu8XmRiclW65hRrz8b4+qFQKFaddBrKZefbfWx1NLAXPJs1y+EGbCrIeWbM75qr96bx6rNe1FXsmlOsPSrAUSg2IVJWxzG4ra9bHctyrjfj1GjVKr46nKsXkWJjogIchWIT0tPjXDc1OeULhTNQEc5ef3MhoAKc1eFcvYgUGxNVlVcoNhnJJNi2U5qqrV3v1WwcXIFxOLy+61gL3KyUGhb5zDkXLyLFxkQFOArFBY607ZnuHun1MTwZA6Gxa9d6r2xj4Xb2rtQD50LCfU5qHtXqsNG75hQrQwU4CsUFjDUwWP2mWSpzgm5EXR2t13Wiac3rvbwNgyunUOU6xUrZyF1zipWhAhyF4gLFGhik/NM7sNMZtFgdSa0W0n7k2BiBXx/GCinHVZdUyrnejPobF+WFo1DMReXbFIoLENdx1U5nHIdVf4CJUi3C66V7e0k5rs7D1d+sZIK4QqHYHKgAR6G4AHEdV7VYHUIIxgtRAFr8k2jaXMdVRVWbsplLVK7QWMW0CoWDCnAUigsQ13EVnxeAem+Kel+KkDntsOr1Istl5bg6i80oLp6N2ypeUIdcoQBUgKNQXJDMd1w1NJtaT7a6gXJcncFtnd7s5Sm/z0Ymk2RPDGOPjavypGLLo0TGCsUFiOu4avWeQvO3ImalJ1zHVb2rUzmuUtXfbGZPIGtgEO2+Ryj36STtDCF/HK2tFWP/NUportiyqAyOQnEBohxXV052OrHl8azvOtYKt5tOnOpF+P0U61oQ4TBW7ynKP70Da2BwvZeoUKwL6tNPobhA0dvbMG86iN61HZnOYA+PINMZ9K5O53b1zR1wWqc3q/5mfjed8HqRQkcE/E53neqmU2xhVIlKobiAUY6ry1MqOdfB4PquY62Y303n3OhcCTG3m06Z1im2GirAUSgucJTj6tJs5gGbUO2mE9PddFFPloo9qxfe60UmJlU3nWJLsmZf8z71qU9x/fXXEwgEqFmifaG/v5+XvOQlBAIBGhsbef/730+lUln2fhOJBK9+9auJRCLU1NTwpje9iUwmswbPQKFQXOhMTTnXgcC6LmPNmN9N1+hL0RqYrG6guukUW5g1C3BKpRKvfOUreetb37ro3y3L4iUveQmlUol77rmHf/mXf+HrX/86H/vYx5a931e/+tU89dRT3HnnnfzkJz/h17/+NX/2Z3+2Fk9BoVBc4Gz20QVuN50dTyDnPVm3m05ra1PddIotiZDz/ytWma9//eu8613vYsr9KjXNz372M37/93+foaEhmpqaAPjyl7/MBz/4QcbHx/Es0vLw9NNPc8kll/Dggw9yzTXXAPDzn/+cm266iYGBAVpbW1e0plQqRTQaJZlMEolEntkTVCgUGw5p21RG4/QcB1/IpOuKyKbVJc2dSVYLXi8Ui05wEw4pwbliU3E25+91+4+/99572bt370xwA3Dw4EFSqRRPPfXUkvvU1NTMBDcAN954I5qmcf/99y/5WMVikVQqNeeiUCg2J9bAIKUf/pixf7uTyt33EfzVjyj98Mebtl1addMpFIuzbiLjkZGROcENMPP7yMjIkvs0NjbOuc0wDOrq6pbcB+C2227j1ltvfYYrVigUG53Z2YypwGWIoIeIkcTqPeXM5dq
kJ3zVTadQLOSs3v0f+tCHEEIsezly5MharfWc+fCHP0wymZy5nD59er2XpFAoVpn5njDS9CGEhhbcGp4wQtPQGhvQt3egNTao4Eax5TmrDM573/teXv/61y+7zY4dO1Z0X83NzTzwwANzbhsdHZ3521L7jI2NzbmtUqmQSCSW3AfA6/Xi9XpXtC6FQnFhspgnjFcvA8oTRqHYipxVgNPQ0EBDw+p8MFx33XV86lOfYmxsbKbsdOeddxKJRLjkkkuW3GdqaoqHH36Yq6++GoD//u//xrZtDhw4sCrrUigUFybzPWG6QqPM6aBQnjAKxZZizXKY/f39HDp0iP7+fizL4tChQxw6dGjGs+aFL3whl1xyCa997Wt57LHHuOOOO/jIRz7C2972tplsywMPPMBFF13E4KAjDrz44ot50YtexFve8hYeeOAB7r77bt7+9rfzqle9asUdVAqFYnOy2IR1U5tVjlKeMArFlmLNApyPfexj7Nu3j49//ONkMhn27dvHvn37eOihhwDQdZ2f/OQn6LrOddddx2te8xpuueUWPvnJT87cRy6X4+jRo5TL5Znb/vVf/5WLLrqIG264gZtuuonf+Z3f4Stf+cpaPQ2FQnGBoDxhFArFbNbcB2cjonxwFIrNifKEUSg2N2dz/lazqBQKxaZBb2+Dmw463VSDQ8jEJMI00bs6MfZfrYIbhWILoQIchUKxqVCeMAqFAlSAo1AoNiFqwrpCoVBfaRQKhUKhUGw6VICjUCgUCoVi06ECHIVCoVAoFJsOFeAoFAqFQqHYdKgAR6FQKBQKxaZDBTgKhUKhUCg2HSrAUSgUCoVCselQAY5CoVAoFIpNx5Y0+nPHb6VSqXVeiUKhUCgUipXinrdXMkZzSwY46XQagI6OjnVeiUKhUCgUirMlnU4TjUaX3WZLThO3bZuhoSHC4TBCiPVezpYhlUrR0dHB6dOn1RT3dUQdh42DOhYbA3UcNg5nOhZSStLpNK2trWhnmC+3JTM4mqbR3t6+3svYskQiEfUhsgFQx2HjoI7FxkAdh43DcsfiTJkbFyUyVigUCoVCselQAY5CoVAoFIpNhwpwFOcNr9fLxz/+cbxe73ovZUujjsPGQR2LjYE6DhuH1TwWW1JkrFAoFAqFYnOjMjgKhUKhUCg2HSrAUSgUCoVCselQAY5CoVAoFIpNhwpwFAqFQqFQbDpUgKNQKBQKhWLToQIcxZrzqU99iuuvv55AIEBNTc2i2/T39/OSl7yEQCBAY2Mj73//+6lUKud3oVuQzs5OhBBzLp/5zGfWe1lbgi996Ut0dnbi8/k4cOAADzzwwHovacvxiU98YsH7/6KLLlrvZW0Jfv3rX3PzzTfT2tqKEIIf/vCHc/4upeRjH/sYLS0t+P1+brzxRnp6es7qMVSAo1hzSqUSr3zlK3nrW9+66N8ty+IlL3kJpVKJe+65h3/5l3/h61//Oh/72MfO80q3Jp/85CcZHh6eubzjHe9Y7yVtev7t3/6N97znPXz84x/nkUce4YorruDgwYOMjY2t99K2HJdeeumc9/9vf/vb9V7SliCbzXLFFVfwpS99adG//9Vf/RVf/OIX+fKXv8z9999PMBjk4MGDFAqFlT+IVCjOE1/72tdkNBpdcPtPf/pTqWmaHBkZmbntH/7hH2QkEpHFYvE8rnDrsX37dvm3f/u3672MLcf+/fvl2972tpnfLcuSra2t8rbbblvHVW09Pv7xj8srrrhivZex5QHkD37wg5nfbduWzc3N8nOf+9zMbVNTU9Lr9cpvf/vbK75flcFRrDv33nsve/fupampaea2gwcPkkqleOqpp9ZxZVuDz3zmM8RiMfbt28fnPvc5VRpcY0qlEg8//DA33njjzG2apnHjjTdy7733ruPKtiY9PT20trayY8cOXv3qV9Pf37/eS9ry9Pb2MjIyMud/JBqNcuDAgbP6H9mS08QVG4uRkZE5wQ0w8/vIyMh6LGnL8M53vpOrrrq
Kuro67rnnHj784Q8zPDzM3/zN36z30jYtExMTWJa16Hv+yJEj67SqrcmBAwf4+te/zp49exgeHubWW2/lOc95Dk8++SThcHi9l7dlcT/3F/sfOZtzgsrgKM6JD33oQwvEefMv6sN6fTibY/Oe97yH5z3veVx++eX8+Z//OZ///Oe5/fbbKRaL6/wsFIq158UvfjGvfOUrufzyyzl48CA//elPmZqa4rvf/e56L02xCqgMjuKceO9738vrX//6ZbfZsWPHiu6rubl5QQfJ6OjozN8UZ8czOTYHDhygUqnQ19fHnj171mB1ivr6enRdn3mPu4yOjqr3+zpTU1PD7t27OX78+HovZUvj/h+Mjo7S0tIyc/vo6ChXXnnliu9HBTiKc6KhoYGGhoZVua/rrruOT33qU4yNjdHY2AjAnXfeSSQS4ZJLLlmVx9hKPJNjc+jQITRNmzkOitXH4/Fw9dVXc9ddd/Gyl70MANu2ueuuu3j729++vovb4mQyGU6cOMFrX/va9V7Klqarq4vm5mbuuuuumYAmlUpx//33L9mNuxgqwFGsOf39/SQSCfr7+7Esi0OHDgHQ3d1NKBTihS98IZdccgmvfe1r+au/+itGRkb4yEc+wtve9ja8Xu/6Ln4Tc++993L//ffz/Oc/n3A4zL333su73/1uXvOa11BbW7vey9vUvOc97+F1r3sd11xzDfv37+fv/u7vyGazvOENb1jvpW0p3ve+93HzzTezfft2hoaG+PjHP46u6/zJn/zJei9t05PJZOZkynp7ezl06BB1dXVs27aNd73rXfzlX/4lu3btoquri49+9KO0trbOfClYEavZ6qVQLMbrXvc6CSy4/OIXv5jZpq+vT774xS+Wfr9f1tfXy/e+972yXC6v36K3AA8//LA8cOCAjEaj0ufzyYsvvlh++tOfloVCYb2XtiW4/fbb5bZt26TH45H79++X991333ovacvxx3/8x7KlpUV6PB7Z1tYm//iP/1geP358vZe1JfjFL36x6Hnhda97nZTSaRX/6Ec/KpuamqTX65U33HCDPHr06Fk9hpBSytWKyBQKhUKhUCg2AqqLSqFQKBQKxaZDBTgKhUKhUCg2HUpkrDgr0uk0w8PD2La93ktRKBSKCxZN02hpaVGGgmuICnAUK8K2bW677TZ+8IMfrPdSFAqFYtPw8pe/nA9/+MNomiqorDYqwFGsiNtuu40f/vCHvPOd72Tfvn2YprneS1IoFIoLlnK5zKOPPsrtt98OwP/6X/9rnVe0+VBdVIozkkqleMELXsA73/lObrnllvVejkKhUGwavvGNb/DFL36RX/ziF6pctcqonJjijLjDzfbt27fOK1EoFIrNhfu5Ojw8vM4r2XyoAEdxRlxBsSpLKRQKxerifq6qxo3VRwU4CoVCoVAoNh0qwFEoFAqFQrHpUAGOQqFQKBSKTYcKcBQKhUKxahQKhTNeZutNbr/9drZv345hGLzvfe8jHo/T2NhIX1/f+j2JVeJVr3oVn//859d7GVsW5YOjOK/Yts3g0DiZbJ5Q0E9ba4MyuLqAkFKSLZSpWDaGrhH0mQgh1vQxn/e853HllVfyd3/3d2v6OCtlo63nTEjbRk7EkfkCwu9D1McQa/g/d9VVV/H0008v+XchBCdPnqSzs5PHHnuM97znPfz7v/87+/btIxqN8pGPfIQ/+IM/oLOzc83WeL74yEc+wnOf+1ze/OY3E41G13s5Ww4V4CjOGz3HT/OzO+/l+MnTFIslvF4P3Ts6ePHvXceu7o71Xt4zxrIshBCbNmBLZgsMjqdJ5grYtkTTBNGAj7aGMNGgb72XtyylUgmPx7PeyzjvWAODVB54CHtwCFkqIzwmWlsrxv5r0Nvb1uQx3/zmN3Prrbfy0EMPLei8fPOb34ymaTPBy09+8hP279/PTTfdBEAul+OrX/0qd9xxx5qs7WyoVCoYxjM7RV522WXs3LmTb37zm7ztbW9bpZUpVsrm/CR
WbDh6jp/mq9/4EU8ePkFdbYSdXW3U1UZ48vAJvvqNH9Fz/PSaPO7/+3//j7179+L3+4nFYtx4441ks1ls2+aTn/wk7e3teL1errzySn7+85/P7PfLX/4SIQRTU1Mztx06dAghxEzq/Otf/zo1NTX86Ec/4pJLLsHr9dLf30+xWOSDH/wgHR0deL1euru7+epXvzpzP08++SQvfvGLCYVCNDU18drXvpaJiYk1ef6rRTJb4NhAnEQ6j9fUCQU8eE2dRDrPsYE4yWxhTR739a9/Pb/61a/4whe+gBACIQQnTpzgTW96E11dXfj9fvbs2cMXvvCFBfu97GUv41Of+hStra3s2bMHgHvuuYcrr7wSn8/HNddcww9/+EOEEBw6dGhm3+WOz2Lr2ailFGtgkPJP78DqPYUIh9HaWhDhMFbvKef2gcE1edxbbrmFQqHAE088QWdn58wlFArx61//mje/+c0AdHd385GPfIR77rkHIQS33HILP/3pT/F6vVx77bUz92fbNp/+9KfZtWsXPp+PpqYmXv/618/8/cknn+Smm24iEonQ3NzMe9/7Xkql0szff/7znxMMBueUxZ588kmEEDPHta+vDyEE3/3ud3nOc56D1+vlRz/6Ef39/bzuda+jqakJv9/PFVdcwW9/+9uZ++nv7+dP//RPqa2tpa6ujle/+tVMTk7OeT1uvvlmvvOd76zqa6xYGSrAUaw5tm3zszvvJTGZYnd3B+FQAF3XCYcC7O7uIDGZ4uf/de+q+0AMDw/zJ3/yJ7zxjW/k6aef5pe//CV/+Id/iJSSL3zhC3z+85/nr//6r3n88cc5ePAgL33pS+np6Tmrx8jlcnz2s5/ln/7pn3jqqadobGzklltu4dvf/jZf/OIXefrpp/m///f/EgqFAJiamuIFL3gB+/bt46GHHuLnP/85o6Oj/NEf/dGqPvfVRErJ4HiaYskiEvRgGjqaEJiGTiTooViyGBxPsxam6F/4whe47rrreMtb3sLw8DDDw8O0t7fT3t7O9773PQ4fPszHPvYx/uf//J9897vfnbPvXXfdxdGjR7nzzjv5yU9+QiqV4uabb2bv3r088sgj/O///b/54Ac/OGefMx2fxdbT0bHxso/Stp3MTTqD1t6KCPgRmoYI+NHaW7HTGSoPPIxcA++V+vp6Xvayl/HP//zPc27/5je/STQa5WUvexngBJs7duzgc5/7HMPDw/z93/89v/nNb7j66qvn7Hfbbbfxne98h6985SscPXqUH/zgBzz3uc8F4NFHH+X666/nqquu4pFHHuE73/kO3/72t/nsZz87s/+jjz7KZZddNiezeujQIVpbW6mvrwfgscceA+Bzn/scH/vYx3jqqae4+uqr2b9/P/l8nh/96Ec8/vjjvP3tbycSiQBw/Phxrr76arq7u7nvvvu48847OX78OO9///vnrH///v088MADFIvFVXh1FWeDKlEp1pzBoXGOnzxNa0v9Ar2GEILW5hg9J04zODROR3vTqj3u8PAwlUqFP/zDP2T79u0A7N27F4C//uu/5oMf/CCvetWrAPjsZz/LL37xC/7u7/6OL33pSyt+jHK5zN///d9zxRVXAHDs2DG++93vcuedd3LjjTcCsGPHjpnt/8//+T/s27ePT3/60zO3/fM//zMdHR0cO3aM3bt3P7MnvQZkC2WSuQIBn7Ho8Qv4DJK5AtlCmZB/dctA0WgUj8dDIBCgubl55vZbb7115ueuri7uvfdevvvd784JFIPBIP/0T/80U5r68pe/jBCCf/zHf8Tn83HJJZcwODjIW97ylpl9VnJ8FlvPRkNOxLEHh9BidYseMy1Wiz04iJyIIxobVv3x3/KWt/CiF72IoaEhWltbAfja177GLbfcMnM8QqEQfX19/M7v/M7Ma3nq1KmZ7V3uuOMObr75Zp7//OcDsH37dq6//vqZx3nta1/LX/7lXwJOVugNb3gDP/nJT/joRz8KOMGM+//p8thjj82
57dChQwSDQb73ve/NlM9uuukmrr322jmB865du2Z+/ou/+Av+4i/+Ys578QMf+MCCAKe1tZVSqcTIyMjM55Di/KAyOIo1J5PNUyyWCPi9i/7dH/BRLJXJZPOr+rhXXHEFN9xwA3v37uWVr3wl//iP/8jk5CSpVIqhoSGe/exnz9n+2c9+9rLiyMXweDxcfvnlM78fOnQIXdf53d/93UW3f+yxx/jFL35BKBSauVx00UUAnDhx4iyf4fmhYtnYtkTXF/+40HUN25ZUrPPnxPqlL32Jq6++moaGBkKhEF/5ylfo7++fs83evXvn6G6OHj3K5Zdfjs9X1Qvt379/zj4X4vFZDJkvIEtl8C3+P4fXiyyXkfm1KS3ecMMNbN++nX/5l38B4OGHH+bxxx+fKU8BPP7440D1SwdAPp+fc3wAXvrSl/KZz3yGgwcP8k//9E8zJaAjR47w8MMP8453vGPO9h6PZ0625NFHH53zPwoLg57HHnuMl770pTPBzalTp/jZz37GJz7xiUWf36lTp7jzzjv53Oc+N+e98prXvGaBbsfv9wNOtldxflEBjmLNCQX9eL0ecvnFU7T5XAGvxyQU9K/q4+q6zp133snPfvYzLrnkEm6//Xb27NlDb2/vGfd109mzyy7lcnnBdn6/f843ZPfDbCkymQw333wzhw4dmnPp6emZSbtvNAxdQ9ME1hIBjGXZaJrAWCIAWm2+853v8L73vY83velN/Od//ieHDh3iDW94wxzdBTgZnLPlQjw+iyH8PoTHhMISZZFiEWGaCP/aiMOFELzxjW/ka1/7GuBkwa6//nouvvjimW0OHTpEd3f3nONUX1+/QMPyvve9j6effpobbriBv/3bv6W7u5ve3l6eeuopTNNckPU8fPjwTNCUzWY5ceLEnGDGtm0effTRBRmc5z3veXN+93g8XHnllYs+v8cee4y6ujoef/zxOe+TJ554gl/84hdztk0kEgA0NKx+pkyxPCrAUaw5ba0NdO/oYGh4YoFOQ0rJ0EicXTs7aGtd/Q8AIQTPfvazufXWW3n00UfxeDzcddddtLa2cvfdd8/Z9u677+aSSy4Bqh9GswfgzRaiLsXevXuxbZtf/epXi/79qquu4qmnnqKzs5Pu7u45l3M5IZ8Pgj6TaMBHrlBZ9PjlChWiAR9B39rMKvN4PFiWNfP73XffzfXXX89f/MVfsG/fPrq7u1eUXdmzZw9PPPHEnG/3Dz744JxtVnJ85q9nIyLqY2htrdjxxKLHzI5PorW1Iepja7aGN7zhDZw8eZL/+q//4tvf/vacUiAsXjrat28fhw8fXnBfu3fv5gMf+AAPP/ww6XSaw4cPEw6HsSxrzheP3t5efvCDH/DqV7965nfbtmeycOCUvOLx+Mxjp1Ip+vr65gwTNk2TSqWyZNbFNE3S6TStra0L3idtbXO705588kna29tn9D6K84cKcBRrjqZpvPj3rqOuNsKx46dJp7NULIt0Osux46epq4vwohuvW/X26vvvv59Pf/rTPPTQQ/T39/P973+f8fFxLr74Yt7//vfz2c9+ln/7t3/j6NGjfOhDH+LQoUP8j//xPwCnlt/R0cEnPvEJenp6+I//+I8VGXZ1dnbyute9jje+8Y388Ic/pLe3l1/+8pczdfy3ve1tJBIJ/uRP/oQHH3yQEydOcMcdd/CGN7xhw540hRC0NYTxenRS2RLlioUtJeWKRSpbwuvRaWsIr5kfTmdnJ/fffz99fX1MTEywa9cuHnroIe644w6OHTvGRz/60QWBymL86Z/+KbZt82d/9mc8/fTT3HHHHfz1X//1zHOElR2f+evZiEMShaZh7L8GLRzCHhhC5nJIy0LmctgDQ2jhEMb+q9fUD6e1tZWbbrqJN77xjViWtUBIf+jQoQUZkoMHD/LUU0/NZHH+6q/+im984xs8/fTTHD16lP/5P/8nsViM66+/ngMHDlBTU8OHPvQhTp48yX//93/zkpe8hFe96lW
86EUvAiAWiyGEmHl/3Hfffbz97W/H5/PNZH4ee+wxdF2fUyo7cOAA0WiUt771rTz99NMcPnyYL3/5yzNNCAcOHCASiXDLLbfw2GOPcfz4cX7+85/zrne9a8Hr8Jvf/IYXvvCFq/KaKs4SqVCcgaefflpeffXV8umnn35G93Osp19+4e//Tb7jfX8t/+ydt8l3vO+v5Rf/4d/ksZ7+VVrpXA4fPiwPHjwoGxoapNfrlbt375a33367lFJKy7LkJz7xCdnW1iZN05RXXHGF/NnPfjZn/9/+9rdy79690ufzyec85znye9/7ngRkb2+vlFLKr33tazIajS543Hw+L9/97nfLlpYW6fF4ZHd3t/znf/7n6utw7Jh8+ctfLmtqaqTf75cXXXSRfNe73iVt216T12G1mMrk5VO9Y/Kep/rlb584Je95ql8+1TsmpzL5NX3co0ePymuvvVb6/X4JyCNHjsjXv/71MhqNypqaGvnWt75VfuhDH5JXXHHFzD6ve93r5B/8wR8suK+7775bXn755dLj8cirr75afutb35q5T5czHZ/563HfDxuRyukBWfj/fihzX/x7mf38F2Tui38vC//fv8vK6YHz8vg//vGPpa7r8q1vfeuc2y3LkoFAQP7kJz9ZsM/+/fvll7/8ZSmllLfeeqvcvXu39Pl8sr6+Xv7BH/yBPHz48My2v/71r+VVV10lfT6f3LFjh7zttttkpVKZc39/+Zd/KWtra+W2bdvk6173OvnBD35QXnPNNTN/v/322+Wll166YB2//e1v5XXXXSeDwaCsra2VL37xi+XU1NTM3++//375vOc9T0YiERkOh+VVV10lv/CFL8y5j3w+L6PRqLz33nuXfI1W6/NVsRAh5Rr0dio2FUeOHOE1r3kN3/zmN+ekes8F5WR8YSPXwcl4LfnXf/1X3vCGN5BMJs+on7pQOd9Oxs+U//iP/+D9738/Tz755AX/2fAP//AP/OAHP+A///M/l9xmNT9fFXNRbeKK84qmaavaCq44vwghVr0V/HzyjW98gx07dtDW1sZjjz3GBz/4Qf7oj/5o0wY34JSr1qIVfK14yUteQk9PD4ODgxvSY+hsME2T22+/fb2XsWVRAY5CodgyjIyM8LGPfYyRkRFaWlp45Stfyac+9an1XpZiHotpWS5EZrfFK84/KsBRKBRbhg984AN84AMfWO9lKBSK88CFXeBUKBQKhUKhWAQV4CjOiCv0W8zoTqFQKBTnjvu5eqELqjci6hVVnBF3Tsyjjz66zitRKBSKzYX7udrS0rLOK9l8KA2O4oxEIhFe/vKXz3QD7Nu3D9NcG9dahUKh2AqUy2UeffRRbr/9dl7+8pcTDofXe0mbDuWDo1gRtm1z22238YMf/GC9l6JQKBSbhpe//OV8+MMfViWqNUAFOIqzIp1OMzw8vCHt6RUKheJCQdM0WlpaVOZmDVEBjkKhUCgUik2HyokpFAqFQqHYdKgAR6FQKBQKxaZDBTgKhUKhUCg2HSrAUSgUCoVCselQAY5CoVAoFIpNhwpwFAqFQqFQbDpUgKNQKBQKhWLT8f8HxxFwTkRraB8AAAAASUVORK5CYII=", "text/plain": [ "
" ] @@ -377,7 +377,7 @@ }, { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAiIAAAG/CAYAAABlpLwqAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOz9d5AkWX7fCX5ch47UOkuL1nJ6erqnR2EUBsTOgVyC5C2wIGlY2i45xILkmd0St2Zn2Ns9gmu0u4OR3DWQXBx5WAIEDjyCAEEOgJE9qrundVer0iIrtQgtXd0fv3rlnlGZ1dWi9PuYhWVkhLuHcn/v+37SiOM4RqPRaDQajeYmYN7sN6DRaDQajebuRQsRjUaj0Wg0Nw0tRDQajUaj0dw0tBDRaDQajUZz09BCRKPRaDQazU1DCxGNRqPRaDQ3DS1ENBqNRqPR3DS0ENFoNBqNRnPT0EJEo9FoNBrNTUMLEY1Go9FoNDeN6y5EFhcX+fmf/3lGR0fJZrM8+OCDvPTSS9f7ZTUajUaj0dwG2Nfz4JVKhU9+8pN87nOf4+tf/zrj4+OcPHmS4eHha9o/iiKWlpYoFosYhnE936pGo9FoNJqPiDiOaTQazMzMYJpXt3kY17Pp3d//+3+fH/7wh3z/+9//QPtfvHiR+fn5j/hdaTQajUajuREsLCwwNzd31W2uqxC57777+PKXv8zFixd59tlnmZ2d5W/9rb/F3/gbf2PH7Xu9Hr1e7/L/tVqNPXv2sLCwQKlUul5vU6PRaDQazUdIvV5nfn6earVKuVy+6rbXVYhkMhkA/t7f+3v87M/+LC+++CK//Mu/zG/8xm/wV//qX71i+1/91V/lf/gf/ocrHq/ValqIaDQajUZzm1Cv1ymXy9c0f19XIeK6Lh/72Mf40Y9+dPmx//a//W958cUXee65567YftAiohSVFiIajUaj0dw+vB8hcl2zZqanp7nvvvu2PXbvvfdy4cKFHbf3PI9SqbTtptFoNBqN5s7lugqRT37ykxw/fnzbYydOnGDv3r3X82U1Go1Go9HcJlxXIfJ3/+7f5fnnn+cf/IN/wKlTp/id3/kd/vk//+d87Wtfu54vq9FoNBqN5jbhugqRJ554gj/4gz/g3/ybf8MDDzzA//g//o/8+q//Oj/3cz93PV9Wo9FoNBrNbcJ1DVb9sLyfYBeNRqPRaDS3BrdMsKpGo9FoNBrN1dBCRKPRaDQazU1DCxGNRqPRaDQ3jeva9E6j0dwdRFHE4tI6zVaHQj7L7Mz4tkZX7/W8RqO5e9FCRKPRfChOnlrg6994jlNnFuj1+niey6ED83zli09x+ND8ez6v0WjubrQQ0Wg0H5iTpxb4zd/6I7YqdWamx8hlPdqdHm++fZql5XW+8Nkn+OZ3X9z1+V/8ha9y8MCstpZoNHcxOn1Xo9F8IKIo4p/+s3/Lm2+f5siheQzDuPxcHMccP3mBZqtNoZDj6KE9Vzx/4tQCM1NjDA+XOH324jVbS7SbR6O59Xk/87e2iGg0mg/E4tI6p84sMDM9hmEYxLEJsY1h9jEMg1Ixx5tvn+bpJx/CMAzCYAjDiDCtOoZhkM14fPO7L7Jv7zSHDsztaC0ZFCPazaPR3HnoZYRGo/lANFsder0+uawHQBiMEwQTxHFMrd6k3mjR6fYwTLGERFGBKCwCYhFZXF6j0+0xNzNOsZDDsiyKhRxHDs2zVanzJ998jiiKLr+ecgO9+fZpRoZLHNw/y8hwiTffPs1v/tYfcfLUwo3/EjQazYdGW0Q0Gs0HopDP4nku7U6PYiEHsU2z2eLEmbfZqtRpNFu0Wh3efPs0ruNQKsQoP3C90WZ9o0ohn8V1HaIoC8SYZhfDMJiZGuXk6QUWl9aZn5skiiK+/o3n2KrUt7mBlHA5cWqBP/nmcxw8MKvdNBrNbYa+YjUazQdidmacQwfmW
VreII5j6s06p88usbZeIZNx8VyXoaEi6+sVXnntXZrN9uV9e/0+zWabUjFPr+fTqOcIg5HLz2dzGXp9n2arA1zpBorCImFQALhCuGg0mtsLLUQ0Gs0HwjRNvvLFpxgZLnH85AXOnrtAr9+nkM/SbHbI57M88dh9jI8Ps75R4cLCCmEY0Wi0OHHqAr2+z8ZmlRdeeot3T5znxKkFNrdqAHTaXTzXoZDPAle6gaKwRBQNXX4vg8JFo9HcPmghotFoPjCHD83zi7/wVfbMTbGxuUHf96k3+0xPPMCjDz7Bwf2zPPbwUeZmJ6jWm1y4uMr5hVX6fZ9yuYBpmQwPFXBch1qtyauvn2Bjs8rSyiaHD84zOzMObHcDAcQYQJLwNyhcNBrN7YMWIhqN5kNx+NA8H3vsHgwzwjJNwsig041ZWOixsVlldKTMU08+yPTUGM889TAH9s0yMzXOp59+lFIhT6XaxCAmn8+wVa3z7A9fxXFsvvT5Jy/Hewy6gQBUMnAcx1cIF41Gc/ugg1U1Gs2H4uSpBf7jn/6QRrOGaQwR9GOazQ71eou3TrzFkx+7n2w2g2lMUK83OXVmgbHRIUZHSjz68IMsLvrU610azRabWzUs02Rjs8qffvMFiCGXy9BsdXjkocMsLq9J/ZGJcdyMSafRYmllk5GREj/5had0oKpGcxuihYhGo/nAqGyWza0aURTQ6/l4Xg7Pc4ijiPpSi289+xK2ZTAz+TFOnVnk3MIbjI6UmZ+b4sjBjzE8BCur5wmCAMOAMIpYWd3gm9/5MX/6reeZGBvGdW08z2WoXGR6YoJGq0OvUsW0Gjx4/0F+8gu6johGc7uihYhGo7lmVFXTeqNFs9Wh2Wzz+rETtFodDDPEsmwMY5QgCIhj8H2fIIgwDBgudzEMi3anR39lg3qjTatVJgxser0+hhEThhFRKK9xcWmDjOdgWyZPP/kgnW6fpeUNhocmefrJh5ifd5mecnRlVY3mNkcLEY1Gc02oqqavvn6c8wsrdDpdTNOgUm3i+z6O4zJaKlEq7mVj8xStziZBIAXJ4hhMw8I0TEzTIIxi2p0Ola0Gtp3Fsix8P8BzSuTKRRrNFbq9PoYBm1s1VtYqeK7D5MQwGxs+Z88v8eUvPk2hoAWIRnO7o4WIRqN5T1RV0wsLK6yuVwjDkFIxz1alTrPVJopi/CAkjEJs2yOfG6HZ2by8/3B5D0OlPdQaS5iGRRgHBEFEGMUYUQSGQRSHTE3eh+vY1BrLuI6N7wdUa/Diy29j2xa2bTExepTTZy+yubVOoTCpe89oNLc5WohoNJqrcjkOpFLDDwLCMGRstEy706PRbBGGYvXw/YBut4NpWDhuZtsxxkePYNtZ9s4+SRB1uXDxxUvZLyZgQBwzNryPbKaEH7aIQ4iNGM8tM1TaRz7XIpcL8IOQTteg2VrnnRNn6ff7uveMRnObo4WIRqO5KqqqaamQ5+y5JSZGD+AHPRaXFghDG9uyCcKAQn6cMPLFyoGNaSbDS6k4RT43hmnaFHMTWJZFEITYloVhmFiWgetlcN0SQQdMK0u7U8UrZojjmDgOMU0bzzXJuBlq9SZ/9q0XcGyLSrXBzPTYNTXN02g0tx7afqnRaK6KqmpqWiZBEJLNjtPr5uh1DcZH78N1Szh2ltHhA2QyQwBkM6OMDu9PDhKDYVqASRzHRFGEaRoYpoVlWkRRTKvVwfdDwjAgCPoAOHYG0zBotcG19wHQ7fqMDJV4861TLC6vc+TQ/DU1zdNoNLcmWohoNJqroqqaRmGEbVs0mm2abSmlbgAZr4xtiyvGMi0AbNslKTkGMRGunSOTKRPHBlEUY1kWhmFgGAZhGBFGARgWUQxh6APgOFkcx8ax89QbbTa2qriew+joENV6k5HhEoZhEPSniCJ5D7r3jEZze6GFiEajuSqqqmm92WKoXKRebxJHEBs+YDI2cpCJsSNYpotpOhhGem/5J45jLCt5zjAgjsVa4bkZHMcijiLMS
0LGMAw8t4DnZrFME9vK0Ov2GSoVOLhvFvPScUqFnBwfmygcuvyquveMRnP7oGNENBrNVVHN7ZaW19nYrBJFISA1QkaHD4AB/a0WI8P78dw8ANElkeHaWfywgwSkinqwbZtsxsP3A2zLYmpqjFbbodU0sUwbyzSZm3kEy7QxTYPh4RKmadDt9Tl6dD/5fJaVtSWGygUs2yKOZRgzjN7l96x7z2g0tw/aIqLRaN4T1dzugXsPYtsGlmURhiGmaeE5eSDGthwcWyb+OApxrCxzM4/j2B6lwhiZTB4D2TeKY6I4xnUd4sig1w0oFefIZcpgxDi2g21ZmKZJt9cHYhzbxvctllc2GB/L8oknHmR5ZZMolI68htmV19a9ZzSa2wptEdFoNNfE4UPz/Fd/9ausb/xHjLhAbFzEsS0wjEtxHB6WaWEaFoYBlmXj2BksywXDRrlpbFvKtQdBSL/vU6k2yWWnKRcmMEwb4ogogiD2yeeylIp5arUmmYxDFDrsmZ/iiz9xgFwu5Dd/6484v1BjfCRLJt+m1Wrr3jMazW2GFiIajeaamZ+b5Oihac6e6/D0kx9jdTlHrdElDEMM08K0XLJemXa3QkyM43ipNN4YDAloDfwAg5i+79Pr+RTz9iXRENH3+4RhQBTHdDp9Go0mszMTfPaZRzh6dBzPHebQIRPXhV/8ha/yh398juWVTZY3LuK5ju49o9HcZmghotForhnTNPniTzzC7/zea1RrFSYnhgmCTTzXlkDTOCImJo5iDGIs28U01ONgGCaeO042lyeb8TAA0zDp9n3Khk0cxUSRT3zp9cIwoN5sY61vsVWt0u9P4DoxK6urtNpSSfWnvvQ0W1t1Rscf0ZVVNZrbEC1ENBrN++K+e2f5wudcXnvjDVZXIjAgjMC1PRzHwjTAtCyiKMRzypiWRxTHEMeYhoNtO0xPTNLrNwn6FqZpY5kOlpUhiqWOiGEYZDwbMLFtk61KjX/zb/+YH79yGMe0COKzlzvy7pl5kscfPcq9R/fd7K9Go9F8ALQQ0Wg07wvThJnpcWZnPseJE02+/9xbbFY8MhmPOA6J4/hymm6xMMHE6AEc27z0uCEBrm6eQn6ESsXAsm2cS3VH4ijCMGNGhsYJAp9mu05MjOc5bG41WVpaw7ZsesEqH3v0XrJZlwsXV9iqVJmbe1i7YzSa2xBtv9RoNB8Qk0w2xsDg3qOH8DyHIOyjUnUty8RzM3hujiiKL5c3i6KYQn6WQm4Ky7bwPId8oSC1RYwQz3PIuqNY1hAGFlEYEvghnV4Tvx8xNFQgDEPOnl8kn8szPTVGrbGpK6lqNLcpWohoNJoPRBwDRpcgDBkqjzI6UmR8fALHyWLZNq7r4NguYyMHME3rcgEzA4MgiGk0O5QKefK5wiWhYhKFAf2ejx9GlAtTjI0evFQa3qXbbar+eAyVhul28jTqYmUZH3V1JVWN5jZFu2Y0Gs37xrgkCEaGbWzLwu8bxHGH0I8xDQ/bcumEMYbh4Dg5MpkMSVlV8P2QoXKe4aEiMESznsEwDfygRxhGWKaJaVlYhoHjZClkp7GszCUxE5DLTdLrufiXaohkcjG9dV1JVaO5HdFCRKPRvG+UENm/b5iM57JwcRWMCgZZcpkstuVg2Q5gXKorYl6qA2LgOjZjY8P0/S5nLyzjWBGF/BxZ07yUtiuVWw0sMGxsI4NpmpfiSCCMOjhmHssEx5YS791OU1dS1WhuU7QQ0Wg075tMBtptWFpeot3uAi69Xo1iIYdpmPiRCbGF7TiMF0fxPAeLHJ1eSDbOsbhcp9NtYdsWnmPh2h6mYWLEIaYBQRSR8UpgmPR6W1imiedJQGu/1yQMMpTLRXLZ7OVKqg/ef1BXUtVobkN0jIhGo3nfZDIQRRHf+M5LRMTMTk9RKNgEYYBpmtimhWFI35h8LotJTK/fx7VthspFxseGePjBQ5SLeQlOzWWxbYuQkEw2IzVGTAODGNN0cFyb8dERbMtgcWUR0
zSZmhim2+mxvLKuK6lqNLcx2iKi0WjeN5kMbG3VOH9hg6H8XhzHY8/cKL1eHsIcUQyG2SQMQ2qNHo5tkst6ZNw81fomUxOjRJGPaQ6T8bKYho1h2mS8DNUaFPNZMhmXKDIZHR0jjiLCyKFULjI2McFIeZhmu4tj2+zdM8VX/9wndOquRnObooWIRqN532Qy0On1CXwLy7GIiTDiPLZZJsbBAkZGRmh3YmamhxgZytPtZqg3fMJImt01210MI4PrFDCwsAyHfLaEbXt0ewG5XIxhxowOleh2fe675yif+eTjfPWn9/LCCw36fg/X8ZibK7Nvn7aEaDS3K1qIaDSa943jQNZz8bwCYRBg2xExkmarmtvFETiWRblUIJfL0utJOXfLtOj3Q2zTwTRDSd01TcAEI6aQz5PP5QkCn8D3CXyHmekxnnziPqanxoljGBsbxnHA96FQuJnfhEaj+bBoIaLRaN43hgEjI2Wmp6ZYXm4zVM5cCjgzLm/T64fkclmyGe/yY7Ztkc3m2Kg0mBwvkc1E+EGAYVhAjB+EFIsFZqYmqNXFqvLwgwcYH8szMSGv0G7LseJLDWny+RvykTUazXVC2zM1Gs0HwjRNHn/kHjKeTbVWxe/3iWOIEXFh2w7D5SJqmJEyIiYT4yPkc1lW1yoU8llM0yQIQsIowjINSoUc9UaHbMbhgXv3US4Vcd3kGJ1LpUJUEdVM5oZ/dM0dQhRFLFxc5Z3j51i4uKor894ktEVEo9F8IAwDpqfGeepJi2NvrVCtVjENH8tycByXsdEinpfZtj1APpfj0598lGNvnWBjAzKui2na0pnXs8EwKBeHmJqYZGwsTxSBlxhVLltE1JxhJEYYjeaaOXlqga9/4zlOnVmg1+vjeS6HDszzlS8+pQOfbzBaiGg0mg/Fvr0jeO4QjUabxUWLMLIwDZtMxrzsPlFi4VITXiYnRpmceJJz59rU6xGVSoZuz2Rs1GV8PKbTyWMYJrYN/T5kL9Upy2YTi4hG80E5eWqB3/ytP2KrUmdmeoxc1qPd6fHm26dZWl7nF3/hq1qM3EC0a0aj0XwgHEf+5vNgGCa5nASleq6LYWwfWixr+74iUEyKxQLlcgnbdrEtm1KpQKFQJAi27+9KUVUymSQ2BLQ1RPP+iaKIr3/jObYqdY4cmqdYyGFZFsVCjiOH5tmq1HUDxRuMtohoLhNFEYtL6zRbHQr5LLMz47pAlGZXMhmxVqgYjSCQv2nrx+X2MkZyX43vaddKWlyAZMM4TvK4EjKD8SBKoGjufD6q8WlxaZ1TZxaYmR7DMAyiKEscZbDsCoZhMDM1ermB4vzc5HX4JJpBtBDRANpfqnn/ZLNQr4N9aRQJQzDN7ULENOWvEhLKNaPu74Z6blC0KCESRXJsnTFzd/BRjk/NVoder08u6xEGo0RRFoixqACQzWXorW3pBoo3EC1ENNpfqvlApEUBiEXEsra7S0xTnk9bRBSDlu+drCaD2w6KFy1E7nw+6vGpkM/ieR7t5iSZbAaIsZ3ly8932l3dQPEGo+3udzk7+UtN09P+Us17ojJZej35G0ViHVFWkLRrRlnQ00IiDK88pnLTKPGi/iq3T7ebvBYkQayaO5PrEc8xOjLOnpkn2Ko2MIwejruIYcj+qoHi4YPzuoHiDeSGCZF/+A//IYZh8Hf+zt+5US+puQYG/aUAgT9J0J8FtvtLNZo0SlwocWBZIkQGXTNRtF2ADLpbdjqmYtAColJ3d9tec2eRHp/AJAzGiGNVU+b9j08bG3DhgtS/yec7nD73Mo1GiyAMaTRanDi1oBso3gRuiGvmxRdf5J/9s3/GQw89dCNeTvM+SPtLFYYREsc2gT9LJnuR3uomp85c1EGsmh1Rlg/TlABTZRFJWzUgsXao2051QAbdN4OBryp1dzALR3NnosanbCZP4M8ABqZlAnLyXGs8RxzDmTMSBA3w9NPjzM1/Nok7WdvCcx0evP8gP/kFHRd3o7nuQqTZb
PJzP/dz/It/8S/4n/6n/+l6v5zmfSL+Upd2p0exkAPAdlYIgyGiqMDGWoGFxQb/+ve+jm1ZOohVcwVKJChxkI7z2EmIpPdL35SYUZYUdV+lCUMykewUc6K58yjks2QyQ3Rao2SyYFkVDCO4/Py1xHMEAZw6JfcNA44ckb+HD81z8MCszhS8Bbju3/jXvvY1/tyf+3N84QtfuN4vpfkAzM6Mc+jAPEvLG8SpWcKyq9QaZzlxegHXnqWYn2F8dIjhoSJvvn2a3/ytP+LkqYVreg1dRvnOJR0DYlnbU26VwAjD7fEiivRpoJ5XYkUJEUiESPrYkGTraO4MBseJIAioVg3K+f0sr25immuYVuvy9tcSz9FoJCKkXIajRweDqU3m5ya59+g+5ucmtQi5SVzXS/l3f/d3eeWVV3jxxRevafter0dPRb4B9Xr9er01zSVM0+QrX3yKpeV1TpxaYGZqlGwuQ6vZ4dvfe55G0+fQ3idYWOiB4WPZCxzcP8vmlgSJHTwwe9WLV6cF39ko4eB5EiuSDlRVpN0wg7Eig8XJoihJzVWka4j0+7LCtW3I5a7f59LcWAbHiX4/gGiUTHaEIAg5fuo5Tp6NePC+g8zMjNNpd1la2bxqPMfSkqSXA8zN6S7NtzLXTf4tLCzwy7/8y/z2b/82mWvsSvVrv/ZrlMvly7f5eT1R3QgOH5rnF3/hqzxw30G2qg3eOHaKb373RdbWKvS6Lc5ceIE4cvDcEUJ/jtfeOE02475nkJhKu3vz7dOMDJc4uH+WkeHS+7aoaG5d1KXteSIYBjNhlAgZFCaQPJ52zcSxCA1lYYHtVVUhcc/o1N07g8FxYqhcolEfYW29z+ZGldHRKo8+fACAl18/zutvnmKr2uDB+w/yi//llam7UQTHjyci5NAhLUJuda6bEHn55ZdZW1vjsccew7ZtbNvm2Wef5R//43+MbduEO+Tu/cqv/Aq1Wu3ybWFBT1Q3isOH5vnb//Vf5C/+zOfJ57MEYYBlWQwPlchkMqxvncbvWwyV9pLLHGZ5tUG31981SEyXUb47UOmznpek2e5mDUnHiAzGh0BSDE0VRlMoQTIYoKotIrc/g+NEPpdlfb0Iscn4WJm1rdc5c26J+dkJvvLFp9i3Z5qD++f4u1/7K3z1pz5FEIbb3L3dLpw4kVjp7rlHu/BuB67bT/T5z3+eY8eObXvsr//1v84999zDf/ff/XdYO4S9e56Hl26zqbkq16Mk+2tvnKDd6ZLxPLJZH8N0KBWmyWbGaLRWiekzOnyQzQoQ+7sGiQ2mBceRB8QYZl+XUb6DUFaKdHXVQbGhHlekLSJRJM+FYZJx0+ttr9Carsqq9ku/pub2ZXt6rkWjNkm7dQ7HDWm0jzMz+TDtboN6o0WpmGdyfJh33j3L//rP/39sVmt0Oj1yuQwP3X+YZz7xafK5aQAmJmBk5OZ+Ns21c90u5WKxyAMPPLDtsXw+z+jo6BWPa94/1yP2Qg0KI8MlLi6uMVyeIOPNYpo+URgRxxHV2gqeB547w9RYnigcZeHi6hViaDAtOAjytNsGfrCB47bI6TLKdwRKiCixEIbbM1oG40XSDBrDlDsmCLYHFEaRPK7qlei1yp1Dkp5boN89QhRF9PwWhrlIqXCvBDs3K6xvVHj3xHmWljdYWtngjTdP4nkuhXwW13WobBU5e+ZH/B/+3Kd5+ulxPC8Jfj1zbgmAA/tmdEDqLYpeU9yGXK+S7GpQGB8dwrYtTBeIpHaDYfiEkQ8xLK8sU8jVGR35KX7v919jdWOBZvvdbWIonRbc7/ucPnucOJwhDGOCqAZsMDxc0mWUb3OUVUKJDd/fOR5kJzGihIiyoigx0+8nrp70a6RriOjU3TuDQj5LNjNCp3kEx/EwzQ3anQXGR+8BoNPZpFo/S/ukQxiF1GpNgiDAdTzCMKLXi5ifeYQojDlzfpHv/vDf86lP/XWe/f4b/Kvf/o+8+c5pur0+jmUxOlLmqScf4r/8K
1/RgfK3GDdUiHz3u9+9kS93RzLoU1XVUFXsxYlTC9eUzbITSjxYtsXIcImz55bYqlxgavwRIJTB3zCJohjTsnnznefJetMMD88wMlxiq/oOP3rhDU6cPM/X/uu/yKED8zz342NUqnW63T6FQofRofsIQo+N9Zj1cJ12p/uRf0eaG8dgb5jdrB/KUjIYG6LSe9P9aNS26vRVr9HvJ/9rIXJn4NjjTI59io3NCuOjbfLFTWanH6RWa2I7VdY2zxOGIYFpks9luNBew7ZtisU8I+UjmEaJTqePl+mzsvYm3/iOxcLiCj947g3anc6lIGgDx7HpdHv8pz/9IevrFf7eL/0XWozcQmgb1W3GTi2sw2CcODY+UMnjNKqmyPLKJqPDZSrVBn7gs7z+Kt3eJjGSu2+aDtValdNnL7K6fpoLi4ucvxCysOCyudXnxy+/zf/tH/4mRw7NU2+0WF+vUMhncByDreoxup0uIyPjzEw8xp9+8wWiKNK1Ru4QlIAYFChpoTKYNaP6yFhWIkzUsdRxlIhRz+mMmZvPh71mz56FY8dgamKYnn+WE2ffoN3MMzUxTLN9mlNn3pFFj2mR8Rw63RyzU49TKpbZP/9FpiefxHGznL/4Ou+efJlao8XC4hrf/O6LtNsdwMCyTCAmCELanR6NZps33j7N17/xo8vvV489Nx/tmrnNuLIku0EUeUTRDJZVJZMNqJxb4o23pIrP+wlgVTVFLi6t8r0fvkYQhhiA7/sEYUinUyWbHSIIfCzLxjTBD31q1Q5x3GF4eA+5XJ589hynz57k9/7dN8llM8zNTtBqd+l0Y0aH7sGy1zmw72Ecy+P8+Tbf++HrvPHmSV1r5DZFiQQlQpQQGawfMji+DwafWpbsq4RJOhhVHa/fl0wdnTFzc/kwMWpxLALk3eM1Wcxs/JhuF1pNh3dPXcCxl5mZLjMyksX3fc6eX2Zq4mEs02Gr0mVu5mco5qcJgg7nL/6YenOTUiFPFEZEUQwk/kDbyrNv3ydY3zzNVvUsnbhHpVLjtWMnWFxap9vt6zpHtwBaiNxmDJZkN8022DFhMEKt5nFhIeT0uSV+9/f/jD/95vM7XlTvlW3j+yGbm1V8X2aEXHaUYm4Mw7QIo4g4DgmCCNvO0Wp1KeUnsCyHfq9HsTSFabpgFNjYvECr1eETH7uferMDWIyU9pHNHcW2awRhwOJqm3/370/T7Z9jZnr4I4t30dwcLGt7lku6Pkg6gDX9VwmRdDVVZQ1R+2Qy0vBOCRFtEbl5fJgYtSiCV1+F02dqvHHsOBvVF5gaP8L4qEOt1uDEmR/iuln+2s/9Z3zyqYf48csn+K3ffo0YA8ccxjR8splROr1Nzl14jnpjVc4vDHp9//LrjJT3cfTQlwCDOI5YXjmGAYRhRLPVYXF5g7fePct3v/fyRx5rp3n/aCFym6HcJ2++ffpyjIhpdqg03+HMaag3uhza/xT3HC2xuXVRYjZOneeX/pu/xNHDe6+6kgH4zd/6I5aW1yVY1ZC1xdjIQcLYwDE9iDoEYUQUhRiGDXFI4F9gZGgPnjuKYw4R2QblgoWbGeVHL/wnvvuDV3FdB9u2GBupcWj/ExQKw3Q7a6yunmZ68hD79z6B465jGP5HEu+iuXG4rqTcQlLmXTFYxj19P53am7aoKJdNWoioY6p9VJEzzY3lw8So+T68/DI0GhFnz59lo/oce+cfotXqcfb0SRaXjxEEIUvL6/zG//vfMVQe58I5i+WVLUzGyWYMHHuIxeVjLK28RK2xShhFTI4fJZsZplJ9EcMwOXrwixza9xlMw6HTq3Jx6RXqrdXL7yOOYrY2a3z9z37E5laNhx84dPm96rHn5qCFyG3GTiXZM1mPt945zcXFNWan9zAyvIdTp9s0mx7rm1XOnLvIr/6D/42//vM/zbeffWnbCqDV7vLjl9/izXdOk89laTRamKZBu9MlujTo9/pNxkcO02itYJkOQdgnmymTy44QE9HurLJVv
YAfBOTyJSxziEzGIAjyTI49TKd/nuGhPH4QsbK2Sr3xbR598CfY2PTxvBGmpiIMw6Df3YdpdbCdi7rWyG1EJiNCxLYTi8hO5doHA1l3KmgGVzbKS++ra4fcXK6oDxQbELsYZu+q12ynAy++KBatOG6ytPo8U+P30GqZXLhYZ3n1NJmMh2NbeK7N2hr8v/7pt9nabOK60+Szk8REnL/4HOcWXiAIe5imzZ7Zj12KlYtw3SKPPvCXGBs+iOvkaDSXOXH2O/K+Jh9mafV1AKI4ZmOryvd/+CrFYp5+3+fQgQOMDOcwjFiPPTcBfVnfhqiS7MqyUTm3xNp6hfm5SWamS5w++woZby+e57J37mNU60ucOn2Cf/Tr/zvjY8M89shRDMNgc6vGqTMX2dqqsbFVo9Ptkct41JttwGV0eJat6ln6/Sat9jpjI4epVM9j2x5B0MO2pYjEcGk/G5XTRI0len6TffMPk3FHafqbzM3cT6s9QqV2CtvpMlQusLFZ5bmX/hMP3PNZhsojeO4UlrVOFO0j9EeJIw/HO33NLb41N5dMBmq1RIioeiD9vlg7lDAZFB7prJntjciS+0p4KGGiLSE3l3SMWhxbBP4UhuFjm2sAO16z1apYQuIYxseh268TR6OEYZELC2eoNyoUChZ9P6Db7TM6/CDdjs/aWodsZoZSYZJev835iz/m7MKP5HUyw4yPHsY0TTqdGpbp8KmP/02ymREc26Pfb7BZOQPExLHUQEoTBCE932cqnyPwZzh7FqDL6Ii36+fQXD+0ELlNSbewfuOtU/zu7/8Z99+7n1deP0EYZcjnPUzTI459yqUZiIssLL1GLifiYXOrwdmzDq1Wjky2Rz7fZ6siabbEYFoO5dIM+dwo65snCMMAA4ODez/NZuUcm9XT9PtNRob20epsMjZ8mGrtPJ3OFksrrzI9+QiuM4RjF8h6Y2S8Oc5f/CErq2fJ5zLYtsVnPj3GD3+0Rr8XYJrjOO5Z/N4horBAv3uUvv/ae7b41tx80kXNlOhIC4/BGBHFoDBRj6VLwadri4C4aHTq7s3jcoxaOyLjTQFgmklH3E67u+2aXVqCd96R33N+XoKMX3ipR6tZ5MLF16nX66ysvQIG2JbL7PTH2disU8zvIZ/Lks0M4YchQdCn1lgGYHL8XjJeCcv0sG2P8eGjTE7ci+fksZ0sldp5wtCn060SxxGN5iqV2vltnyOKYgI/Q+jP4hUz9Ht9Tp1ZZGT4XgzDuOJzaK4vWojcxqgW1gB/+s3nWd+osVWpk830iWMfYk8G9MgBw2R26mHiuEWt3uLUmQVCf5bxkXkw5lhd+yEABoBp0O+3sEyH4dG9uE6eXGYYy3KI4pix0QOUS9OsrB6j12/g2nnCyKdQmKbba7BZ2aDZ/BF75h4n4xXI58pks3lKhZ9iafVV6q23GRstc8+RvSwurfPu8QvMzx4iDCZwvNP4/b1EYZZ6/RCH9m/t2uJbc2ugKp3atlgz0jVDduszoxi0kChXzqArxveT57J6brhpzM6Mc2DfQc6c8ZmeirGdCqYpVoM4jlla2eTB+w8yOzPOyZNw4YL8lvfeC80mHD+xzmuvblJvnqPbrbO89jL9vo/njnJw35ewTZMoNshkRrBMhygK6XQ2MU2brFdmz+zHMQyDcmGGTHaIsaED2E6WrFfGMKDV2SKKQs5ffIG+32Jp9Q0cO8Oe2Y+ztvEuvX4dy7Q5uO/T+H6fVrtLrXGBQkHywuuNNqVibtvn0Fx/dBTOHYAKYF1cXsf3AxzHpNk+RRi1yXoz9AMoF8pkMzlMo0irOU6t1qUXnCaM2vh+xIG5r3Bwz6eIgVx2CNfJslk5w+bWGcqlWVw3TxD2qTUWLtUSsZmdeZx2t0ouO0QxP0khN04uO8TI0AE6vSaLS68TRpuEURfLzJLJDLN/zzOMlB9la6tNIZ/lK198ilKpx4WLb9LtdOn3R+n3TrCxeYFcNs/0xOfpdPRpeiujXCnppnWws
xsmfX+wM28YJp131WNK5KjUXdAZMzeTTsfk4fs+SSGf4/zC67RaGwRhSKPR4sSpBUZGSnz580/xxhsmFy7IPo8+Co0GtNsRL7+yQr11nsOHyvT983S6PqPD93LkwBcxsHHdUfK5CVxbfvhmaxXTcqnUFji0/7Pks6NMjt3LxNg9HJh7Gs8rkssM4Qcd2t06W5VznDz7LRqtFS4svsjk2D1MjB2lVJhieuJBxkeO8OiD/0dGynvw3AxLKy+zsXWRSrVO3w+oVOuXP8dPfuEpHah6g9AWkTsAFcB64tR5zpy7SKvlkstlqDcu0Gz2KJVmmJ2a4cLiO/iXuqMWcofIZALa3fP0egYZ53EmJ44wPLSXpbU3GC5l6HQrLK29ThQHlIuz5LJDhJHP+sZxhsp7ME2bqfH7CPwuhhmTyZTxvAKN5grjo0eoVM+yun6GuRkPbAtiG/AYH3mIfLdMu21y7z1JvMvpM6eI4ykcu8C+fSYP3LsPgzLPPw+PPQbDwzf7m9ZcDeWi2anB3W7/Dz4XhmIFSTe8U9aUXk9eQwuRm0O9Lq6W6elxfu4vP8y3vieW1d7aFp7r8OD9B/nSTzxFrTpPvS6/40MPweqqCJGNzSZrm6cYGelRLBocPvBxxobH6Pdb5HJj5DJDBKH0COj7LVqdCnEc0e832Tv/CWr1pUuiYoZ8fpQ4DMlmh4miANt0OX/xBda3TrK28S4Ae2afwLZchof2sVU9y+zUw0RRQD43zPL6a5y9+D1c18LzXIkFiaHT6fHwg4f5yS/oOiI3Ei1E7hAOH5rnl/6bv8Sv/t//BafPLtLr+9h2m5Fhn5npaXK5LuXCHmyrSrvTk1ofOGS9Q2xsHuPM5r/loXt/GtctMDPxCGEYsL51kimvzNLqG0RRQKdToVyew8sUabY3sEybUmEax8limS5+0CabGYY4otFeZ2T4AK32GpXKFpbZolicJwx9XGeI4SGX11/zGBneHu9Sq3epVkqMDJcpFk2CAM6ckWC3+++H6emb/U1rdkMtHtPBp4NVVAdJP66sHoaRxIakm3SrFGHd9O7GU63Cyorc378fPG+Oe+/9i9vqEY2PjfPyyyadjojFw4dFhKyvy/+O06HVPs3ExCS97jy2WcBzG5QLc0CMaXkEQZ9mew0Dg26vyWh5D9nsEJ5XJjNWxnEyEMvJZVnOpZMnptpcJpcdZmPrNMPlPbhunkJ+kij0aXcqzM88jmU6WJbN0tqrnF94DtuycF2X6alRlle3+MSTD/F//uWf043xbgJaiNxBHD28l1/9v/wN/slv/H/Z2KoxOz3G+LjU6zh/8Syjw0f42KMPc+58la3Nl2k1DRzXZ276ITxnhdfe/COymTH2zH4aDIM9Mx9nceU1ZiYfotlao1JboB902Df/FL7foe+32KqeY2LsCJbtUihMEARdrPw4luXR69eAmHanjesGGK0lysU5grCNaxVYW495/nnxHx8+LPEu88jkdOIEtFoyKd1/P7z1ltw6HThw4GZ/05pB0jEg6n91U2m86nHFTsKk399enRWSuBMlSnSw6o1lc1PEBMDBg0lNl3SMWqsFP/qR/M7DwzA1JfssLcHEhOwzOgael6HT3EMUz9LprlDMTxETYWDS7TaoNhbJeCWiyOfg3mdotTcpF+fAMHCdLN1uHdvOEBGCadDrNWg0V2m21ljbPMHk+L0YwPDQPirV88xMPkQYhdhWhmZrlbXN46ysvwFxgOs6uI5No9GmWMjxl//C59m7R690bgZaiNxhHD2yl1/+W3/lcmrv2fPLeK7DA/fNc++Rcd548yT1doex0f1UazXanVXAIJMpMTf9BAvLb/Dqm/+Gew5+BZBiZq5boFI9RxB2OX/xecZHD+M5BfK5MXq9OvXmKkPFacKgi+cWiKKAGHDsDLadxzDqOI5FGHXBqGEYDlHYplIpUS5FvP66SbcrgkOlf95zD5w6JTED1ar4mV99Vawj7TY88MDN/JY1g6SDVNMiA
hIhMhgnov6mM2pU513X3e7iAZ26ezNYXYVKRe4fOrRzHZfNTXjtNfkdJyYkmLhSkX2np+X//fthY2Oc6fHPUqmaQBfPGSYmwrayGEA/WCaTKZFxCpRLs/T9DuPDBzFME8t06fsdMpkycRQRhRHV2iLEMe1OlWZnkyDsMTV+P77foXPZCuKBEXLu4vNsVs5Qb1zEc10sy8QwIJvxcFyHTz39MM889fAN/GY1abQQuQNJuzqU2bTd6fIv//c/lmJmU5OMjhTJFwpcvGhQqS0xVBrGNJvMTNxPEPZ49/TXKRVmuP/IT+PaWfbNP0Wv16BUmGFh6WVGhveRz45TKk5img6dXpNur85QeR7bcslnR2l3qxSdHLGRxbIiTFw2KsfJ58YoFUfo+S3a7TJR5PHuu2J6f+CBJCvi0CFZUdXrsLYGjz8uLpqVFdn2scf06vhWIZORVXG6gJliMCh10BKSrr4ahrK/be8sRPTvfeNQ1x6ImyXtJlMsLIj1Mo5FbPi+nAebmzAzA2NjUjvk3Dl4/XWTsZF9bGxcpN2OiAHPLRIEHeI4xrI8Rob2AzGGYZHPjmA74q4Jwj6uk5fiZQRUqucxDKjWLmLaDt1OnYfu/Qu0OxVsyyOKAjyvwGb1LBeXXmF1/XV6/d4lt5/Jnvkp5mbGCcKI2Zlxfv4vf0W7Y24iWojcoaTNplEU8U//2b8dKMu8TC5nEUZVxkfnyWUzlMsHqVZ61NuncZwM6xtnuLj8Kntnn6TbreO6eSbGjuI6eVqdTRZXXiOKH6BcmMbzivT9FpXaBcrFGVxi8tkRsYJgEYQ+URwwPnqUfKGP60CjsUUQSC2CSkUmpGYTHn4YRkbkc8zMyCS3tiYC5PHH4Y03ZPsXXoAnnth5gNTcWHYSIoNumMGaIXBlOXdV7t22rywB77o6PuRGceGCWB4Bjhy5UlwCvP02LC/Lb3PPPXLt1usiRmZnYW5O6oa8+aZsW6+3WFhsXHK/Obh2jl6/iW1naDbXGR3aTxSHOHYW4hjHydDzW7hOFgO5yDvdKs3WKr1+gzDq0+lVGc7s5f6jPw3EFPPjWKZLTMzF5Vc5cfab1OoXGB4qYncNTNPA81zCKKJYzHPk0B4dmHoLoIXIXcAVZZkjB4yQfrDJxtbbzExZNJse5dI4ET2GiochLtDvB3humfOLPyafG2Vq4n7AYGriflqdCqPD+3n31J8QBn2ymRKjI4fw/Tat9gZxNo9pjpHLFjEsIGqBCZZhgtEnClqYZouRkfhyNkStloiRBx+Effvk/Y+MyAS0sCBi5L774ORJ2e5HP4JPfGJ7fxPNjUdlzERRUk8kjkUkqmyYnWJC0uIknfprWUkXXhXE6nm66+6N4MyZJFX66NGdC9G98oq4TEFcqrWaxIRkszA6KrEkYSgWzLNnodFocf5Cg2arheMUsG2bdquGYViEQZfR4X2EUYBjZTBNE8Ow8C+5elWg0Vb1ImHUpVq9QCYzxPrWWY4e+hJB2MdzcvSDLoZh0u3XWd88yY9f+/9gWy4zk/cTRGv0+z5HDu3l4IE5tip1fv6vfIUnHrtXW0JuAbQQuQtIl2UGCMMCcZzDtTNY9jssr71GoxHT7tXxnBny2SlMI8/8zOMsLb+BYRgEUZ8LCy+wd8/TNJqrZDIlspkSD97zM9SbF9msniMI+4yPHsK2M2QykCt0CH0b18qAWQQupT1EBp1ei2Jhnrm5DLWarL5MU8SF78NLL8n9++6Tx/N5CVI9cwY2NmDPHjEd12rwve/BU0/pSepmkq6u6jjbV9AqYHWn8T4dI6JESxQlsSZq/25XhIhO3b2+nDiRfP9HjlwpQoIguTY9T67JalWuxbExeezIEbkuX3lFrtUwjFhcatL3e7hOiTj2ieM+lgWulSeOTUzTxjBMHDuDH3SxLBPHyWJgEAQ+rc4a3d4mPb+DlymTyQxx9NCXMYCsVyaMAwzDoFq/yKtv/h5b1TNMjt1DNjNENutycfk8uVyW+
+87wFC5QLvTpVTMaxFyi6B/hbuAy2WZOyIELKsJSL+GybFH2Nzs02iu0+qeIIovsrrxKqbhASYT4/cwXN5HFAZMTtwr2TL9BsQRrdYGI0N7GRs+yv65T9BonSKfbzA8bJDJDDE6MoJh1un5DWL6QJY4Nun5PrZZIpvJc+pUj0wmYmhIJhnXFSFSq4k599VXk9WZ64qvGqQuwfAwTF7qR/Xcc7KP5uagLFLKGgLJJDYYqArb76uJz7blvrKKpN0w/qUO77qq6vUhjuH4cfn+LWtnS0ivBz/8oYiQYlHcppWKiJCpKSiXxUVz4QI8+6yIkFYLqlWffj/AtYv4fhvbyhCHEYX8NK6buyQ4wLGzRFGI5+axLEdKrffr+H6dMKjgunmi0Gdy/B7y2VFymTKOncM0bcIo4PzF5/n2D/9nOt0t9sx+nEymjO2YrKy/SS7n8NQTDzA2Utbl229BtEXkLkBVXn3z7dMSI2L6OO4iYVCCGIbLB3FdG89dIYgXCeKQ5bUGWW+S0ZGDEMUMlR6h22vgB1VGhvZQqV1geGieenORrDdCoTDG2MgMhdIWGC2q1Sam8QizM7NsbK7T6VQwKQEOBnnCqMnmZptKpc3SMuyZLzMzU8AwJNANYGtLBsZWCx55BEqlZJA8cUJWZ44De/fC+fPS3fOhhyRyX3NzcF2xXsD2lN50QOqgCFEWEZWRoVwyysoCiRjVC9iPHiVCQMTf/v1XblOvy/UVx+IqzWZlMaCCUsfH5XEVDxKGYimxbRVrksGwAxwnT2xYeJkxiGMMw8QyDeLYAuJLjTRj4jik1a4Q06PXa1DIjxGEUCxITJnn5jFMA8s26XS3ePXN32N57XX27/k4lmkThiE9fwPDanJg/wT333OAsdHyFWXoNbcGWojcBajKq0vL65w4tcDM1CjZXIbV9TOcOv8G0xOPkM/niKNJDMPD771FvXeSZmudTm+T+ZlHsEwHjAxhGLNROUchN0a3VyMKAxwnhxXFZLPj+L08YVQl5nXmZh/E9wu4zhyG0aNW67C55WPEJhlvGIMeMTHdTpdTp1fxfZO9e3OEoQxymYwMZlEk/z/2mKQDGoaIERVQFwRiDj5xQgJZDx8WcaK58Xhe4mKBna0hcOXzqmNvury76yYiRafuXh9UzR4Qi+T8DjGbq6siMOJYrj/LEmHS7UpQ6vy8CI4XX5QFQRiKddJx5Lo1TROMGAMHx3GIQqkbEhNgmQ5+EGBfsoBggIFPv9ek290ikyliGBaeN4Yd9iC2iSI5QdqdLZqtRV469i8o5se45/CnKJcKFApZCoUNmi2TdqfEwX0z5PJZGo0WSyubunz7LYgWIncJhw8lpdRVWeZut08+l2F2tkuhkCUKRvDscYJoL+fOv81G9Q1WVl8jm7HIZWYpFefoRnWKuTFa3Sp0IsZHDuD763juNLXmIllvlDDMY1uPA31mZqBWM1la8mi2KgRhk6w3BPhABoMA143p+SaLyxU8L8PUlInniQnYMGTQ6/fF/XLvvWL+NQyJE1lfl1VZqyXxJG+/LYGs7bZsq7lxGIaIRyUmVGxIOmNm0NyfLnSm6o+ox1w3CVL1PJ26+1EThnKtgLhVdqpafOaMBJvGsQSPB4EIk0xGLI8HD8p1+txzch32eklRulpNLCfttoVj2YShh2lGxLFJGAWYpk0cg2MrH1yEYXaxjAgv0yPTyxPHMFTeA1GAbXqXAp+71NsbvPzG/06zdYG984/hOC6e62DbFUZGe/zif/kXAC6Pdyvrlctl6HWWzK2HFiJ3EYP1Rd5+9yy/8Zv/jjffPo1hGNi2xczkPUyMPkgmM8WB+Rky7muY1jKmNUarvYbjFHDdDGHkkc8ViNjANkt0ulUwQkzDwjQdsvYIr77aY26uxZNP5un125y90CPjFcDogBFDZCJhSllcJ8LvQ63mE8ce5bLEgDSbMmB2uxIncOyY3H/wQVmFjY/LJKUCV5UYWVyUKqyPPqonsBuJ6hOjR
MduabsKVewsHVei9rOs7am7OxXT0nwwgkAKBoK4VHZyZ77+usR5gAiOXk+uK+WaOXpULCCvvirPqZojvi//iwiBMDQxzSJh2Jd6IaZNFIWAKVl8QBT1iOMOnh0AHpZpUixMSO0PDGJsICYIG3T9BU6d/1327NlD6M8RRhG2FVIoVnnskaPbhMZgPaXZmXFtCbkF0Zf2XYaqL3Ly1AI/fuktwjAkDCPGx8oEQcSFxbdYWj1JuXA/+dwse+c+zux0jm5/jSAwiaI8cWgTx3vYv2+CE6cWqVQ2McyQYmGeMN6imJ/FMloEQY7l5YgXX4wYm+jS6y+S8SaIoyGIYsQq4gEBBgVi2vR6SWl3x4GhIZmIajX5u7EhE1e7LSIjl5PYEdeVokm1mlhMTp6UGJMXXoCPf1zHFtwIXFcmICU81He+WyGzwcdU0CokVVrjOIkF0hkzHw2+D6dPy/3xcUm3TRNFknZbq8lvuGePXJOrqyJYSiVxxxw7Bu++K7/P1haXrZiqIJ1aPACYpoXrOoShRRiGl3pdARZk7IBs1qPVbtHrWcRxgGWO4jgxjlMGQny/ix/V6Uff5fCRMn/+q/89nuvQ7vbI5SsMD3mUivkrhEa6npLm1kULkbuQKIr4+jeeo1Jt8OTH7ue1N05SqTYp5LMMDxVY3aiwvv4sw0OTPPPkV4njHJ6zn2y2CvRYWdlgcuIApWKebCZDdnoE08xgWiG2OU2Mj2lEWBj4gcH6eo5Go4zrjBGFq9hOn9AfuzTTtAGXmD6Wkb0kdmQQLBbF9TI5mZh6LSspftZoSEGz0VExFR86JKu8RkMGz+Xl7bVG9Ir6+pLJiBBRFg7T3N5nZqesmbRrRmVspMu7x7FMaMWiTs/+KOj1xNUCkukyNLT9ed+HH/9YrImZjFglGw0RGtPTIkSKRdnmwgUuLxw8T+K5cjnZV6Vcp910Mt1EWJcqEJomFAoGhYKD70O5PMnaWgyERJF1yUprks9HjE9E3P9ATLPxVYqFMqZpptxJUzfo29NcL/Q68S4kXeBsbHSIRx9+lAN7nwZKVKpNHMtmaKjInvkSa1vP4QdniOKQXjvHxrpBvgCf+PgE2VyPvt8gkynjeS62VQRkSRvHWQyzRhT3yOX69PsWGWeWXn8e02pgu+uYZuPS9n36fhfLAsuyLseGtFoymC0tyeQ0MSGDoyp+trIiNUTU6s62xVwM4qceHZVBs9uV7VT3Vs31QWW5qDLtahK6mnsmnU3j+0kdEpU5o6wsoC0iH5ZOJxEhs7NXipBOB77/fflbKIgLptkUgTE7KwHgliWpuefPi0BptxOLZS4n12wQJJlTKh07itRvagIWmYxFuWwxNGTi+3KMtTUT27YA91KAqsnICPzsz5p8+Ut5LGMf5dIwpmly8KDuxH0nodeIdyGDBc5GhjMU8xPSeyEIMaxV1jdX+c//s89x/NQFTp05Rb9/kkL2XsbG9nFw/yOUimVsuwrGGlFoYzlFjBhiowtxDowecZgHWpRKMZ5nYpoeYThGs5HBdeqY9hoxWfp+CdvKUCrZhKF5Odre82RycpxkpaxWaCpupN8XM3KrJX1qVMM8VR2yXJb9KxX4wQ/gySdlkNV89KSFiMqASZMOVh1segfbhYgSKKaZpO7qUv4fnFZLKhODuFUGRV2lIgXI4lgESqEgrhjHkQn/4EFZELz8sgjDrS15rtuV38p1RZQoV4yybikRojBNESz5fOJ2azSS88L3ZUFRLMqi4tOfloWGOkcmJ2UM0NxZaCFyF5IucFYs5DCMAMddwrLGiaIs3Y5NMTfM0SP7+eLnP74t2CufG+f4cZNqFeK4xMTEGBcXzzA1sYeIPMRZoAuxR7fXJZfLEIYZxsfBslxcN6RaLROELt2+QxieZmTEZLg0ThR5WJYMbmolbFkyMFWr4pv2fRmIVCn4MBT3TTpuRFV8XF5OVmqeJxaUF16QmiSDfnHNh0cVINvJIgJXio9BK4kSIiptF7bXI
tF8MOp1EREgVo3BonBLS/DOO/I9T07K9ba4mAiSQ4ckfff4cRGFlYr81rWaiE/fl9+810t+r0G3HMg+mYwcs9uVbTc2tpfz9zyxxPzMz8hxVUCtbcs1rWO97ky0ELkLuaLAmWFIiW17HSMy2Ko67JmfptMeZ2nJZG52clvmySc+IRaHc+dM9s3fS60asbJ6gZGhEpY9SRQ4dHodHMdkZGiGbNa8LAjy+SyuG9Fs2gRBiWx2jv37LUolETcrK0l11SCQwarRkMGrXk8GvrEx2a5alYFMFT9rNiVuRPmPs1k5pmVJ3MiFCxLlf++9Ym7WfHSkM1+U9eJqQmIwpTcI5BiqSquKNdFddz84lYpYNkAKlQ02DTxxQiwlcSyFyaJIhMn4uFxDU1Mi3i9eTFyljrPdFaOsHmmhOdg5WVlBTFN+ZxXUCiJuXFesIA88INfvuXPJvjtZcDR3FlqI3IXsVuCs0+5eLvjz5S88jWGYNJuyEkqbRA1DTLVzc/D222UM7ufU2SE2Kxfx/bO4TpbhoXmmJ+fIZTOXVz+ql8zoqIllmQSBDGhnzsixDx6UAef8eRmsCgWxcriu7KeCGF1XahZksxI3oppvVSoyqLVa0qV3bk5Wda4rAiQMkyqs77wj2x05crN+hTsXJRYVu3XdTf9NN8aTWCE5B1THXb0Sfv9sbGxPv003hoxjEeSVivw/NZW4XKam5GZZ8J3vyDWqtgsC+W1dV66ffj8JNE6X9FdYllzT+bwcPwwTEeL7sk8uJxbKn/kZeS0lQgoFWSxoEXrno4XIXcpOBc62F/wRc8Hmprg+Vlflljbtep64Qubny0xNFdnY2EPf9xkfM5icLLG2ZtJoJKveel32bTa3Z0B0OvIa3a6sfvbvlwF0a0tWSZ2OmGZ7vaQCp+or4/siNup1eZ12WwbHH/1Iaorcf7+81sGD4msOgsQyoiqzPvLIDf3q73g8T34zJSzURJKeUAbdNarzrprQVAaOsojojJn3x+pqIh4OHdqeMRZFkvXSbMq1NDYmoqLZFKuIKhT40kvyG2xuJq4Yz0sCT1XszmCVXEUmIwJExYq0WslzSsyUSlIx+b77xOqiOHBAV9O9m9BC5C5msMDZTgV/RkfFZ7uwIJP2+fMyURw6lJjfx8bgk580OXGiwMWLMhhVq7Lf8LAMapWKiArfT0zuzaZsoyqnVioy8A0NyWBYKsnrqmDV9H75fCJsfF+OY5rixun3YW1NtldFzRwnKQMfhuK2WV4WwaNqjeiV14dHiQj1e6nHBlN3d8ugUSb9bFZ+R1UYS5vmr53FRbkOQM75tDVJVSj2/SQ9d2NDtpmdlUXAO+/IddLpJOJDpeaqlgpwZQ0YhWEkVpB+P+kXpawgqpv22Bj8+T8v71eJkLExuWnuLrQQucu5loI/qpx6ECQR7CdPbjedqmyVvXvhrbdk4FL1H1SsRq0mA6TrJr1k2m15TmVcdDoiIjodKSl94IAMVPW6CJNmU0RFq5W4ajxPVm1qcNvclMFPFT9rNkVo5PMSia/KWk9Pi0hqNKSr6FNP6cyMD4tpyu9pWVdaQNL/D/agUX+DIKmk2uvJLZ/XFpFrRVn54MoOuq0WPP+8fL+5nAj+pSWJBSmV5Np9+WURBdVqIhpUbJYKDk+7YuJ4uwhR1it1bXY6SdCqcquWShJnNj+fCJDBxY3m7kJ7XjXXjKrToRpjqfgRZQIGmYQ+9jEJOlPNsVZXZXDau1eEgloRqch605TBU9X98Dw55vHjYrWYn5d4jyhK9rMsWW2p6o2qpkGrJfEs2awcZ2tLBudvf1sEjmHIKrFQkONMTsogq2qNpGMbNO+fTEaE4uBklWanrJm06yAIkjReVY9Cm+nfmzNndhchGxuJCCmV5La8LNfj5KTcvv1tuVZWVxNroooBSbvalPAYTM3N5UTUgPyGyl2qgs7zebmOf/7n5TxZX5dtZ2bk/WoRcveihYjmfZPPi/VDmVBXV6XUc6eTbDM1B
Z/6lAxwcSzColaTffbskdWYbSfm3kxG9leDWT4vk9D6ulhhstnE121Z8r8qJa18z7WaHGNzU4RGJiMTWL0uVpXvfS/pNDo3J5kBIO8pn5eB9XvfSwZzzftnsOmdIj1hwZVZFent1MSVDq7UXB2VWmsYSVNIxYUL0jdG1QhxHLlmJydF5McxfOMbco2urMjz1ar8Ve6xIEiEyKAVxLZF2ORySX2fRiP5LVVK7k/+JHzmM4kVJJMRAVIq3chvSnMrol0zmg/M2JhYMS5cEAFw/rxMQgcPJpkPDz4o7pU335TBqduVAWt+Pik0VizKgNXpyKBVrydVHbtdERadjjy2d69YOdbWZABrteQ47bYMiKoQWhDIcR0niUFZXk4KKD36qLx3z5OBcXg4scQ895wE0OnCSe8fFcisTPjpxncKNZGlH1NxDKaZiBTH0TVE3os4FnGtso0OH97+/FtvibhQIkRlqk1Py4Lg5Em5NZtyXSgRks3KtZXOblIWzDSeJ6I/CBI3KCRF7QoFWZR85SviBlKxK/v2Je5YjUYLEc2HwjBEHPi+mIajSAa2YlFMrsok++STMuEfPy4DXKcj26hgua2tpE+Fav+u3CvKNKxiR2ZnZd+FBdlWrdZUaqGqVaBqUqi4kXTxs1YrqbK6f7+Uvs7lZJ/NTfGV33+/LiP9ftmpzHvapL+TWyadVaPSdnUNkfcmjuV6ArlO9u/f/txLLyXNIicmEhfq7KwsBF59VcTBxob8RmGYVDZVImSwF5BCVUhVWTQqNVdt43myUPj852VbVVBtaEiEiUaTRgsRzUeC44iZVZWSbjRkkEw31pqbk//feksEQbMpA9b0dGKNUCJCCRKVWaPEy+amWElGR2XgXVqSFZyqOQKybS6XPB5Fsr1qU765mcSVPP20iKHDh0VAZTLyeltb8j47HbHoaK4NFeuhVsTpku7pbrxKlKTrTxiG7G8YSWl/z9OBqjsRRYmbMZ9P4rZAvvvnnkuql05OiiumUJBzfXJS6oM0GiLuVU0QZUlUAcPpGJ+0ZcpxkusqDJN+M+o3V+/nM5+R61xZxw4e1I0nNTujTwvNR4qKH1HFlFZW5KZMsbYNDz8souDNN2UQ6/WSsuyrqzJ45fMiAhxHJiXPSyapZlMe63QSobO4KBYUtY8ahBsN2bbfl8HTsmSbalX+fvvbEly7d68IqRMn5DgjIyJYVADgAw/c5C/2NiOK5PdS8SJKeKSLXqXdM+pxlaqt0j51DZErCcMk8yvpQCt0u1JDR/V6GRsTsTEyIjFRUQR/9mdy7qt+MdVqksGWLjC3kysmm5VbFCV9ZpTQ8Dx5P1/6kvx+Khh1py6/Gk0aLUQ014XB+JFz57bHj5RKki577pxM9qqB3fCwiJDNTRncgiBJ8W235XnVYGt9PYkdOXxYYlRUtUY1yamiWKr4kiolDfKaFy/KPtUqPPSQiBFV2XFyMhFSnY4IFu0muDaUCEmvqHfrNaO2UQGqti2/VxAkgcsaIQiS/isjI+JyUdTr8OKLSbzI0JCIkIkJcZNeuCACplJJAstVbJVyxaSF4WBAqmpUp6wg6v2oWJADB+QaUcUGdX8YzbWihYjmunEt8SP794vP+s03ZYVWr8sApqo7rq5ur66qovAdJwmwU9aR6Wl5bG1NBkZVSlpl5dRqIoDiOHG/+H5S/KzVkj4X+/YllSlnZ5PmeT/6kYgnPbBeHbU6VjEicGXQqYobUSg3gGqMpjJATPPK/ih3K/2+XEcg1o1048bVVbmGVA0W15Xze3parrVjx+Q8Xl6W71Q1lFSxVXBlQLEik5Fb2gqiREomI4Lny19OCqCBXL/akqW5VvSQqrnuqPgR5cdW8SOqR4zrSpbK448nrpiNDXn88GEZ6NRgqDq0qgqs5bIMpBsbYhGJ48Tqks0mk5myuNTrIjhUto4SNRsb4pb51rdEwExOyiBumjKQm6YMtM8+mwzcmt1RrjTY3RqSXnGr1bhtJ7VIVPaMt
kLJ+a5EyPT0dhFy+rSIkChKYjdaLTlvJybg+9+XuK2LF5PaPqplgqqbk47VURiGHE/9Fqo4mQoEL5fFivilLyUWlmJRXLNahGjeD9oiorlhqPiR9XVxvQzGjwwPS+2RkyfFjKy6dE5OysS2sZFUVVXZGK4rg2qzmaTptloielRpeddNBkrVqlzFIHie3NptWc11u7LtJz4hA77risCZmZHVZBiKGPnkJ3X64W64bhIPlGZQjAw+ptJ9s9mkrbwmSY2HJGNM8dprci0FgVhJtrbk+1Pn7re/LdeG6lKtKhurANOdfguQ6yyf3zkjJpuVRcBP/qQcRxWd0/1hNB8UbRHR3HDGx8VCoibyc+dEfKigtyNHRJAoN0q1mkTdq3of+Xzin+73ZWBUvWw2N+WYnicix7ZlZQeJdcT3RbioYk3KOtLriQh69lnJmlGF1EAGd1Vk64c/TMzQmu1kMknBuXQMCCR/d0rpVfeVlQq0NaTZTETI/HwiQuJYMmM2NuT8V+m5Q0OyXasl8SLr63L9BEEi7JQo361Gi0rLVVYQ5b4xTTn+44/Dpz+dxImMj8sCQ4sQzQdFW0Q0NwXDEJGg4kdUJkCplKTzPvGEDKRvvinPVyoiKHI5edyyZDBUDdZcN3msXpdj5/MSp7K0lPS/iOMkqLVWk+OpwMhaTQb21VXZttGQ96Ea5k1OSkxJvy91Gh56KKnQqhHS9V92a3C3U9l3SFJA4zhJ371bqdeT+hvpAmBBIELY9+X+xIRYQlS59uPHZb/lZfkuu10597vd7b1iBlGpt1Ek53e6G7LKJPv85xNBo/vDaD4qtBDR3FRU/EizKT7sel1u09MiDMbHpR7BO+/IwKpcKqrC6spKknqorCPlsqzk2m35v9+X4xQKIiI8T15PuWhUV1Dl6lHpv+vr8nirJa6Ye+6RjIWJCVllNhpSOvvIEQnO0wiq++5OabtwpQUknTWjitIpt9ndmjFTqYgYhu0uj05HgqZVD5dyWc7FqSkRCi+8INfP2lrisrRt2W8nF4wi3TG510vSch1HrDCPPCLXkBIhgy4ijebDoIWI5pagUNgeP6Ii/NVK8P77ZUB+/fXE5w3irtnYkPvdbpKloawjrVZiHclmk26+uVyyfa8ng7Xvy+CqXDnttkwIqnfGpz8tK8DFxeQ1VJBrqwX33nujv7VbE2XFSBcxGySdNTPYk+ZuryGiavCAnN/KTVWpwCuvbK9e2m5L/JLnSVBqtZoU7kuf37C7K0aJPSXG1fFzOQmKfeaZ7fEhe/Zol5nmo0ULEc0txfi4mJjPn5eB9Nw5ERQHDsgg+IlPiNn5nXeSgmXj4zJoKutIs5ms7oaGZLButxNT9vi4CIe0m0ZVk6zXk34pxaI81u3K+/mzPxM3zcGDSb+bqSkRJouLcsyPfexmfnu3BumiZSoANf142g2jHk9PbCo4slBIfou7BZU2DpIxptwei4vSWDIM5TtR6bcqZV2l56oy7emicLvhOEk/H+WKUS6xYlEy2UZHk2Po/jCa64UWIppbjqvFj8zMyG1yUgbfjY0koO7QoaSnjKpDAUlmTdo6ksslsSO2LUJFuWgMIwnQUxNhvy9C5/nnZdX52GOyCl1YkOOcPSuPq1ojesUopC0i6aqd6VoUg9sq14xl3V01WxYXk6ZwR44kn/3dd+U53xdXTKsl18L4uAj1lRV5Xp3Hqnrw1c7BTCZxhanePoYh18X4uAhqlfWkysJrNNcLLUQ0tyzvFT/yyCNJnAbI/UxGetqsrYmYULEgcSz7tNsySPu+3KanxbqhXAIqbiQIkhWlqhzZ7cpx031qDhwQsbR/f1KR9dlnJetHB/HtLiSUEBnsRaNiS+62rrsXLiRZKEePJuesalzX7YqlsNUSK8XoqDRmrFZFfEMiptV5vNN3aFlJvIlyxSgrSKkkGTGq67TuD6O5UdxF6w3N7YqKHxkZkf+Xl2WV2OuJCfmZZ8SMrXrR9HpipRgd3d6jBmQQV
kWf6nVZSRqGBKCqLA2VVdDvi+BQroJ8PqlY+e678Cd/IhPE4cMyAezfL/8HAXz3u3d3HQxVSG6wsqr6O9jZFWSSVL1mBt01dzKnT18pQqIoSRFvtcTF2GrJeZrNimVueTkJqFYdqFWBsp1EiEqL9v0kbR3ECjI/D5/9bCJCpqflvWgRorkRXFch8mu/9ms88cQTFItFJiYm+Jmf+RmOq77VGs37ZGJCTNYqGPLsWcliCUMRHp/5jAykmUySJbBvn6z0CoXtMQmlUlLorFaTleWePbKvOr7qBNztimWl05FJQJWXP3sWvv51mRCOHpXtDx1K4k1+8APZ7m5ElWZXPWdgu2smXVlV3VdVVdXkp4I072SOH0/cgffck1jkvvtdOd+aTRHbvZ7EI7XbUsRsYSHJCoMk02Un1G8x6IpxHBHrn/qUxD7ZdmKFLJdv1Deg0VxnIfLss8/yta99jeeff55vfOMb+L7Pl770JVqt1vV8Wc0djLI8HDiQDKwnT4oYsG0xLT/xRFLds9cTk/bEhKz8VLCd6muSzyf1QhYX5bGhoaRXCshg3+vJNq2WHFtV/1xcFFeMSuPN5ZIiamEo6ZTKdH43kc0momK3SqqDLhjLSiZMx7mzM2biWESIKmt/9Kg83mxK9osqQFYoyPcxNSWxICdPighRDR2VcNkN1URQxU0FgeyTz8t5+rnPJZ1x9+wRV8zdYonS3DpcV8Pbn/zJn2z7/1/9q3/FxMQEL7/8Mp/+9Kev50tr7nBcd3v8SK0mNxU/8ulPi8n73DlZ/fm+DLxbWzLQquqSpimDfa8nj6mmerOzEg+iLCKqroJy0TiO7NfpyASh3DhPPy2vAUkTv1dfldXu3NzN+rZuPKo3UNrFMmgRGRQlppkIF9e9c2uIKBEC8h3t2yf319fhjTeSKqaZjHwHw8OSJba5mcQzpW+7odxcKhZEPVYuS7C1csOoIHCN5mZxQz2AtUs1sUeUs3+AXq9HL+VYr6uEeI1mF1T8yNqaDNKq/sj+/eIm2bNHTNkqdsTzZNBVIkMF+KmVY6uVZG0UiyIw1OoTZEBvtZIeNSqVcmtLKsA2GrLKVO4d15XXevddES2HD9+0r+qGkhYiu1VWTdcRURVVQe7ncnemRSSKpO4MyLmrxOn58+Jm7PXknFKp564r4mRlJRHKcPXGiyrbSJ23qlhcLiev99BDibUvXadEo7lZ3LBg1SiK+Dt/5+/wyU9+kgceeGDHbX7t136Ncrl8+Tav2rVqNO/BbvEjtg0f/7hk2Ch3Tb8vFo+RkaS9OcjgrAqatdsiLlRVVtURFmSibLfFGqPqOSgRc/Ik/If/II/v2yeTzeysbH/+vFhH7gZUv5m0RSQ9eQ66ZdLZM76fFKS7kwjDRISUy4kIOXZMztVmMxEhY2MiPN58UzJqVH8k1f12N2xbbmo75foZHZW08kcekfNc9YfRIkRzK2DE8Y1JlPubf/Nv8vWvf50f/OAHzO1io97JIjI/P0+tVqNUKt2It6m5A+j3RYioM7tcFpdNHCc1GVSrdNcVk3evl3QRVdUlVS0G1Rm41RJBka5UaVmy0lQDuvLbT05KvMqhQzLJ+L78LRbF3P6JT9z5vvh33pHGa0tL8puoInOmKROjasamrFFzczIBx7Hcv+eem/0JPjqCQH5/kM8+Pi6f84UXkkrBpZJ8R2NjUthseTnpefReAkRlKcH20vkqZun++5NtDh26u+qzaG4O9Xqdcrl8TfP3DXHN/O2//bf54z/+Y773ve/tKkIAPM/Du5u7XGk+ElT8iApAVfEjMzNShn3/fimVrcpfDw/LwK1iPSARGP2+CJSNjaTWQqOR1BhRgiaTSYJY+32ZRJ57TqwqTz4plpKjR2VyjmP43vckW+FOnhCUNWRQcO3U82RwuztJpPX7UmsGxHI3MiLi4gc/kPOvXk9EyNCQnCsbG4lQgyTGYydUZpLKhlGPqU65Khh1bi7pQq3R3EpcVyESxzG/9Eu/x
B/8wR/w3e9+l/3791/Pl9NotlEsyqpalc1eWpLb/v0SVLqyAm+9JYN3rSYDdaWSlIRXmTUqYFUF/Y2MJF16lXBRJeRzuaRuycZG0qfmc5+ToNr77kvM89/5jgTV3snm8bR7JS080um7O4mQO0Wg9XpinYMkkLrXExGiYpTK5aRj8enTcp6o2jXKunE10taSnawgqk7InSTuNHcW11WIfO1rX+N3fud3+MM//EOKxSIrKysAlMtlsndbEwnNTWNyMimHrdw2jiOCZGJCggFVKnA2KwJmbS0pTmbbMpj3enLb3EwKo7VaiavG9xM/v6pg2WiIO6hehy9+Udw9R4/KezBNsYw89dSdGZipBMVOAatqglUuhHRmjWneGRkznY7EBUHSrbZeF3eVEhqlkjzeasn5ubSUtBe4WkaMqkKbblKnLHZPPpl0xt2/P4mb0mhuVa5rjIixiwT/l//yX/LX/tpfe8/934+PSaO5FtJmchCz9dSUTASvvCIThCoLr4qdqVRftfJUAaqqOZhy1ahVqWmKCHHdpOS2acrrPP20vObqqkw6vZ5sl06nvFM4flzqq6TLlyvRMTws1qc4Ttxas7NiHXBdePjhxKVwO6LSykEyt1RTxrfeSjK1Mhn5vGtr4spTbQnS1qLdMM3tFqV8XkTHfffJYyMjIrI1mpvFLRMjcoPiYDWaa8Z1xV2j4keqVbnNzEjMxoUL4qMPQ7GITE6KBaTbFUGh/PG9nlhAVLde100qXaqOvVGUmNx9X4THd78rJvP775f3s7Ulx3/lFXlsauomfjkfMao42U6kJ9p0d94oSgrN3a7U6/JbQ9Kx9tQpsY7Uakn2VqEglrGVFTkf09aNndipJouygjzzTJIufejQnZdxpLmz0Z0ENHclV4sfmZ2VNFvTFEExNCQD/+amWEdMMwlKVT07bFsmlnRWjWqtrtJYTVOKVr38clL8DGTfxUVJ1Wy3pWrsnYCydOzUryRd1CzdayYMZZ/bNW6mUpFzCuR3dF0RmaoYWbGYVO09c0Z+d3WevJcVRAk1dT+fF9GhqrKqGBSN5nZDCxHNXc1u8SOPPSbm9ZdflomjWpW0y2YziQuxbZk8lRhpt2XytSwRMKoSqyqApmo81OtSO6JahS99Sd6H6yZBrK0WPPjgTfpCPkLSfXtgu+hIFzODJJbEtm/fmIaNDblB0rX2Bz9IatIMD8tnU12cl5aSZnXvRVqEqOqon/qUnDeeJ5YXHYyquV3RQkRz12OasnpV8SO+L6JgaEiyXc6cEYGismby+SS7Rlk8er1kZWtZiXXE9xNXjeru67qy/blz8Id/CD/xEzKJ3HuvpPeqlOBPfOJmfzMfjt2qq+5U4l116lVi5HZjZUWEJUj13CiCb39bfn8lYnM5sYitrIgwuRYriIoFUSKuUBALyMGD8vzevXL+aTS3M3dIkpxG8+FR8SOq70a1KhkvY2OSZjs8LPeVX35kZHspd9dNMh46HZl4lFsGZFJqt+V5VdxraQn+9E9FmAwNwQMPiMWkVpOMmts5zMrzRIgogQE71xBRmTKOc3uu6lWsEUh1315PfjsV7Kwq8y4siKjd2Lg2EQLb64KMjsIXviAipFyWc1WLEM2dgBYiGs0ApZIM8iprY2lJ6js8+qi4bEolmRQ8T4RJoZCkU6qJQblqQCwoKngwihJLiQo+XFsTE/7p03K8+++XbVotWVVfraLmrYyqYZGuCaIESHoSVmXIlWC5nVKZz5+XeB8QS0W1KoXsGg0RIqpS7IULsm2zeW1Bqen7xaJ0lP7850XsHjwo8SAazZ3CbWgE1WhuDFNTkgKp3DJnzshE8NnPSmaNacpkE0Wy4lWTj+PIZKPcMmEoz/t+UgBNuXFUiq/q0lutwic/KW6a06fFpfPss/JYJnMzv40PhuoAe7XmdyBCRFmKbpeMmdOn5TcFESGLi2JB29qS33V4WH7XSkXSc1Vq7nuR/k5GRsQaZ1lJVVaN5k5DCxGN5iqYp
qxAVYXMfl9EyPCwZNi89JIIhEolqR/SaCQWElVfRLkfslkRF0qgdLuJ+6LbTZqf/cRPyOtevCj///CHsiq+3crpDMZ8DKafKmxbJvV8/vYQIsePJ3EbR4/C22+LEFlfl9/I80SQrK2JuLyWgFSFsgo99pi4CS1LzoU7pdqsRjOIFiIazTXgeeKuUTUiKhW5PfSQWEGOHRPRsbUl2zYa4l5JW0dUV1TVAVitkJUVxTTlscVF+JM/kYqrc3Myua2uSkXOBx+8vQpVGcbuZd7Tj6VTd133xr2/90scSyCzcicdPCiN66pV+Z3GxuSzqAJlymJ2rdi2HOOZZ+Q48/O3hzDTaD4MWohoNO+DUkluKkticVEef+YZWSU7jogQNVGpnjXKOgIiStIWk/RjKmBzdVXiQx5+WIIdPU+ybF5/XQIi9+69GZ/+/WNZV2bBpDNmFCrb6GpF0G42cSy/MYiY3LNHglJbLREhMzNi/VpakvNCuW2uBWUFeeIJSSnP50WE3qrfhUbzUaKFiEbzARiMHzl7VoJWDx4Ud00+L9kRnpcEniqLiIqFUE31VAG0KJL7SoyoviSHDknMiOOIW+j4cTnmvffe3O/gWlB1VQZJuyrSBc1uVWtIFCV1XgoF+e2/8x1xm6nKvJWKWMRUVsy1YttyPj31VJJKfqt+DxrN9UALEY3mA7JT/MjCgrhwgkACF9ttqapp2zJpBUHSsl31FHEc+asmL9+XSVkFwx4/LlaWJ5/cXmuk2ZQV9K3MYPquIm0VUcW6VP+eW40wFAEIkklVKMD3vy+iw/clg2VlRX7nev3aU64NQ2KGnn5asrBGR8X6pdHcbWghotF8SHaKHwH42MdkAstkZJIyTZm4Gg0RJqryqsoocZzEnK+ec5xE4HQ6smq+/35pnhZFMiE+88yta8IfrK6qGJyslQvnVssMCgIJIAYRCnEMzz8v8R+uK266ixfFlabcbNeCbUv8xxNPyHmh+8No7mZ0HLZG8xExWH9keVlqQDz+uJjyVSv4UkkmXNtOrCGQuGzSKGESBDL5fec74gpQQbLV6q1da0QJkcHPNShEVGXVW6mGSL+fiJCJCRGQr70m1o9iMQlKXV5+fyIkn4cvfhE+/nE5J44e1SJEc3ejLSIazUeMih85cyYREPv3i+A4d07Ew8ZGEswKSQaNSglVGTSQBLIGgVTqfOEFmbxUrZF6XWqNPPPMrden5VqsHOmusrdKhki3K78ViOvl3DkRHKurktVSq4lrZmvr2l0xKg330UflO9m799a1ZGk0NxItRDSa64Ayt6v4ERWQet99UmGzWJRMC9OU53q9JJDVMJIsEiVGlNUkDMUS8vbbMhnef79Mjpub4qb5xCckhuFWQvWbSZPuIpsOVr0VSpa321IJFcRi8cYbIjhWV+X/paWkE/O1ks9LIbxCQRrU3WouKI3mZqKFiEZzHVHxI7WarKhrNXHdzM9LMGuxKBPcoHUEdq4/oR7r9UTQdDpSW8Tz5DjPPSeFsEZHb8jHuyZUwOpuKLfMrZC622xKzAdIJsyPfywxP/W6pNVeuCAi5FpTcy1Lfv8HHpDffWrqur11jea2RQsRjeYGUC7LTYmRRkNW16p778aGTHD9vogMVY1VkS6RHkWJ1WR1NREje/bIRPnyy2J5mZu7OZ91kGz26jEiKpX5ZqesKrEIIhh+/GOxWvm+WDIuXBDLyLUWKCsUpEJuNivWsduxq7BGcyPQl4ZGcwOZnk7qj6jg1L17xWpQLouwqNWSipwqCHUwDkH9r9rMv/yyTHaHDkmA5ZtvSu2So0dv6MfbEdPcfRJW8TA3O2OmUpHvHqSfy4svJpaqKBIriWpu915YllhA7rlHrCjDw9fvfWs0dwJaiGg0NxjL2h4/EkVSP0JVUFWTYq8nQZOq5shuhKGIjnffFReCCmI9eVIef+yxG/fZdsLzdhci6QZvNys+ZGNDbiCxHK+8IpkxhYI8ruqFXAulklhB8nkpTKb7w
2g0740WIhrNTWIwfgQkLkGVkV9eFnfNTq6aQaJIrCiq3shDD0mmx4ULEnvyyU/evPiLTGb3QmXKInKzuu6qUv0gouiNN+R7Hx6WoNRa7dqqpFoWPPKICEzdH0ajeX9oIaLR3GQG40eKxcSKMDQkfUva7USQXI1+X9KFf/xjyajZ3JTJ9jvfkayNm7FCv5oQUTjOja8hsriYuFvqdfl/c1Pex/nz1+6KGRqCz39e/t4qcTkaze2EFiIazS2Cih85c0YCN2dmJIsjlxNxsbIiYiQMr24dCQJZ5b/yiqSK2ra4GL79bfjMZ258GXVVvG0nlEXkRhczUxlHIN/r6mrSrPD8+WsrUGZZUj133z7dH0aj+TBoIaLR3EJYFhw+nBTUKhRkgnYccRdcvCgio9+/ussgiiQ+5PRpETdTUxLr8J3vSOGzGznpe97VJ2nDEKFyo6qLnj4tMR9hKFYQFY/TaokYuRZXzMiIxIJMTkqBM41G88HRQkSjuQXJZLbHj0xNiTgxDHEBLC3Jit73rx7I2u3K/t2urNqrVWld/+STNy6bQzV32w3bvnHWhOPH5fvq98USsrQk/9frYn16ryqpliX9fubmdH8YjeajQgsRjeYWplxOAldBSsWvr4ulZGVF7vf7V3fV+H4S9HrokEy6zz0nwZUzMzfkY+wqROJYJvPrXZo+jkWEgAi45WURIYYhbq9rccVMTEiczdycxPFoNJqPBi1ENJpbHMMQwTA5KfEj4+NJJ9jhYcmUaTSuHsgahmJdeecdiWloNOCllySg9eDB6/8ZrmYRud6BqmkRUq+LCFlfF2F2LQXKLAs+9Smp96L7w2g0Hz1aiGg0twmD8SMHD0pcg2mKyFhZkeeu5l5ot6Xg2fi4bPf663KMhx66vu99NyFimuKWKZWuz+tGEZw4IfdXV+VWrYob5lqyYqanRYTs36/7w2g01wstRDSa2wwVP1Ktivg4elTcDKWSBLPWale3jqj4iGJRBMm778rE/PTT1+895/Pby9Sncd3r06gvDKWoG4hw29iQz1mpvLcrxrIkGPXgQbFEaTSa64cWIhrNbcrQUFJ/BCSGIYoko2NxUWIhdrOOhKEIGd+X/S5ehG99Sybf6+F6KBQGhUgMRMQxRMRksxbw0b1wEIjlJwzlb6UiFpBK5b0DUufnpQDcwYO6P4xGcyPQl9lHRBRFLC6t02x1KOSzzM6MY+r6zprrTDp+5PRpaXZXr8sEurUlVoCrpaO2WiJaxsfl/z/7MynO9VFOwHEcE+FjmBYMxGNEccxWs8nb57vsmy4zVPjwdd77fYml8X35TjY2xErUbF59P9uGL34RjhwRkafRaG4MWoh8BJw8tcDXv/Ecp84s0Ov18TyXQwfm+coXn+Lwofmb/fY0dwGWJROoih958EEJYlUZN1ebhJWrptcTa8E3vgGf+9xHExNRa3VZXG+wttUHJoDt4twwY4LI5/xaldVakwf2TTA79sEDRtTnb7dFjGxtya3Xu/p++/eLFeTAAR2MqtHcaLQQ+ZCcPLXA//Zbf8ji0jrDw0XGRoexbZM33z7N0vI6v/gLX+XwoXltMdHcENLxI4Yh9UdsW/5fW9vdOhKGYjnodGB2VsTIpz8trp8PQhzHrFSanFmq4IcR+ayLbUO4Q+yK50HGten2At46t04h41IuZC4fp9X1CcII2zLJZxyMXZRCuy29dapVESOVinymq2XFWBZ85SvSKPBGl5jXaDSCFiIfgiiK+Ne/+3VefPltDMPg4uIatm0xMlzi4P45Nrdq/Mk3nyOKIv70Wy9oi4nmhqHiR5aWJCNmaytxU9Tru+/XaskkPjcnJeGffvragzWVaKg02ixvNFmttgiiCNc2iaIYw44YtIhghARxn27fx7ZMOn2fMysVHjk4Rb3dY3G9Qa3dJYpiTNOgnMswM1bAtqxt4qTVMrh4Uaw/CwvyeWu1q7/fQ4ekLsjs7LV9Po1Gc33QQuRD8IPn3uBbz75ETMzIU
ImRofsIQ4NOp8e5czaz0/fyzjttTp/+AX2/zcT4NNksdDqtKywmGs1HjWHIJBuGIkKGh8VdsbgodTR2y6zp96XfyuSkVGF94gmpPXI1as0uZ1YqrFVbNNo9wlAiQi0TAgMa7R4QMDjkGGaE6fbo9EMsM8S2LBrtHiuVJhfX6/T6IbmMjWWZhGHEaqXJwkaNrGtjmSamaeDEOQy/yMqiy8qKiK1ud/f3atvw0z8tVhDdH0ajufloIfIBiaKIb3z7BTrdHnvmJjBNi15/lYw3S7GYpdHosFmp0Wy2KZeKHDxwWLIGIsh4sH/PIZZXNvjDPz7HT//kLIZh4rqXzNQZ+as6sGo0HwYVP9LpJOLk7belpsZu1pEwFGtKqyVxI80mPPDAztsubtR569w67W6fnh8Sk+S/RIAfSnaM6frA9sAT04qwPVFEYQRhFNLu+ixtNOj1Q0p597Irxo9i+kFItx9gGgYTQxnqdYPl1Yi1iz5h32Jj/eo11++7TzKDRkev7bvTaK432m2vhcgHZnFpnaWVdQr5LH4QUciN4dglgqBGGPWIaHLq7DqdbovHH7mfWqOHa+3Hy5QwsYhjm5FyibW1FufONRkfL2HbUmXyaueg6laaFiyZjKzsdJCd5mpksxI/UqnIObO+LhVHV1d3t47UaiJgokjSX596KnkujmOWtxq8cXqVrh8QX8qLNS+l6cYAMcTEhBG4XkB74PimHeO421+80elJT51CIlriOKbe7hFEERlX3DKVLZNOw2HlQoZmw6DTvPSCO6QBOw589asiRO6yMV5zC7NTosPB/XM8+vARxseG7xphooXIB6TZ6mCZJuPjQ2xs1CgVPCwrJwNvHOA6ZRw7j1eErc0iG+t9stl1ykUYGS6SyThYjonf7OMHfUxThEQYSoS/7yc3FWxnWTKgOk7SKGy3pltKlCgrS1q4aCvL3c3wsMSQFItSc+TECTh7dvdKo/2+FAYLw5hKNeSpZwK6vs9mtc35tRqtbh/TMAjjmJjtdTriGC55aTCdK9WOZYUY1vZo0jCCarNHPxDRMVTIYBomPT/EsUwMA6obFrV+zMr5DO2Wjd9VA3UEbL8oHnpI0nKvR9E0jeaDcvLUAr/5W3/EVqXOzPQYuazH4vIGf/gfn+X3//232DM3ydjo0HvGE94JFhU9JX1ACvksmYzH7PQErWaXpZXj2JZFuxPQ60MYWhg4ZDNFSsUcxWKeIGyysrbFVtXh4P4ZbMvENLsUi/IzdLuJ+NgtuyGOLw3uYVKwKgjk5vvJNkq0KCuL48hjO52faSvLoGtIW1nuTJSLZmpKzo29e+HYMSlstpN1JIpCTp2C0pDPxbUeU4dXMQ2DIAwxTYMoircJEINLFpFLFLY2yJ05y5bxU9uOO7l4nOJUk+b49ojYGOj2fXp+QKsbMJT3iOIYI4LNdZtG1aGyVKDftrgiAPYSrgt/6S/plFzNrUcURXz9G8+xValz5NA8hmGwuVXnxMkLhGEEMfh+wPBQ8arxhLtbVI4yPjZ02wgTLUTeB2nlmct6HNw/x1vvnOGRhw5z7O3TnD23hB+EOI5Nt9vDcWyC0KZ66gT79kyTzWaIjZjltTphdIZyucCD9x/lsUdLBIGsPHu97d1UB6tAhmEiVpT4UAJkcDu1fxgmPUiUcAnDpNKlYSRiRYkX277S2qIGc8e5UrA4zkf7XWtuDOn4kVxOsmXeekvcNwmJKq5XXbpd6PUnmD+yTp+IKLqyVOmgCPn4v/vXrFvzvLpvuxAptzb41O//Gy7c9zBvffYr256LInBs6AcBW40Q0zDY2MzRqLpUl4YJ+7tHmj76KHz5y9e/q69G814MzhsAp84sc+ytBWam9hBHRaLY4My503S6PcZGy/R6PpVqgziOOXJonhOnFviTbz7HwQOzl0WFsqhsbtUoFnIElsnq2havvPYuv/8H32LP/BRjo+XbIkNTC5FrZCflOVwuYhoGFy6u0uv2yeczFPI5mq0OcRwzM
zWGY9s0GkXCYIiMW8a2bVy7R7PZYW5mD4888Ay1mnlZBORy8ncnt4sSFUqspIVLWngoomi7WFH3B0VOHMsxgkAmpPTrRdGVq0llZUlbWwbdPYYht0HB4nnaR38rouJHJifFSnLsmGTaBMGVIqPfdamu2oSBwej+ZRw3INuoYft9AselUywnJ00c8/gf/S5ev0vJ2Ro4UkwuEH/Qnrdfp5/JcvITn728X6ZRw+73KPR7+G6Gre4ElW6O+mqJONxZ+Vp2xF/8Sz5zcyau6/BRlo3XaAZRY2q/v/2vsmovLa/z8qvvsry6SaPZo9XsYxg2rpeh1ZrFNoaIRkcxDBOTKnPTs9i2TSEXsVXZYnVNVgTTkyOcPL3A4tI683OTly0q5xdWqDdaLC6u0mx18IMQw4CM51Iq5Tl8cO62yNDUQuQa2MmX1+70OH7yAqvrW3Q7PdY2KpiGQb8XMDd7mOnxEYaHCoBBNtPHNEaIQoOQGMfOUy4FPPLAQ5RLE9RqIjquxXyc3kaJgEJBhEvaqqFEjLK0DAoXZX5PCxd1UaXFy6C/3zDkeVWFU1lV1HaD7h/T3NnaorZRn8e2twuWTEYe0yb168+2omGewZ59YLkwN2/y2qsmm5tX7hOFJrW1ImGzy1O1/8DepdfINeoYxISmxbl7HuLkM58nu7VBrt3EADLhYKhqTMnfuCwVDrzyPCc/9gyFepXJ428y+84xMt0WAKeHPsba+GepeTlgZxFSnF5n7z0VVttZKmctSlmPkXKWrOu8ZzE0zd2LWuClRYQaLwcXbunFnLIsK9f4oPV6a6vG68fO0ekZ2PYUUVDDsnyiKCDoh8RxyGZllUZzg7GxPL1ek2Ihi9/3aTR6rG+t0u6YLC37lMtlPKfMqdMx7RZsbNZ4592IbnuSTqtFsTCKH1yg21uCOKYV9jh1eoFDB+Z2tajcStyVQuT9BPfs5MsD6Pd9KtU6tWqDkZESEwyTybr0uj69fo1Cdoxao02j0aLXD4iiNRzbIpfLMjM1Srlc4MCBUTwvuRAGT+wgkBNeneCGIRO9uin3ibqpgNfdUM8ZxpXCRf1vmlzhJlIXJCTvRb3PdFBt+mJMb6eEjxIsKobFNBMBpt77oJUlLarU+x8ULNrK8sFRJdhr7S69fkCnH2AAnmvjlm0O3J/DvWCxerFAFFx5cjUbYxzrPEk5WKGIVBAzo5DDb7/KgbdfpZPLXxYabnRlcY/h/url+xZw+PlnmX3nDbJ+73KcyaniY7xR+jwXzUMQuhISkn4rps/+x87j5nxiDDKuTRTHnF+rcXa1Qj7jknFtyrkMs+NFyvmPoHa95pZjUFCkxy4lHtKu6cHxNgyvXoU3jRqLbVvGn3x++yIsDCNefv0Ezc4mE2NDnF9YodVpkc9liKOQWrOG7/ewLJMghM2tPpZl0u70qDda9Po+w+UpSiWJsG7WezSNHhsbbUaHoVL1qVYCgiCD52XIZi2CoM9QcRrTtAmjiFpjmbfeOcOeuUlmpka3WVRuNe46IXLy1AL/6c9+xLG3TtFqd8nnMjx4/yF+6ktP72i2Wlxa59SZBWamxzAMgzjyiCKbs+cW6HZMRodHaXd6ZDMlCvksI0OwvrHF2Y3nqNakwYdlSqR/NuuxvB5xfsHliz/xNPfdWyaKrnSdKN6rSyjIhaOOkb6g1H3Y3vX0akJmt2BWdQzYbuFIu4/SYkpl/ajH4/hKS0t6paFQbqcwTG7qPSvRko5dSbuIVBZR2oqiBonBWJY7fVF8rWXRa60uJy5u0uuHWJZB1w8JI6n5QT8g69rkhhtMWD5jEz4XThepbaasESGAwUr+fr5plXlq84+4p/7C5adNIN9uXf7fIMaIe8SG+MmtqEs5JUQADr7x4uXQ0xiDE8XHeXH0p1jPHuCy+kglxpRnV5g8UME0Lj0bQ98Pafd84ji+5F6M8WyLrUaHVq/PkblRLUZuYZRYGLRQqL9qzFPjyaCouFYxAcnix7KSEghxLOONc
k2nEwfSY6a6rxZR6e23KnXW1rcYKhTodg2CfpZiPodhBpiWw1CxQLfbxXE9iC1M06GQtak3WpSLIQYGpmVgGiYxYGVjXMfmxIkA145YW7fJZffi2DGWZWMaNp5TZKtymjAKME2LKPRZXm1Sq7coFLL01rZotjq7fhc3k7tKiJw8tcD/45/8DidOnZfI5EucObfIO8fP8X/6pf/iCjHSbHXo9fqXg4zCsMTaWpNKxcWxZ4ljKOULDJWnZdDP5xkpBWxs1YjCCNOUwdOyTGzbIQoD/DCk0ypx7Fi8zRqh/npekna7U8bLoDVi8H76wrmamFEXTVo8KAGwW12JtFVmJyGzk4tJZeRks4lQSFtH1EpEPb6Tiyj9ftT77vW2D0DyPSeiZXCwSFtblIBRomanWJbdUqNvZdIWjnRZ9EFLQBzHLK5L0bBizmGz3iGMIjKODQb0+iHtXsBoMUOj3SMubjB3b53MUo7VsyMQbB86qpk5vjn1CzSdET62+XXgyswZgC9c/Od8f+Jn6dl5DjVeIRttd9ckhdBM3il/gh+N/QxN98oVnONXuOfRUwSjBS7Hyl6KS2r1+oRRjOdYRLEUQYuBUt6l3uqzuN6glPO0m+Y6MygoBi0Vg9aIQUvw+0XFpaUtw4aRWELUGKD+qvtp0aEWdZ1OYhFOj0Fp8ZMewwbfb6+XxbMexsCBKGK43MOyTaKof2lBG9LNtCnmc/T8CL8f4jgulukDBoZpYhomYeQTBj5RHGEasLIWUMg3cF0P1y4Qhj05VlDFD7osLr+CH/UuzW8xnutQqdSxTAPPdSjkP3x36+vBXSNEVF+Y14+dwHUdSsU8jmPh+yH1RovXj53gt3/vT/i//sovbnPTFPJZPM+l3elRLOSoNc5w7J1zrG82KOYLGIZNu7OFYfgYhks/KOPYLr7fxXM94tjAc4uUS3NiGTHBwCCKQo4dazEyUto2eaYnUXWRqAvsvdwu6iJU+6cFjhI26rFULOG2C0z9HXSxXPl9bh9AlPtlUAjtxE7iJS0a1CChSFtH1GurgUUNNOlsovT7j6LkvalBJj1oDFpa1OCkviclXlw3GcAGXUO3ipUlbeFIl0XfyRLQ6vrU2l1yGZsgjFM1OuSDOLZJzw9o9wIMEwI/ArvL8FyH0e5Z+m9GnCo9AUbyQ/lWge+P/jRW3OfRrW8BV4aK3t98ifuarxACDlfONgYQGhanC4/wnYmfxbdHrtjmMxf/Gfe3f8yPH/4F+nFSHMSyTBzLIgxjHFs+i2lAEEaS+msY5DI2tXaXVtenkNX13a8FZdHcSUx0u9tFRHox817jQHqMUSJAxZ2lrbiDLty0FUJdd2nLclpApMWNEg/qddRrwvbxcKf3lx5P3mtsBIhiC9PMEmOAGWIYYqEzDYcoDgmjEMu0sV0bx43pWhG5nMWFi6uEfkAQBcRxiO93aLVXsGzwAxElfX+SL33+MSrP/ZATJ88AITEB7U6NTr99ubCgYUDfD3j35HkmKsM89eSDzM6Mv/cPfhO4a4TIwsVVnnvxTUzLZGy0jGUVyGX2ADA6HFOtNXjneMyPflRlYmKISrVGr9clm3HYO/cQZ85eIOsVubCwShQZuI5J3+/S7YZEcY9Od4Nmq0GMj2WZNJsdysU8xWKOmcn7yYcmYWRg2RZxFNJotrHdEuPjpcsXRNoaoS5w9Xj6Qt1ptZBeAaRXBVcTOGkLxm7xJYMCJ21NUG4aZSVJB6+ms20GL9hBEaNWILuthNRx0+9pJxGTzSZFq9LHVgNietAatMao+2r7RmNnn/FO1hYl9JR7SH0/O8WyXC8rS9rCkS6LbtoWpbx5hSUgCCXt1rJM+n5IFMc4RiLATdPAD0Lq7R5xxOUU3RiY6Zzi6PK3Odh8jR+M/wVaaYuFVaBtFokxMK6wh4jQMIh2qfwh1J1xltx9V4iQTH+N/+rMf48T94kAu5/EnMRAHMXYrkkURQRhRBBEqHdtGollMuoFBOEOJ9odjhIUO
4mJbje5JgfdHbsdC5JxKX1LX/9q7EgvLNQ6b3DMSwsGtWBIX5/p46v90+9np3Fm8P2m/x90Q6v3OzhGpq/5wXi8wbEoGUtNjr21zMbWJvmcw2ZthXanRTHvERNSq9coFh0mp2a4uLTM2EiRxdUNjr1zXBYB/YB2p0mrs4ll2QSBZGIahsHqZpF68xiTk8O89Op5/CAgjmLCHQZP27JY36gSBCH3Htl3Swaqwl0kRM6cW6JaazA+OgwYZNxJspm5y8+bhk+n0+X7P1qGeJlqrU4QBsQROE4exzrE4mKMbRxl//y91OoNLLOEabnYpkEQBYRBSBSHokjjGMexKBaz2LbM0pfqTuIHEZZpk81kL1+Igxdd2mSYDlZN39Q2g7e0OVJN+mmrQfq1BlcJV2MnsaNeL32BXotlZ3DgUBexCphVx0y7UNT3MegT7vWkJ8rgwHM1K4V6f8rSkTaxpgParmZ+VdaWZnP3AXsni4uyTCnRksmIiEoLlvdbsj9t4Rh0OexkCZBiegZhGGEaBqZhEMUx1qV9oygmjGLavR7hQJ2Q0LFx4oD76s9zoHWMl4a/xEujXyI2czj+JrOtd68QITG7J9IOPlfurzLWu4AEg5hAzFfO/SPu6by1bb/A3V4kJIhiOpfcMpdXrYBjycDuOpZ8XtPAtm7NAflaSK/+lajodkXMdzo7uzx2Ir3ASd/fSUyoa1ZdE+pvOshz8Kbea7q0wE7XUtrCkLaEwJXXwKBl+Grj0KDrOC2K0uPS4DWatsAoV3J6sTU4Vgw+JjeT8bEiZ89dYGVV4jKi0KFWj3Ecl3JxntGREhubIcOleyC2MOIC8zNDhGF42d1YqZ4lCH2qtQXqzWVMEzIZj7fePcOZ8xkR3cHOg08cg+c5HDk0j23bvHPiHD/x2Y/dkmLkrhEilzHkF+711zBNj37fp9ls0e706fX7VOtnKOSzjI0NU2+0aDQ69P0Qx7bJZ6VGiOt69Ho+cVwn45UwDKnuaBoWhmlhWw6mYWOYNoHv4DoZLFsthWP8fpfx0RyeO8LKSocgCLFti2zWwzC2nyTpCXU3oZC+OAdNlrD9gkkfU11sgxfV4HHUKn7Q95oerNSknLY0pC/Qa/55dhE7gwJnUNgMxp0oMbdTUFl6+7TgGGS361V9PnX8waC2wQEq7feOoqtnN6WD55RQymYTF1AuJxH6g40R0xaOnRi0BOQzDuVchq1Gh2LOwXMsOpeayakYkb4fXiFCiGM6+dLleNFM2OKZjT/gwdr36Bg5HAJK/mC9EOi5Hl6/t6sYSfflNYm5p/kKE6f+e6I4YiJYxRwQNiEQOFdWK/PD7duZQBTHrFRaEryKwUgxSz5z8yvwKUGRFhPttvxNxz8NxkjB7iJiUBTstNBJ34cr3Q47Ta47uSzS7CQY0vfT/ytL6qB4GLxW0zd1HHVfXXdpy+/gmKH2SS/sruU3UYu3D8PWVo1z55fwPBfTMuj3QwzDpN8PCIII27EBk/HRMhPjoxx76yS9nlSZjGPodCr0gw5xbGCaFp5XZMIrYVk2lumQzYBl2MxM7b/c6TqNaUCru8b4uMnHH7+PVqujs2ZuBQ7sm2GoVKBWa+GNuwRhk7WN11he3cD3A4IwJAxChoaLrG/FvHKsiWkaOI6NbZn0fUlrDIKQbNaj2+vjOg7dnk/Wm6JUnMWxHWzbI+N5xFFEPwgBg0I+y/BwiV63T6vdI5NxefCBh7iw+DobmzWCIMSyLMbGytx3dB+TEyPbJstB02TairHTY4MDziAflXtAXfSDg9Ru7pVBMTQ4MOwmWgb3U+wUP3MtsTSD/++0b/rvoHDYSQQpV1U6zmTQBJ0ODk5PLoNCZidLUXrVl/7e1GtHsU2tPUQmA5ksOG5MJheSzUQ4XgxmgGEklgDDMJgdL9Lq9Wm0fbKeQz+IpHldBH4YEAyIkMLWBvNvvcrcm68yeAqV/U3K7FBwBPCB17/4VWbfPcbE2ZM4UXJSdl2Ptz79ZYqba
xx59fnLQsUmZMJf2vF48jw8+u/+NW/8ub9Ic2Jq1+0sy8QywQ8i1mptZseKzI4Xr7AafRjS8Ulpy4QKeBysNbGTIBi0Ygy6R9KCIi0QDGO763EnF8TVGDzf1f30+JM+lwetD7tN/unrJp2av9PiYPDcfq9reCcGF1OD7uzB8VLtMzh2pRd+adGzm+V5cPyR40ScPLNEpxcwMTEOGPh+QBTGmJZJvdGhVCxw+MBestks1WqTXi+D53o49ihBEFMq9vD9NoZh4QedS4u9CD/oXHpvJp5j0/ODy9b2OIZa/SJ+0Ma2HfLZPN12haWVTSzLZGl5g29990X27pnmwL4Z5ucmbxnriBHH73Wqfnj+l//lf+Ef/aN/xMrKCg8//DD/5J/8Ez7+8Y+/5371ep1yuUytVqNUKn2o9xBFEb/6D/43vvHtF/A8h0I+x/pGhUarIz0zgpBcboiJsSEWl9axrRJjo3vJZjL4gU+73SMmwrEsYiMmDALCKCaKQor5SbKZMvncKLlsmYxXJIrEBNwPAohikahRhGkZeJ4LxDiWST6fxTAlirrT6+HaNnv3TFMq5Xe1dAxeKIOWip1cHzudb1ezVqQn/sELeafn0o+lzbKDZtbBAe+9nlPWlsGBZCe/cfr/3T7D4OfeSeQMDjBXuw1+54PfbfqYg6SPkw7STQ/w6ntMr2AH0xTlNWJ6fZ8ginFsMC4PmLF8h3GI61iUCx62bWBZYmmJTR8/ahPbPUyrR2j6mGZIL/Txw+iyMMhvbXDPD7/F2MLZq8Z3DBIDFw/fx7Ev/GcAZGtVhpcXsMKA5vAYlek5ME0KWxs88e9/h0y3/b5qocbA8sGjvPblP598r4BtGfL9GGCZBjLMGTx2ZJr9U8M7HksJACUmlHWi09keV7GTqEw3qkwLip0mREhExE5uid2uVdh9Vb+bRWKnc3bQ3ZB2XajzT1ks0oJhp/e000Jpt8fV507vm574d1skpL8Ttb3aVv2fHmsGXUHp9zF43e8kSq4m5q7FwhuGIZ1OB9MyMS7njkWXbhATEYYR5VIRx3GoVutU6jWIYqJLBw+CnsSFAH6/Rc+XUhDVxhJxFOE6Do5j02jV6XVrhHGEgcHS6ht0ezVs28JxxM4wMlSi1mjS7fRwXJtcNsPkxChPf/xBfv6vfOW6VVt9P/P3dRciv/d7v8cv/MIv8Bu/8Rs8+eST/Pqv/zq///u/z/Hjx5mYmLjqvh+lEAFJ3/1//pPf4fip83R7fTY3a1iWhW1bjI3MUMjvIQhCGo0WxcI42cwwruviWAXi2CRG0iGl1oIEDkVRiHJym4ZFuTSJZUk0vus4BGFIFEY4jkPGc/Fch1arQ88PcBybXC6DZSTry57fx3MzTIwPQWrIfy/T6LVuo9hNEFzrPoOPfZgFZvoY6YEJdh4cBh/bSVSo/wctGYOvqe4PTgq7HXdw5TX4/G4iKP1auw2mO73nwf0GH9v+/kP8ywGa6iAxcSTCxLFMTNPaNgnJ/YgojjCIMS+tWtu9HmEcYZlgWCFjGxcYWztH0d+i1N+g5K9TCCp4UWdX4RABlelZ3vrMV2iOjO2yVUJha4PH/8PvkWs1rlmMqK/kwr0P89bnpFdNHBnEoQGhQ+BbmLFLFFp02ibjxTKulaHbTQI1VdBm2oWWdtcN/rbXyk7n8bVY7NILjUGXQ9pCprZJ75cWNDu997TFIP3eribad/scg48NPj547N0eu9pY9UFmp2sZiz5Cg9jl4yXfdYjv9wfcpIkQgRg/CCkUcoRBSK1eJwwltlAsGzFB0CG8ZDkMgz6dXlWejwPAwDCg3dkiCAM2t84QxWJeVbEkaVzXod/3JRbMNDAtSxbjuSwfe+y+HctWfBTcUkLkySef5IknnuCf/tN/CohlYn5+nl/6pV/i7//9v3/VfT9qIQJJQbMfPv8Gx0+co1QuMDUxwvTkGO+e3KJaaUnjOtsmjGKmJo5QyM1Lym0MpmmTzw5jmBbxJQVrXDoxV
BEawzCxLety8zsMg5HhEvalznD1WvtSoGCM69oU8kXUxBHHEb4fMjU5huO421bFisGVRPqxQXZ77Grb7jQZ7jQJXm3lcC2vdzV22menSfy9jvF+Xm8QtdIafH43wbLbcXcatK9lUvtgV+bVciZ38sm9R47l5c0iuBRsDWDGIWYcsq/+Oj+9/BvbhEMMdGyHlXse4uIDj16TCEl2jslurHHft/4jo1tr23zHNXuIN4c+w1LmKG23hG9m6BsOoekQGzah7cI1SZjbsEDMbcTVFizXss973R8U4Ds9v5PleCc3yuDiZNDS+V7vYfD4AP1+jwsLyziOnRIj8eW/YRjQ9wMO7J/hnXfPUm+08Pt9wjgiTt2iS3/7/RZB2IUoIiImiiMsw6DWWKTeXL0kUGLiOKTWuEi0y+Diug6OYxH4IaZp4jgWIyNl/vJf+AJ/+7/+2Y/cTfN+5u/rGiPS7/d5+eWX+ZVf+ZXLj5mmyRe+8AWee+656/nSu3L40Dy/dOBn+cTHH+B//Rf/lrGRIaanRgE4fe4ildoCqhCUZWcIgnlW1t+EWK1I8oTlWbLZDL1ujGXntg19hmHi2DZOLkOr06Lf87Fsm0o1IOOVMUwTCUByMI2YOLIJYxvrkvXDADD8y71c4NompPfa5lontd1W8h/kWFfb9v18pvdzjPdafX3Y13yv/W4+FokpWGGy8wR9jSLk8jESIsMmMuBU6RG6q1myUVKxMQZe/6n/nMrs3ve/9DQMOuOTvP1Tf4Fnfvuf///Z+/MgSdLzvBP8+RUed2RG3ldlHVlHV/V9ooEGARBoHKIIiVxqOBrNUNRyOKMZaWdtqVkZsWM2szSbNZpM1I5WMhuZtMOVSF3DkUiKIIXGQRAkCKDR6G50V3d1dR1ZZ95XZMYdfu8fb33lkVGZ1VXddZc/ZlFZEeHhEeHh/n3P977P+7wQyffwMfgP43+LSu7Qze0vwR3HThGS3aJ5O11vHxQd/CB8EBG6VdGQ3fdjkbJG8YPwSmoGuiOUQRBiWzpLiya2NcXQAFcEp9GVlIyDF7R3HVPEI1Oe3C7jimg014mi7UTE81u0O5voms765mlWN05IBAao1Zocf/fsXRex3lYisr6+ThAEjIxs/4IjIyOcOnXqmu0dx8FxnKv3a7Xabflcuq7z3NOP8MyTj3Di5DnGGEDTNA7sneTipSXaHQfXC8gYHstrb1GrNTEMAytl0mp2KOQzRJpOf+ERIkTl3H1OpuwUmbRNu+MQhREjQ2XCyKblWLgOtNsexWIG25ZtprOT2BkxmvIcFy/wmJl5gmIxtWsIc7fwZvdjvcR4t9fstjq/3nvsdJHsJlLt/VzX28f1og27pUN6/9/7eXrDzTs9/kGplRv5zjdKjK63j97tPhzB0fjgVf+tYk4hNWuIjHN5+7uHwUca8U3PResaUCMialbp+i8KuMFgR8SNRU4SfFR8mIXIjaZt7l3ogCXmfEEkUXJUOiy6cuaJhsk0swCYhnzRlJUliqTVQhh6O+38amQ4igKCqzm26Mp+Urheq/shgrCIaWbQopCOU6WydZYgaGGZJq7nUdms3XXr93uqaubXf/3X+bVf+7U78l66rvOll19kcWmNM7NzjI8OMD4+yMTYMBcuLUh9dhBgBgHpdArf92m12oRRSKPVIghC6vUfoWma+IQgjfAkmqKTSVuUSgXsVIqWU8CPJC0ThiHnLy8QaRF7JkZwXI/9+x8nnc0RRRGXFuZ47NgBfvqnX0TXdy+pU7cPKrnrfWy3Mj+1fa/Arlepr8777vLcXjFYrzfJB5GGm829fxB223/v59nt8d222enz3wiJ6UXv9+0WG3e/tldc1/v9Pho+zEGPrv7ViEgFLZ5f/0MG3KVrt/qIn9G3Utt2YRHywua3+eHQlwn0LL0RmptDyP2cnrmZFf9Oourd9nEj+1b3P2y0ohe7ncsfdd+3Sify4SMyBp7n0mp1cD0Xzw+IwghN17BMA8NMYZo6jUadi
Aj/qsJW/nH9No7biB/TJBKiaxoREWEY0mxVcL02UShp0yiK8IKO6BaJcL0WzdZ6/LmAamMex22hXdGLRKH0sLnb1u+3lYgMDg5iGAYrK9sbW62srDA6em3J3Ve+8hV+5Vd+5er9Wq3G1NTtUfSCpGl+6Re+zCvfepXZ83M4qxXGxwexLIMgFGWz47jU6y0q7Q5RFJHLZmi1pY8HRJT79tFXHKFUHMPQTXTdAE3H0DUO7J+iXm/TbLbJ50voiA3pcNmn3XbRDZ1CLoup9VPfDGm1XUbKz1LITPL7vy8D7a2epG8ldopefBiCtFtE40bft1evstv+bkRsu1teubesUFU9qPfoFjjuVi1xvcjK7cJO30PdwsgnZYdYqQg7G5HNhuTyIaHeodZpoJk+hhlimD4v/vt/QXl95fpv1oVUdesjzVZtM4WPhtVFR56vfIPnK9/YcXsfne9/4a+yPrIPz7EIOilc1yTwLIwwhRZZBJ6O64Gpm1fcYq8lnLudhztphW41dtMd9N66t91JJ9F7/3pRiQ/6Pmqb7n32HosbITu7aTF6bzvtM4p2fr36XLtdw7uND93YSeDb/doPg/i1KdLpgOWVKuCTTluYhkmEh+O28H2DiI6Y8HWph+V89HHd5rb9ZtIpcvks1WoDPwiwrDSmkabeXMEPXETuKREXP/BotjZYXD6OHzjb9iML5RSe56MbOo8ePXDXrd9vKxFJpVI888wzfPvb3+Yv/+W/DEhE4Nvf/jZ/+2//7Wu2t20b277WpOh24uDMFAf2T7CwuEaj2SafE6LxjT9+jdnzc1Q2a1yeWyFlWwyWSywtb1BvxI26NE1HNzIYRu5K2aBJEIUEIVQqTfpKBXxPIwxzGKaBrmvYRAR+B9u2SNtpmk0PXdcolYpMjA1SyOevXmDX8/zoHhA+aDXePQnudKGqv7035RNyvVWRem/1/50iJTt95pudnG9E8NZNJLo//07HQZGF7maBu5UD7zZI9Q6eu233Qd9vtwml28BN2et3N+ZTjRGV8Vk2GxugZTLx/7u7EgM02gHvXljFtgwsc/tJVqm1cSvdoVqNE5/6Ip/83d+6cu/60IEnvvctDr3xfd74S3+VxsDNDXIf/53fpLSxdlOVM246TXPfBBndJ5P3gTa6BrZlkLJMhvukHN7xAh7bN0I+c/2ISBTFpbyOI+65jYZY/7dacutujNbbsG034q32fb3zYafzZqfX9F4P1xNe9m7ffZ3s9Fl2er16vJsY7EQ6bnQSv52kfCdy1GuB0Gt/0G162Pvc9Qjg7kQw5N2Tc7hejWIph+fVcUMfQ9fJ5gw2N6uYps7W+hr1apOQ4CqZiK5ERjRNu/J5NNLpMm3HZLMmkgdTN+i4HbZqyzsKVFfXT11DQgCiKKTjuJiBwb694/wnP/u5u+4ncttTM7/yK7/CX//rf51nn32W559/nn/4D/8hzWaTv/E3/sbtfusbhq7r1wh1Dh6YYmFxjdnz8/zr3/k6E+ND5HMZvvntd/C8PGCg6wYZu4TnttmqzqNrJinbxjLEyKHWCAiCDgP9fbQ6NVot/2qduGEYPPXkMfpKearVOmjQ31egWMjuOOF2T4Y7pRh6J/VeK+befV1vn9eb8He6EHvNiNTfbvfEnYyLdnI63Sm6ANubanU3seu2mt7NsGgnrcxu2G1F3P1472qt+3j0ukQqi/ruzsq9nT97n0+nryUSqgPzrUK3s2oxFze7I4KOK7lpXZP7IdAYGWPpwGHGzp2+of1rQLbT4sX/459z/Is/w+q+gzf0up/4l/8ruXrtpkhICJz8zJeuMbvQNY10SqrfgjDE9cIbdlXVtNiGH2DoIy4Y1bncTW6Uk2qjsZ3cqHLiMNxOcHa65ne6htX7fZgozk4TdS8p7j13d7u+d1pk7PTY9SIWCr1Oqb3jxW5kr/ex6y0qul93K+D5Ae1mmbRdptnw8X0fPxCfHNM0sMw8ERGPzOxncXkNx3EIw5CIkCj08YMOm
iYRjlw+QzaTwg9CLNPHMoEoJGvkabdreL5zpcomIopCgqANmouh69f0oJFrXaOYz/F//+/+c7LZNO+fvkg+l2FifOiukJLbTkR+/ud/nrW1Nf7H//F/ZHl5mSeffJKvf/3r1whY7zUoctJotjEMnVw2TbXaQouGKRUllFYsjFPMjwAykGvoaLp+pQuohWmZ6JqB49oMlIv4niiVPS+gr1TE90ucOrVFq+MQBRG64ZFONxnoL5HNZnYMU+406e802avBYqeVwE5M/trvf+379HoY9Nq4dw8M3Z0w1eDbPah2mz5dT7vRjRtdRfbe32lVp75j92qo28xJkYVUavtzO7ks9jbAU/brvURC/Sb3ArqdVWtN92q33nbHo+MFWKZOGEYEXQfy7S/8DN6fvsLUyePXKDQaZh9Zv4beoz0xo5Bj3/mPtEp/jUapzOiZk+x5+0cU65uEmsbW0Bg//sLPQCbD8KkT5Oo3J1D3TJN3Xv5LOxKdMIqu9p1pOT65tHXLXVVvFJoWn1OZW5SOV0RFeaE4TkxwFMlpNoXc7NSJtvf6g2uvwe7rcidCpLATgd8pitBrvtg7dvW2ceg1VdtpwbPbGKge6x1He6McvYuvbmLXnWbtXuzsNOb1kp56w2VxeR3XFbdiwzQIgwaO5+B0PLGDMAx8v49iwWDV8a58Lg3DSBFGbaIoIJ22SaUM6o0WmXQaXTchAt00CUOPgf49hIDv+xKt0sAyTfK5IcLQIYzkuVZ7Ez9YIp/L0lfKky9kef3HJ/naN3+A47jYdoqZ/VN86eUXb5vJ2W64I86qHxa3w0dkN4RhuC09o5jh3PwKf///86/QNI2T71/g3IVlDMPC9wMmxp7EThWJIv9qzbedKlIqDKET4YUiUEKLSJnmlf4CkMumeeTwXjY2qrQdR3QiV+q7680WGdvmyccPMThY+sALrfvx3olUXdDdEYPuNET3wKTMnbodIndqpd3r6LnbKqwbN3uG7TRZ7xRF6U5ZdDew26np1W7HTb1fb1O67qiEIhLqfe7CPHbbUG12WFirU22J7ikIQ5ptj4xtsF5tX2PzDoDr8vH//Tfpa1RpGQUWszOspSY4VXgWM2jz8/P/gFQUN+uIgPXxKfqWFrCia0+QCKgODJHdWCPV9fgfjP4y65k9HK69wccqX8OM4iqClm5y/It/mc09+3dldxriqprPpJgaLjE5VKSUS3+4A/UQQI0TKiKjiI36v+sKqVFEpzcltZOb7AdVrvU+t9uColuT1Y3eBdZOJKd7AdXbnLN7wda9/90it9eL6na/1jShXq/yb/7d12h1GhQLNmHo0+k0WFlbw3FaOG4LQw/55CceY229wuz5BUrFIq4f4LsBmuEShTprG1s4HR9NM9A06WlmWhZpO0M2m8ayLAI/ot7ooGk6lpUik87gOD6GLn3QTNPE81d46slJyv0FOo7Ln//gONNToxw5NE02Y9NqOywurVPuL/JLv/Dlj0xG7hkfkfsFZ2fnYsFqDzM8sH+CUjHP17/1qvSk8Tv4focIWFh6G8tME0YBYegThgGGbjE5fgzT1HFcB89z8X336sk5PNSHYWb57g9/SD5n8/ixfWi6BJjDMCKT17lwcY255XVeeOEL+L5+dWDoHSC6SUR3yqJX89Bb4QIfTB7Uc70pCLg2J9qdT1Xpg26ioJ7rJky9f7tf0xvN2aknRTe6BxWVDumOSnQTCZUuSiAo5dIUszbNjocfhLhewOziBrWmu+trMp0WegDnCk/RMAq8Xv4CdTvuZP3nQ/8JP7n6b6924NWAwcW563+OjbVt9xtajvP9nwA0Xk/v4Z3yT/LluX/EZOccAGYUsTl94LqsUEiqweE9g+wd6bsrkZD7CSqdalnSWPGjQo0tqkN299ilFjzdj/WOYd09d3YjOOp9eiMy3eNa9/i302e8HsHpfb6b8CiC022P3012Oo5FKX+IXDYgbaeIopBKsMTIwCSmZeB5AYahk89MUd5vYenzR
BFMTY4QBiGVrTqLy+v0F+WLXMmUXv0euhbSdlYJgjaaGZDN6DiOQy5rYlk+tfoGGiF22iKMYGqyzPTUOBBy/N2zBIHPwQNTFPJSQlzIZzk0M8WZ2Tm+/sevcmD/xB1L0zz0ROTs7By/+dtfpbJZY3xs8CozPHHyHAtLqzx+bIYfvn6CzS0JGatupIau4QctIiJMI4VlZcik+8hnh8hmiuSyRTw/QsxtUhTyecIwIp/PU+7vZ2Fxk1w2T6cxQBilAAswIdIYHYDKKvzbfxuiafqOF9lOF9ZOudbeiwh2Ttl0T/Y7hUe77/dip2jDTkRit3mgl0gYxvaoRC+R6BVeJvho0DSJGgBEUcTCeo3FDdEt2ZaO43X5eUTQ3MwxZx/hUuZxThefBj27bX9Nq3Dte1zv/eEaZw/PSNFdZuuYffy7ff8Dj1b+hE+u/z5m0CRTr9Iu9u2636xtkUun6M9nEhJyF9CtmbpVNQg3ErXpjtR0E5rex1R0d6eUS+977qRz8bzdo726Dp5nUMhP4Qf+ldJdKOUzkIcIDV3TCMI2hEWiyGJ0OEe1VqfVSpHNWrRbbbLpfoLQJwp9wsjF9300TdKOETrZTD8pW66/drtDvdFmYnSI0dEBFhbXqDdaWJaBbdsc2DtO4GdpNdsE3hRHDhwlm96H52qY1jKaFqBpGuOjA3e8U+9DTUTCMOSVb71KZbPGoZmpq4NVIZ9loFziuz94iz965fv4nkY200cUGZgmDPbvI22XyOcGyeWG0dHQdINsph8QXxHZk4muW2L3nkqhYxISEQUmg+UxNE3DCy0MLOgqU5TXBvj+tSv4nbQg3emYnZpVadr2RlCadq04dTcycT0CobATkTDN3YnErRZeJrh10DSNQtaGiKvXg66Jw3unkcF3TELH5weDP0PDEn1UN1JhnU+u/N7VaEgvTheeZ8mapG7mSWnwhZXflvft2S4deRysvc7Z4se6HjU4UX6Z84Wn+dTSv8Jwd4/a6BoMlTIEUdxxOMH9j9sRtVGkRKWnlbZNEZudBPIqyrJTo0P1t90GoxZimCa+H+L7AaaVuzJf6Oi6RhimQbcIAwPbsshnDexMinarQypVJmWJR0gQOmxVL7K4+jar6+8TBiFBGJJJpxgZGSCTtsllMwwNpTl4cJyNyjyG1aTtVAhDm+mpfeTyAZ3OJnOL60Say8TEILqmoekdun2FMtk0zmrljpqcPdRERKpi5hgfG0TTNKLIwPdGaTRaXLzQIpM6hFWW9uloIjK1UyWKhVHJ05k2XDmpABynDmgEoU4U+uh6Fl030WwLIwgI0fB8B123aLUb+L7DwEAO09LRtBAI0PUQz3VxfZenH3mMQiG/LRS5U0lsFH04AqGwG5FQKY6diMS9JLxMcGuRTVsYhn41wua1U7QaNmGgUVvL06rkIGWynWuE5PxNfubSP6DfX9txvyE6Pxj+MlupOPf80tq/Jxe2rtk2EzT4yeV/jR12ONH3CSRiKGhZA7yy57+lb3Gdcr6Jlb7WgTKdMnG8iIHijVXJJHg40S0iTqfhVkgR43SUxf/2L97g7LkV9kxMsL7ZZu7yKpl0nkw2j9MJJEI+MIqGhuu4oPsce+Qop89Uubw4j6lbWKYl84hh0XG2UJ3dgxDSaZtHjx5geLCf5dUNHj82w3/zX/4sS8sbNJpt1tY3eev4Gc5dmOfi3DJ2ymL//n7QOtjpFcxU/ZrP3251sFPWHTU5e6iJSKPZxnFcspkrccPIRMNjcXmdZrtJGHVw3Bau28GyMvQVp3A9h82tSwSBRxj6V5dy8qPpOE5AJp3BNFL4IeQyWQYHBnB8U9i0H1EoDuL7mzRaHYaGxjEM5SIp7nj1VpvBgX4KV0p5u1MiO5GL3sqNnVIcvUTiQRReJrg1KGZSZFImjWaE08gR+BFuw6S2XsBtCflGp6tVjUe/s8LPXP5/U/I3tu2rO+Wylp6ibm2vg50tPMcT1T/b8XNkgxovrf470kGTE30v0
TFK8ZOGxdbWGLW3PIb2r1AcaqDrchWZOoRhhGFod61KJsHDC11Xpd86f/FLT/Cbv/1Vzl9+i7GRMpp1gYsLGxiGQTabZnLPYdLZNaIo4uIVV+2/8nPP8R+/sc5v/c4/Iwylc7tlSurU8x00TbtakptJ2+SzaVZWKwyUS3zxcy9imubVlMojh/fy0otPbCvEGBsd4H/9//4eJ06e25YJAEnNLi5v8NixO2ty9lATkXwug22naLUdCvksmu7Q7Fzg8sI7EMHy6hquF1DuO0A2M4CdypCyCoRRiGmYVwY7g5SVIp2xicJIWjdfLcvysUwDTbMA7YqQKE3KMjBNjUzapNmqo5PFTGn4nsNWvUYum+LpJ0cZHdWvekvsRCQS4WWC2wHLSOHU+qhX27huRGsrS209B2HPcGEAgcNI+zJ/+fL/QjZs7rg/haX0fkJt+z4u545sIyIhQlzU0JgJGjy3/kfYQZMTfZ+gmprY9vrQs1g5PUFna5P+8SrZoqzmTF1nz3ApqZJJcFfR696dSqVA0zAMnUMH9lAq5ajXmywub1AuF/ni515E13W+8NkXGBnuZ35xDT3wr5b16rpOyjJptR0MQ6eQz+B6Po8dO8AXP7dz2e1OPlm97U0y2TTtVueaz3Gn8FATkYnxIWb2T21jhq7n4fuBmMf4AYXcKP2lYRzXw3E75LIDBEEougoDTMNgaKhEvdHADx2yGZN6rYkWhQyUc1QqaywsNwgCB9s26O8fZGHlPUp9af7iS09w6uwFzl2Yw3E97JTFwQNTV06ogbt9eBI8ZAgCuHABqg0P34d2PUNtI4vTSNGrBQFAi8jnW3zM+yqpsLkt+hEBjm3j2FmKtU0A1tN7rlGObFrbB8hasR8vX6C8ePlqN5h02OLxzT/BCDocH3uZqjHW+0GorpRp14ocOFajVHKx7JC+/N3tn5EgAVzr3t2dLjl/cRE7ZV1DJFKpFH/z//yz/L9+41/QajsEYYRpGoRhiOf5mKbBsUf289/9Nz/PsSP7btqIbKf2Jjt9jjuFh5qI7NT4ztB1oiiiWmuRTtvUm8sYZp0wCNkMLAw9h+M5hKGHho9la2SLMzzzzGGefPwQQ4P9rK1v8dbx05y7cAnNqtLZ2gRNoy/TRxDpHH1k6uqP/fJnn9nRvyRBgjuFKILFRbEvj6KI+SWHrfUU1ZUsbmeXc1EP6RtsMv7IKqfDL3HphRfJVdbpX5oHXaM+MMTyvsMcePs17OOvYzsd2maeXllqx+qPPwewfPhRzj/zIpnqJiPnz1CsrBHoBtXRCZw9BxkKDYJLHRpb10Y63LbJqR+X2VxtcvBQSOZwog1JcG+gOyqxU7pkp3H/F/7aTwHwj//Zv2NtbRPHEXF2Jm3z0see4Cv//S9+JMKwU3uTuzX/JIZmXOsjcuHSEr4fMDUxxBtvnSKKIJ1JYWga9UYbXdcYHRlg//Q4XhDw3/7yz/Hc049s+wG7DdKUBqXVdhKykeCewsYGrF3RlrouVOser78ZUN802a3oVjNCRiYaPPpsnbbr0XF9HM/HC64dSvKVdWbe+B4Dl87z1fH/G/PZI3RHV6ywzd8+/TeJgMroBO99+ks0yoNXn9c1SFkGtik9nAzdZHkuzdZyifpmiijYufyqVIJPf9pgehr6+3fcJEGC+wKu6/LKt37IqTMXKRXzfP4nn2fv9Pg9P4fczPydEJEr6CYOa+tb/Mevf4/KZo1Lc0ssLm9AFOF6ovnYt3eCx47uZ6NS47FjB/hb/9XP3fMnRYIE3Wg0YH5e/h9FQkIWFuD9UwGNa4X0V2FYIcWRCtNHNklZxhU/nZAgjMTwL4jo9cfLV9YZOXGSHzpfomJO0k1E9Mjlvz39X7Gx5wBnXvz0VRKSSUkzPsOQTta+H9J0PExDR48MwmaZufMZqjUNv53iWhhYFhw9CkeOwIEDoqlKkCDBnUHirPohsD10BmMjA7zyrVfxfJ9avYXruoyNDnBoZo904V2p3BVRT4IEH
wWOAxcvbu+/0W7Dm2/C+ro8vyM0SNk+g1Nb9E2skzItDF3DC0I0pMutpkHkBRCEmLqOF4REQLM8yPtPfpH6e4PgbY+yhJh87z/9Zdp9/VdLuAppCzSNlGWgoRFFER3PJ4quNADLmgwM+9gph0uzNvWGS7OqyIiOiuR4HrzzDszNiT/E8DCMj9/a45kgQYKPjoSI7ILu/Nl7py7w+pvvsba+heN6bFYbd03UkyDBh0EQwPnzsbGdrsv/z54VgerWliInOmyLaUTouoad8xjes8ngRI0oMgkjsHQNWzNw/ABT1+nvT9NoueQyKUb6cpxf3mJtSyppnHaagJ3SPTrN0gC6FtvBB1GEqWs47pXme1GI54foV8hJMZtCN2FsSnLm85dSmIZLtZK6Zv9RBJUKfOMbcPiwREf27IFslgQJEtwjSIjIdaCiJFOTI3z+J5+/J0Q9CRLcDKJIUi6NhtzXNJmEV1fhxz+GWi1+7soW8f+MEA2dQsljcM86peEmlmlQyKaot9yrRME0NNquj67r5LM24wMFFjfqaGhkUhZBFOI5JlG4s55DC0xSqYCUIaQmCCMsU8M2dTpegOsJe8pnUgwUM9gpGbZ0HcanXNBg/rJBLpNifU3STL3odODECUlHffzjYly1f/d+eQkSJLiDSIjIDWKnWuwECe5lrK/LTWFoSISpb7whE3KtJg6Q18IgZQeEoU6+5LDn8BZ6tkHWTlHK2dgpk5RpUGu5OJ5PGIb4YUQhm+LAeD+L6w0cN6C/YBNGIW3HRw9NdiwBBrTIImvrBFFEIZUCIjpuSNY2KaZMLEOn5Xj05dJY1nYyo+kwNNbGcS1SYQY7ZbC5KRGeXgQBbG7Ct78NMzPy3YeHoVz+kAc4QYIEtwQJEUmQ4AFDvS5REIWhISEd586JZqLRgGp159cqV0hdNxifinjxEybpQoFzix4527pKBOyUyZBl4vqBtC0PQg5PDoCmUW11yKZNdF2nmLVxvRDH0dml/QyhaxNETUxDp1zIYJo61YbD/rF++gsZsrbJqcsbVOptiqZ+jRNk2/GZmbEwHJN8VhyDs1kpSb7mvUJoteDkSXn+hRckOpSIWRMkuHtIiEiCBA8IHEf0HgrFokzIy8uSllhakkm409n59bYtqRzDgMlJ+MIXNPr7U0SRxVa9cy0R0MAyddqOz0AhSy6Totp0rjgOS/TDTpkU7ByEu8/yjU2b0UmHYlaiLZ4fkLIM+guZq12BJ4YKNB2XWtMlmzYxDJ0gCGl1fOyUweRwgWJWPlcqJaRr3z64fDnWxXTD9yU68qd/KtsBFAowMXHttgkSJLi9SIhIggT3OYJAJl7VCNGyYGpKSMnly/Duu1IZEwtSr0UuJ1Um6TRMT8PLL8vEDGIt/UFEQPV0MQ3pKhoEIbop0RO3baNHpjSW3OG9Qy/DYMm/0ngyotXxKRe2N6sr5dIcmhxgYa1OtdUhdHx0XaNcyDAxVLhq5X7kCJw+Hf/dvz+umtnpuLVacOaMkLXnn5do0tTUrensmiBBghtDQkQSJLhPEUWi9WheafGiabK6r1SEmLz7rmhC2u14m15omhAO14V8Xl7/2c9Cpscd/UaJQC5tUcqmJXqSk+hJbcvY1i26F52WQQT4fnANsen9DMWsTbPj4QchpqGTS1vXbHf4sJCLo0fh/feFWK2s7KwbUR4q1Sp873tCQkCiQgcOJGLWBAnuBBIikiDBfYi1NXFFVZiakknz/Hl5/ORJiXBsbu6cmgAhHqYpE3GpJBPvpz4lqY2dcCNEYKfoSaejEwSi54ircgxU+17X0Wm03B2JTS80TbuarrkeDh0SMvboo3IsRkcl9bSysvP2nheXOK+uwrPPCplJxKwJEtx+JEQkQYL7CL1C1JER6OuTNEyrJVqQzU3Ri9Rqu6dihoclXeF5MDAgJOQTnxBicj3cCBHojZ602yFBiKRsQuBqOzsDiNCBY3uHd41wfFgcOCDHRZGR/n5JPS0s7FwtFIZyT
HwffvhDIS+PPSbEZP/+3QlaggQJPhoSIpIgwX2ATkccURVKJZkoq1XRQqytSRoiDOWx3RxSdV1ISLMpJGVkREpZn39e0hG3Cip60mh7rJ7T2DJ1AkMn8HtJhobnGZRyBreIf2yDEqweOyYRDsOQVM3ioqSsehFFQs7qdTmGm5vw5JMSKcnnRcSbIEGCW4uEiCRIcA/D92USVBqLVAr27pX7Z87ENuaNhqRYtrZ2T8VkMlJJo0jI6KjoKZ55httCAjRNQwtTRAFo0e56CxWJ6NWl3Crs2SNamsOH5VgqMe/6uuhpdoKKmGxswOuvw+AgPPEEnDqViFkTJLjVSIhIggT3IKJIqj1aLbmvaZIesCwpw61WZVWvSMrWlkzmu6ViBq80tFVRgPFxEXM+/vjtISEKGxvyntd7jyi6vUQEJJKxtCTHcGFBUlC6LlGO+Xl2FNMqMlKryedrNCTNA/LamZlEzJogwa1AQkQSJLjHsLq6faWueqO02yLAdN24JLfTkYnS83bel6qkUZESXY+1D0eP3v7vsrYmKY4w3J2MKCJyuzE2FhOHtbU4FaWErbsdQ8+Tz762Jrb4/f0SHTlzRsziBgZu/2dPkOBBRkJEEiS4R1CrbXcDHRmRSS+K4olybk5W8L4vK/RWazebdokw7N0rHhlRJNGUoSGpCNm//458paufLwx3j9ZEkWgyxsZu/+cZGYnJSColBGNrSyIdp0/HEaheuK4QFxV56nQk1QNCUBIxa4IEHx4JEUmQ4C6jV4ja1ydRC5CJb3lZtnn3XZkQXVdISLu9++Te3y8T+9KSbJPJyMr9Yx+7s+6hKspwPR+RKNrZ4+N2YWgoJiOWJWRkc1MiROfOyf93QhDI9/A8+U0cR47zY49JiiyXkxTQ7Ux1JUjwICIhIgkS3CX4vkx8ikzYtlR06Lo8Nzsrz126JOkaxxFC0mzuXhUDMhmmUnF0pVgUL4wXX5SKmTsFRT407fpEBO4sEQEhZbouviIjV3pZVqsSQcpmhcDt9JlVGimdlvSZio4cOCDPnz4txz+fv2NfJUGC+x4JEUmQ4A4jiqSkVAlHu4WoEItRWy0pyXUciYIoErJbKiadlv3UavHEXi5LBOCFF2T1fifRaMhnvZGy4DtNRECOh67L8R4fl9+h2RRhb6EgUQ7X3fm1nY4IXn1fXu84cqyPHpXUmaaJmPVWlkQnSPCgIiEiCRLcQewmRAUhHpcvx5qQrS0hK54nk/r1qmL6+mTiUxUgUSQEZHRUNCHF4u3+Ztdic1MmctOUCXm3smKQ73c3UCoJGVlYkJTVyop8llRKBKknT+5uj6+0L9lsXB3Ubos4eHAQzp6Vv6piKUGCBDsjISIJEtwBVKuyclYYHRXyADKZXbgghKPZlJRMqyX3lUHZbitzEF+LsTFZwadS8rqhIXn86adjonOnsb4eR310/frpmd0m+zuBQkHSKfPz8rtsbEhUqdkUo7fjx3f3GwlDIS62LURLGaUNDMAjj8gxWF9PxKwJElwPCRFJkOA2ot0WjYdCtxAVJGqwsiIT2uysTGpqUlb/320CN03RJhQK8tp0OiYh+/bJit62b9tX+0C0WnEUR9ev77nheXKzrN23uZ3I5yU6dfmyRDAMQ27LyxJRevddiWbtFtVxHHmuUBDS4jjy/Q8flmjU+fNCCKemEjFrggS9SIhIggS3Ab1C1HRaJjo1GXuePA+y+r58WUpYPU+2qVR2tiBXKBaFbESRTHLpKz3iBgeFnDz++N2b1BVcN640+SAEgZCWu/mZs1kRC1+6JPqRTEZIw9ycHM9z5+R32i065fvyG9p2rB3pdCQ6cviwEJNEzJogwbVIiEiCBLcQqspFGXRpmhCD7mZyi4tCPsJQJrd6XSYpw4ib1e0mSAWJqExPi4ZkY0NC/pYlE+mRI9JX5W6LJNXn1zQhV7tpWxSUzXuhcPs/2/WQyQjBu3BByJ3qanz5sqRXsln5za6nG/F9+U2KRfl9XFd+3
6NH5fWJmDVBgu1IiEiCBLcIKyvbPSimp7fblisxKsh2y8ui/1AT1+qqbLPbpK1pojvo65PVdrstk2Q2K6vwo0fldi/YjjcasYma+v/1UhKKiNwLsG0hHefPy2feu1ceX1yUtFc+L5GNSmX338p15bctFOT3XV6W32twUJxcDUPErAMDss8ECR5mJEQkQYKPiF4h6tiYVGMohKFMaqrK4uJFiWa0WkJAokgmOdfdfWLL5cQ4yzTl9cprpK9PVu6PPCK3e0V/sLkp39W24+/U/dl6xatRdP1U1J1GKiWRrHPn5DjPzMRmZ9msNAo8cULI5266Ec+TcyOVEkKysRELkh9/XB7f2JDbvn13V8+TIMHdREJEEiT4kOgVovb3x+ZYCkqMCjLhrK3FfV+yWblfq8lEfL2GdY8+Kmmbs2eFlNRq8n4qHXPkyG35ih8am5vbxaea9sERkeuZtN0NWJYQkNlZidYcOiT9ZTodiYY8+6yU9+6kG9E0+T2DIHbA7euT76iccstlePJJee2FC4mYNcHDi4SIJEhwk/A8iXAo4pDJiBC1ewLpFqP6vugCKhWJgmSz8vzCgkxI1xNzTk/Lvlstec9SSTQl5bKsso8evXN9Y24GqiGfKlm9kcn1bnmJXA+mCQcPCgFsNoXwzc7K95uflyhVoQCnTm2P6KhUlDpH1O9s25LaUdqRV1+NhcVKzDoxcfe1MgkS3EkkRCRBghtEGMrq93pCVBCCUa/L/ysViXpsbsqkVCyKFkQJUneLgti2REFsWybouTkhIZ2OREKUT8Xk5O37vh8FyvpcHRtdv76hGUga416EYcRkpF6XKMmlS/K7nD8v91MpiZZUqzGx7P1tXTdOzw0OCrFZWYHXXpPf8/nnZf8LC4mYNcHDhYSIJEhwA1he3m5DvndvXDKr0C1G9TzRfWxsyOOFgkxACwsygV0vClIqiSV7qyXvub4uBEb5VIyOSjlotx/JvYTuNIVKtyhNi0JvhCSKhJzdqzCMODVTr0uUSumCTp+W51IpiZbs5DeinGXDUAiIakSYzQpZ9X340z+NtSNRJMSnXL6z/YESJLgbSIhIggTXgep+qzA+fq1dercYFWRVrIhLFMlkoqIiyi11N4yMwCc+IZPZ2pqE+2077hK7Z49MegMDt/qb3jrU63FqQnXf7a3k2SlVowjavVD1sxN0XQjg6dPyWUdGJOKj6/KY8m85e1bSNt2ELAji3jQgJNP35bcdGREStrYm0ZFyGT75SSGxlYrcEjFrggcZCRFJkGAHdEc3YPeVaaUipAFk4llelgmk2RRxoqZJFESRkl7tgIJpSij+yBEJ18/NyWNRFEdT9u0TEtJdkXMvQqVYUqnYb0N9l92gqmYcZ3vJ870GTYvJSKsl54WKjF28KGLTxx4THcjZs9t1I74vpEXThJioVA3IbxxFcXTkm9+U/ZTLQnqUr8n0dCJmTfDg4R5deyRIcHfgeTLJKBKSzcrE00tCPE8EioqE1OsSFVlakglmaEgm4fPnP5iE5HKSipmclP1dvCirX9XRNQxFo/DII/c+CYG4Csi24+iPYXywqZnn3TteIteDIiMQR6xmZoRMKH+Xgwelz0+xuD3CE4ZyS6VkP2Eo545qcDg6KsdhfR1+9CO5qRRcpyPn5r2cwkqQ4MMgiYgkSIBMCJcuxZqG3YSosF2MGgRxRUyzKSkTw5DHlEC1uzS3dzIeHISXXpKJp14XElIqyb6GhuTzHDkiE9u9HCnohko7KK0D3Fi6xffvDyICcn4cOSLEwHXlN3/sMelJs7kpv9vEhJCU996T31fpRqJIXqMaFEZR3OQQ5Pd33bjM+z/+RxEuT05K+mZxUQhPImZN8KAgiYgkeOixtCQiREVC9u6VFW8vCWk2JQqiSEi9LuH35WWZRMfHZR+zs5KeUavfnULpmibb//RPyyTV6UgUpq9PJqWhIZmYjh2TCe9+ISEg38V1hXwYhnxXy9pOwnY6JveSu+qN4vDhO
NWytSXeIiqaNTcnv+Mzz0jKprf7riIwSvuhDNAqFdnf8LBsU6nAG2+ImHViQo6rErOqiFyCBPczkohIgocW3WZjsLMQFeKeMGpFaxiSclFRkKEhmWjn5uJJBOKJt1ecmk7Lavb554UANZuy0i0WZSIeHJTXHD0qmpCdojL3KlS3XVWmqtJRHyRWjSLZfreGcvcyDh8W8un7cj698IKITpWIed8+EbGm0xJ169WNhKEQTceJUzUqOtLXJ9ursuCvfjU+L9T5VqnsXMWVIMH9gvtoiEuQ4NagV4h6vX4f3WJUkAlBNa0zTVnptloyEalISW86pht9feKmOToqJGRzU4hIJiOTUqkUh/1VT5L7CapiBmKflJ2IyE5QZOR+xMyMCEodR9Jyn/gEvP66PHfunJjOHTkiv+/Jk3LOKMIahkI2stmYyHU6cQO9XE7Oz60t2f+PfywE5wtfkMcaDUnpJWLWBPcrktRMgocGuwlRdyIhrrtdjGrbspo9f17SLuWy3BYXhYSoShGVitmJhAwPy+SRTstqVglbTVNu6bTcDh2SSet+IyEQCylV6a6qClHpBIXuybL7//dSv5mbxb59ck6BEIOPfUwErOm0nDeGITqP55+Xc6c30tVqyWOZjBwT3xcivLkpx2VgQNI7tZqcg3/wB5IWVM66iZg1wf2KhIgkeOChQuTnzslkqOsi/uy1ZVdYWJDtQVIu1Sq89VbsJ7Jvn7zuwoW4WV03+ehd1VuWvNfP/ZyQj1otLs8NAin1BHFM3b8/1h3cj1A27ZYlE2kQxCWrN4JW6/Z9tjuBPXvi3/PcOdGMqMaEly/LuTEwIOmbsbGddSOuKwRGRZEaDSEeW1uyn8HBWMx6/Dj84R9KWlFVdi0uCiHpNpBLkOBeRkJEEjywUF1tz5yJtQfKi2OnaEOvGDWTEUGgKsEdGpKV7MqKbFepxBOsEiz2pmRU19yf+Rl45x2JEszPy8o5CGR/nY5MStPT9zcJgTiiYVky6QZBTNJ2S7t0Hy9V+ns/Y3Iy1hrNzsJTTwl5SKXkt+904pLt6WkhF91NAX1fyEc+L8dR1+X83diITfEGBuL+NJUK/NEfybmqzu0okvfu1kAlSHCvItGIJHgg0avtuF4jsV4xaqEgq9eTJ2VCsG2JVDiOREFUi3uVblCiTNg+iQ4MiJfEzAy8+aZMICsrMQkZGJCJd+9e0YxMT9+WQ3HHoEzJwjAmeuo4+f7u6Rj1/ygSEuj7cdfe+xXj43IMNjeFIDz2mEQpFhflHCiXhaw8/7ycD6rDrzqfwlDOjUxGbsp1dmtLSEm5LEQlk5HtPE/KhOfm4HOfi8XTm5tyS8SsCe5l3LaIyMWLF/mlX/ol9u3bRyaT4cCBA/xP/9P/hHs/yuIT3DdQUQ1FQgYHRW+xGwnZ2JCIiVq5p9PiBXHxoqRkBgeFMFQqst+Njfi1asLojYLouhCLL35R/r77rkzQa2uyfxWer9XEq2Rq6v4nIRCTELW6VxoIy9pubb9TxYx6rNm8/0p4d8PISGzFr6IVyptma0vOJd8Xj5CnnxYhq2FsF/aqLsZ9ffFzrZac35WK3FfRkWZT9vu1r0nK5vDh+Ly/eFFI9P0ebUrwYOK2RUROnTpFGIb803/6T5mZmeHEiRP88i//Ms1mk9/4jd+4XW+b4CGF624faHM5CZHvluZw3VgHAkI4Tp8WHYiqYjlwQLa7eFEmDc+TyUBNtkr/0J1yyGRif5Bz52SyqNdl8lAGX+WykJzDh2Xb3Sp27jeoihlFLNSxVz1WrlcRo7ZVXXt3I473G4aG5BxZW5Oox8yMkNH33pNzwnUlLTc2Jt/57bfFV0ZFkJQVfK0mREXZ4LuuRDpcV7QhhULcqdn3hTQvLsJnPyvPnzsnrzt9Wt7rfnDoTfDwQIuiO8eR//7f//v8k3/yTzjfPQNcB7VajVKpRLVapbiTwUOChx5hKARE+S4YhqRRdqs4iaK4Ay7EEYrTp4UcdDoSxUinh
ZCoyAjEpENFQXon1r4+IRef/rT4SHieEBG1nWlKOL1eF7v2iQkRqD4oOHs2dqdNpeR7Li3Jir3RkFW8InOp1PYKGRU9yedFT7Nnz935DrcL3Z41Bw7IsXnrrfg8mpqS5zxPyIiqqFJkBOT/uZycm93nVTot515fX5ze8jw5xtmsRNs+/nGJlqjPcD3n4AQJbgVuZv6+o6dhtVqlXC7v+rzjODjK3hL5IgkS7AQlRFXCUvjgDqXNpuTNFUZH4cQJiYK0WjLIT03JRHrpkkycrithb9XCXaVweknI8LA4aD7+OHz/+/L82lpMiLLZePI9elQm2gdl1a+giEW3cFelaLp1NCpa0q0NUbifbN5vBv39QmSXliQ6sW+fiFVff12Ow+JiXEXz0kvSY2ZuLnaoVZEmFfEYGpIonfIcWV+XbUdGRHviunK+12qxNuUzn5H0kOoUPTsr5EX1skmQ4G7hjlXNzM7O8o//8T/mv/6v/+tdt/n1X/91SqXS1duUWiYkSNCFSkUGV0VCJidFB7IbCQlD0YEoEqImhVdflSqGZlMG45ERmUxPn45XpIqERFGcilErVJCJY3wc/sJfkJJgRUIqle2rfNOU/R0+LJPQg0ZCQI6dMi9TKQVlYd5NRGA7EVH34drtHiSUSnKugETxTFO8RpSIdG0tPm6f/KTY+xcKsp2yywchHpWKnMfZbCwG3twUkbXrSopQ6UqUdfw3vwnf+55EQlTEaWtL0jj3s39LgvsfN01EfvVXfxVN0657O3Xq1LbXLCws8MUvfpG/8lf+Cr/8y7+8676/8pWvUK1Wr97mupevCR56NBo7C1GVb8NOUGJUpesYGxOicfy4rCJtW8iBZQkpUU3L1EpeeWGoCEi3N0OhIGmgn/952f7HP5Zt1et1PS7jjCIhKgcOxKZXDxLUytww5BYEcdv73j4z3WLMbjLSW4H0IKJYFOIMkvYDqZwpFOKuu54nJPvpp+XWbX6mqolU2k/XYx2KpgmpXliQaySblffLZOJoyrlz8O//vWx35Eh8fiqzvgf52Ce4d3HTGpG1tTU2uksHdsD+/ftJXXHqWVxc5NOf/jQf+9jH+Bf/4l+g34jX8xUkGpEEcK2wNJ8XfcX1/DZ6XzM2JkTj0qW43HFkRCYA35dS3WpVHrft2J7c9+OJtTsdo0jQJz8p5GhtTd5TCTZVft5xZMW7Z4+QkPu9LHU3NBqii8lkJM3V6cQr/P37hfxtbMh925aJVZWrQnyMUyn4qZ+SSpIHGd1tBqam5Bw5fjyOpPX3y3EqFiU6p1KISsTaTYjTaSErqsOvOmdzOblOVDRORRA1Tc7NsTHRM/m+EBSF0VGJpiRI8FFwWzUiQ0NDDN2gzH9hYYHPfOYzPPPMM/zzf/7Pb4qEJEgQBBLCVoOuaUpa43rW51EUp1sgDlG/9ZasEh1HBvd9+yQcvbws2yvxaioVk44wjAdxBcOQFehzz0nPmB/8QCZUNflGkQzySuyqcvAHD96flu03CkXA1HdUKQHLkmO4U1qm241W6UlU35UHHUpEeumSpAwnJ8X47J13hMBtbMSVLQMD4tB6/Licq563XXfTbgsJKZfl//V63Dzv0iUhNXv2SNSk3ZbzW/Wn+Xf/Dj71KSHVSlC7vCx/EzFrgjuF23aaLSws8OlPf5rp6Wl+4zd+g7W1tavPjSbqqATXQW9lC8iqutcOuxeNhgzUCtPTkpZ57z2Jgvh+vNoLgrhHTKcjq0+VGlCrTk3bTkKUvfbnPy/RlO98R17T7X1RKMjnrFZlxVkui0DwQefgarWtjpdtxxEO09xOOrrdaLuh0lmtVmyE9iAjkxGjsYsX5bwdH4cnnoijH5ub20m4aqQ3N7ddjxNFQrDX1yVaODoqr48iOZa+L9tPTcnv0enEv1etBt/+tpzPn/60XBvnzwvZmZ0VMjQ2dneOT4KHB7eNiHzrW99idnaW2dlZJlVS9AruYMVwgvsMGxuyIlSYnLy+BgRk0jp7N
p7oBgZk4H71VRmcHSceUFsteY/z5+OVYzodh7RVFERpQxT6++WzvPyybPPnfx7n3dXkq8SBlUosSL3fLdtvFKrYrbvJnXJY7dV9KIKxU3NATZPfxXFkon7QkU4LyT5/XipbwlDSUpmMRAOVo2p/v5CJT31KynvPnImfs205XkqU6nlCOlTVl+pLozxLRkbkOHc68luoc/h3f1dSjQcOxKmjalVu09MPx++R4O7gtq05fvEXf5Eoina8JUjQCyVEVSRkaOiDhaggROPMmTi0v3+/rC5/8AMZiH1fBt+JCdn+4kV4/30ZmJWfhSIhIPddd7t3w/CwrFR/+qdlBfnGG3FIXJGQgYGYhBw8KOmfI0ceDhICMql1f1eV2lLaj2732e6Kme4SXvV4o/FglvDuhlRKJn+IIyEHDojXjIpqbG3J8xsbkhZ8+mk5LxXRS6XiY6k8W/r75abSK82mXBtnzsjjpZKQGBVZqdUkyvfNb8rjR47E6aFLl+KmkQkS3GokGcAEdxWqf4tCoSAh6g+awHvFqBMTMnl997tCBjxPyMDYmBCGWk0G4FotLm9UglTlDQLb9QmplKRinn9e/EGUW2UYyj7a7VhYqKpllJ38vn237hjd61C6DmXlDrH/hSIiEEdAdkq5dOtGmk3Z34Nk9vZBsCxxXVWN6sJQzmnbFm2I0jyVSkJWjh4VncmJE3JOdvfnCQIhL0Eg18DERHzeKo3I++9LhG9wMI6O2La8j4qOfPzjss3QkJAQzxPR8cjIw/XbJLj9SIhIgruCIBAioSYpy5J8+QcJOnvFqEr9/847MkCrdMvIiAyyQSB6k5UVGZwNI3ZTDYK4isN1ry3N7e+HL3xB9v/DH8p7+r5s2+nIfkolIVONhqxg+/ri8syHBY1GTPw8L/bCUIRDHVe1mla6BkVMekt4Pe/hiogomKZE086ejSuOhoclAvL223L+6bpECZeWYs2Haco5ro6ZKpfutoHfu1euj3Y7Lg++cEHO16NH5fpwHNmHaQph/+53JdL32c9KinFrKxayKjHrg1oFluDOIiEiCe4oeoWomibRgw8SosK1YtR9+yT68ad/Ggv7SiUR66lKlvfek0G33Rbi0G22pWlCQjqda0tz9+wRJ0rblnC1ErF2OjJgFwpxeW67LZGQgQEhQA8blPBRCX6VwVZ3RKRXI9LdHA/i9ADEeoeHEYYRkxFl4z46GlfNqGOdzUr6pVSSyMVbb0n6RAl9VRm1qp7xfTmvXVdIjhJZB4H430xPyzmtoiPZbNwL5/d+D158UYhPqSQExnUlSlIsxiZtCRJ8WCREJMEdw/q63BSmpsTr4IOgKly6xailkgzMq6vx4DwyImFk35cV2+KiDLZhKKtIFc0IgriktNWK38c0ZbB+9FFJxziOrApVNU2jIa9V4WwlBDx0SFauqtPqw4ZGI05xgRwbzxNiYZrb7d4VUenWiyhSAnGk6mGGYcg5deaMRCHCUCb7Z56Rc75alceyWYlcWJbYwheLcp0oUp7JxF2PVcfeUklI9uKiHHPHkeN99qwQnkceiatmFFGv1cSRtb8fPvc50WG127EnT60m+3wQjfoS3BkkRCTBbUe9LlEQheFhKWu9EXSTF02TPPrSEvzZn8mAG4axFsR1ZQA9flwGx3Y7Dl2rKIiqkulddWcyMtC++KJYa6+uSv5dTZyVivx/dDQmNGEon+dh72baG71QpbtqIlOC3utZvCs8DO6qNwJdl3TI6dNyLodh7DXy7rsiWlVkBOR8fe45IfanTgkRVy0JlBdOux2nbPbtkzSLItetllyj1apEZFQJe6cj15fSVv3e7wlJ37tXooDLy0KWLl+W99m//8Evu05w65EQkQS3Db1CVEUYbqSSpPe1ExOy8nvzTRl0Gw0Z8FQUxHGELFy8KFEQz4sHac+LV3gqZN1bmlsuS2nuyMj2vjSGEYey9+yJxXyplEwMNxrVeZChKmYUIbHtOCKSTm+POoEc0+4qGbi24
uZhJyIgx0SRkUZDIhDT07HXiOpmbFlyTOfm4LHH5Ji/955EPVREpDvVoq6H4WFJxywvx1GTIJDXTk2JduT06bhjcjothOTVV8WJ+OWXhZgPDkqaxvfl2knErAluFgkRSXDLEQQyMCndhWXJCuxGVkpRJAOqmryyWZnwL12Kw84gIruRERn8fF8Gz2pViIJlCTkIQyEoYRgL+Lq79RqGpFP27ZPQdi4nNuXKJdQ0hYToumyzuRk7sw4NyapQNSx7WKHMslRjO4h1IaYpxE89vlP5bvfj3XDda4WsDyO6yUi7LeR83z4hHO+/L2RjeVkIgWEIEZ+ZiR1tFxfjMvNsVv6vdCNRJNfRvn0x8VbE5fx5IfZPPimER9eFcPb3y3XmuvD7vy/pogMH5DNWqxKtTMSsCW4WCRFJcMvQW9FyM0JUuDaFs2+fTGg/+pEQgmYz1nGoKEijIflt5T2Rzcrg5/tCZjRNyEKns90lNZUSEvL44yIE1DTRg6hVfRTJe9q2rA4rFRnEh4Zk8L4Rp9eHAcoMq69Pfh9FHHxfjk86vb0aSaE7EqKEqt2CYVXdkRzjmIycOSPn/OyskI2jR+Vcv3RpOxm5dEk0Jfm8RE4uX477K2WzsU5K6accR661tbVYk9JqyTXw2mtCuB95RFI+rivXYC4nxPz114UQff7zkp4sFoUMOY4sRgqF2MMnQYLdkBCRBLcEqj+Gws2kLHrFqIODQhLOnpVBtVKRAVb1bVFmWWfOyHP1ugzWhYK8Xk1ipikTWau1XcdQLMog/dJLkudutyXcrKIgzabccjl5v0pF7k9MyOtmZpIeHAq1mvzNZGLTLVUKnUrJbaeIR7dviEJ3BET17kmIiKA7MuL78vfwYdFzpNNyf3lZiHIqJZGQgQF44QU5Z8+dE62V0n6o49rpyGMqVZPLyWJA6UPCUPZdrYqJ2rlzQj4bDXmvSkVIx3/4D/L8zEzcx+nSJbk2T51KUpgJro9kOE3wkdAbxbjZ/HA3gVFiVEUMNjaEAKjoxdCQDI6djmhFWi25ZTJxEzAVeras2C5crbR1XbQgQ0PSV2N4WAbn48dlEsznJaSsQtD9/bLqq9dlVZjJPBx9Y24GSgCpKmJULxNNi8Py3VUxsL1qphe97qpJ0+3tOHxYSHs3GZmaksjdiRNyPQ0NybHf2JDJ/5lnJBJy6lTcwTcI5LlGQ+7XavJYf7+kVObn45SO8uKp1YT4jI/LIqHVkt87l5PrqDs6kskIyV9ZkWtobk4WEwcOJNdPgmuREJEEHwqdjoRgFZR/x43m9HvFqJOTseJ/bk4GL9MU4jAyEkdBLlyQnLUyF8vn406vyvApk4mrBBRSKRlkDx6Ej31M3uv8+fgzlEoSwlaW8Om0rAIbjTjn/rD0jbkZqLSLOi6GEUeoetMtCjs93puq6W4kmGA7ZmbkvHUcuV4OHxZS/fTTMRkZGJBzttkUwqBErKq7b7stxzef396RtztVU6nExmqqs7QSsr7wglTvNJsSCRsbi3vb/MEfiKD28GG5dgcGJJISBBLFvJmquQQPBxIikuCm4PsygauJJJWSaMGNrnJ6xai5nJCQeh2+//1Yi2HbcfRCuXW+9VY8sFqWrJZVPltZXKtUTLceJJcTovH00zJAmqaYOFUqMunl8zKwR5FUJUSRrP6aTSEupimRkISEXAsV/VC/vxKq6ro8rmz0e7FTrxmI96MiXwl2xr59kvpot+PISF+fiEvfe0+iIeWykPIoEiJw+LAc35MnJVLRbMbNBR1HfgtlguZ5srDIZCQ64vuyTRTJ9b+xIfoqkGunVpNtMxkhL2+9JaTjc5+Tx7rFrKurckt0VgkUEiKS4IbQSyBUg7mbUcX3pnH275dJ/t1342ZfKnKhOoSGobzv/HysG8jn5X3VKi6K4s6gKtSs0N8v23/iE0Imokg656qBN5ORAR1ksKzXY/fUgwfl88zMfLRj96Ci2+peHXPbjnUIsLM52U5lu+qvipaoq
pkEu2N6Wq6LRiMmI/m8kG1FRkolISSeJ+mUAwdkm+7Ioyq1zmTic1+R/3JZrtOFhbgsXnWpfuMN+QwvvSSp0nZb9rdnjxAOx4E//EMxCDx6NBazXrok73P+fCJmTSBIiEiCD8TqqkQPFG7WRXEnMergoOzzxIm4LFaZig0OxsZLb78di+MMQwYyw5BBz3FirwrHudYltVyWVd1LL8X6ku9/X/Zt27Ld/LyQmmPH5P9BIIPtgQMSSdmz5yMfvgcWirSpFvUQ+4moKIkiIr1mZqqnUK9gVRERz0uIyI1gclKEqbWakJFDh+R6ePxxiXysrQlxmJiQ6+PcOTmnn3lGSIDqa6OOeaEQ66pUia/nSdRT9ZpR0RGQqEe1KpVnq6tCWNbXZXxIpyXy8s47cv2//LJc43v3xqndRMyaACCRDSXYFbWaDBKKhIyMiADtZkjI2poMdkrQePCghJCPH5cV1dKSbNfXJwNUf79su7go4retLfkcti0kRNfjklHlF9JubychmYwQj6NHpWnd0JCQHUVC+vpkIF1akm0ff1z0IUEgA/D+/bJNQkKuD+XJ0h3a766SUT1nerGbo2p3Jc1Or0uwM8bH5XwFIQZBINfGo48KEW82JQqhrtvLl+UYP/64aEfGxmR7XZfrqFiU+5Yl19rmplyPhYJcE6mUbOs4ch0uLko/plYLPvlJuSaDQMaN/ftlu81NiY6cOCGfIZ2WsUQJ2+fm5LPvpClK8OAjiYgkuAa9QlRVNnsz2EmMms8LMXnvPSEYnieDVl+fREFAJqB33pFVlioNLRRk8HPd7ZOfpsXW0wqFgtyee04GWcOQgVeRobExGZQbDdnuyJHYfM2y5HkVsUlwfSjNgK5vFwb7vkSkVPpsJ2HqTmREPa5s3hPcOJSHyMaGnOsHD8r9Y8fk91lclKjEgQPyWy0vy3V37Jj8TkrkqppFqnSbpsXl8J4nUcaZmbgaJghi0nj6tJCe55+XBcz6urxPoSD7W1yU9zl/XrQj2WzcJXt2Ni7JHxp6ePs2PaxIiEiCq/B9mZTVita2JQd8M+V2USQTv5qYlBjV90XAtrYmA1gmE7ujqtLbSkXK/1xXBjTblpthCHFwnNitUw2Y3W6e/f3yfi+9FOs6jh+Pe9VMTckA2enIYLdnT2xNXSjI4Dc6Gq8uE1wfKu2iBIemGWsLDCPuOdNt2d6tBekmJN1pHJWeUa9Nyj1vDENDcqxUFFL53Rw9GlvAz87KeR8Eshhot8WszDCEBKysCLlX5CKXixsaqlSNSvUUCnE6UwmLL1yQ6/iRR0SX9frrsfHgzIx8hmoV/uiPRNPyxBPy3ocPy/suLsrnX1tLxKwPExIi8hAgDEMWFtdoNNvkcxkmxofQu0b3XvLwYYSosLMYVZkrKVOkIJABrFSSiV815Dp5MvbsUJ1Ds1n5v3pdKiWP1+vbV+C2LSRkfFya1g0OygT2wx/G7qojI7Ia830hJH19EvXxPPkcxWI8uCa4Mah0jLLPT6djMzP1+6lOsYow9kZDdvIXUY85TuyWm+DGMDAgx3FlJY6AqNLzdFoeu3xZIn8qvdItdJ2djU0ElS9PqSTXXBDEfiOuK2W4e/cKGXEcuRmGXMdvvy37+NSnJAWr0jvFolyv8/Oy6Lh4ET77WXnvYlGuv24xaz4v12VSsfZgIyEiDzjOzs7xyrdeZfb8HI7jYtspZvZP8aWXX+TgzNTVEKvC9HRcgXKj6BWjqtCq40iZrLKOzmZjt9LuKMiJE/GqSQlSLUsGQdXcLpuVv9VqLJRTpbeFgqz6nn461it8//uyf9uW9zx5Uj6fGphVQzDlJpm0Mb85dDry+2QyccRC/WbK2My25f+9otPdJhUVFVHPK6+L5He5OfT3y7FcWpKIn1oQTE/Lb3LqlDw3MhJbvp8+LemcXE5us7Ny3UKcxmy1ZL/Kw8d1JVVz4IBcx6ursbi105H3rtWkpPjAAYlOtttCao4ckeiJ58HXvibv/dRT8
tvv3RundlVFkErtJngwkRCRBxhnZ+f4zd/+KpXNGuNjg2QzNq22w4mT51haavP5z/wFxsaGgA+fkuiuqNF1Cb/qeiw+UzoPVbpXLstgqAaqbj+DdFoGvCiKNSQqCuI420PGui77zGbFXEmFn6tVWYFFkTwfhjKo6rqEix0nriQYH5fPsm9fXEWT4MbQXTGjwvLptJwLyipfVcZ0R0RgdyKi9CHKi0SVaye4eZRKcjwXFyWyoBo0jo7KNXXqlFx7atFQr0s6Z98+0Y2oDr7K16fZlMdUawPHiT1igkBITT4v0QzPk9/OtuX963XZ7+c+Bz/4gVyjly/LuZNKyf9Pn5a/P/mTMgbYtpAVNb7Mz8di9yRV9+AhISIPKMIw5JVvvUpls8ahmSm0K6N/PtvHvj0zLC1v8ObxU/xnhwcYH7/5K3s3MWq7HXsYbG3J6iqdloHKsmRC2toSQarryqCkIhsqtF+rxdGM7lRMdzffclkGrI9/XAY5kEHv/fdlcJyakkFsaUn288QTQkBUqmBiQvaTdAj9cFB6gXQ6FhBbVhytUlUYvWLV66VmlEOueixxV/1oUFVm8/OSAlHRznJZhNynTsWlu+PjMuFfuCDX8iOPCOlQ5fVKFK4iluoaVdeTStVMT4tAVXmOmKZENc6ckceee07unzghf0EWEcoU7ZVXJILzzDNyHgwPC1FSEdczZ64Vk39Q6jnBvY+EiDygWFhcY/b8HONjg2iaRhRpBN44ERqaBuV+m8sLrxOEh4CRG95vFMU5XIhzuBBbptdqMgD29QlZKJclctFqiYZEvV6tmrLZuGlasymDXT4fp2K6tSvZrAyE09PiXaDU9SdPxqXABw/KCksRoaeekoFYdRSdmJD3UJUFCW4eyudDtYfv7roL8cp5p7RMr6FZt2C1e3s1mSX48MjnhZTPzcl1p1KQhYKU9546JYuGIJBran1diMvQUNzS4MIFIReqqWGjIde0ilDW6/I7qVTN9HTsoqqqanQ9FrLOzEhZ/fe+J/u4cCHEsloEkcvaWo4wtFhY0Pn0pyWyo8Ss1WrIuyeqzC24ZNIpnn2mxNz8wnVTzwnuDyRE5AFFo9nGcVyyGck5RJEtJIQIw1oiY3g4ay6NZvsD9hRDqdoVVO652RQiUKnIYFUsyuOqIkYRihMnhFRUqzLh5HIyIPq+RC9Ux1aVium2atd12W82K6u5J56I7atfe00GR00TQ6d33pHPVCoJCVHty8NQVnu6njSv+6hQjewsKyalYRiXfHYTvF4ystNxVyLV7gqb7rLsBB8euZyQg0uXJP2hopeZjKRhTp2Kr7/9+4WYqH40hw/HJfGXLglRSaXkGu7Wjah+NUEgi4PBQXmP8+fld1QN8jY3JWpZr0tVzY9+tMEbP27QbLYJgoAgWmRz6xEGB8t84xtZ9u6VKMrsuVjrFgXDpFJ5vvXtDJvVdTru3DWp58WlNX7pF76ckJH7BAkReUCRz2Ww7RSttkMhn0XTHExrCU2TGsl2q4OdssjnPliZ6nkhP3q9SqvtkrYt+ko+ZqrF8kqGdmuIuTmdWi12M83nY2v1RkNy0ap9eaMhk1c6LaSi2YzD/Co6Uq3K5KbKOU1Toiu2LQ3rVLmh54koVRGYqSkRx3Y6kgs/elRICci+9uyJSUiiwv/wiKLY1Tafl+OviEevk2qvJ4gq21Xo9g7ptXvfrU9NgpuHcjS9eFEiHuPj8YJBeY0sL8t1OjMjCwqVUjl0SK7LbFae39yU36Vel/2qTtcq1arOj6EhSZuqlKg6T5Q+bHGxycX5N6lsnWZk4FOYVhbfm2Zt/TxtZwHPe4IwTHPqVIOTs9+hsrlwhXBENJtbnHx/Gcf1eezo4+SyHXS9QyGf5dDMFGdm5/j6H7/Kgf0TSZrmPkBCRB5QTIwPMbN/ihMnz13RiADILBFFEYvLGzx27AAT40PX3c/rbyzzZ39+lqWVDWr1Opfm3gAtYmR4itHBp+jvm2RkaIqpyRyGE
UdBbFsGH7X6UWF225ZIhWnKyqvdlsEpm43FpiqFouvx9v39YpS0d698rkZDIiFKlJrJCAnxfVn97d0rKn0F5Ydy6NDtONoPF1Q1SyYTO6nadhzN0PU46tFNSBR60zG9zyskEZFbi3RaIh7nz0tkU107vcZnp07FxmdhKPcPHZIFgG1LZLNaleta6UaUNXwQxKRDpWomJmQhcelS3KQynQ5ZWoogOEopX+KtE/+GbOYx+kuH0bUclYqHZbzOQPnjXLpcI/KfZe/EUVKZ00Jc6VCpnsDUB1ha6SOfn8IwHAxzHU3TGB8d4Oy5ORYW15iavPHUc4K7g4QqPqDQdZ0vvfwi5f4iZ2bnqNeb+EFAvd7kzOwc5XKRL37uxV1XC50OfOc7a/z+V9/k8vwyQbjE3MLrtNodsulHSBmPQTTM8soWp84eZ3V9jogVXG+DKApZWRGiUKnILQhk9dzXJ/9fWpKBLpWSx4NAVlrNZjyZFYtCQGZmRHGvSMjyckxCJidlIDx5UvZx8KCs9pSVtGXJNradkJBbBSUezmbjCEg6HQtTDUOIZnfpbrehWXe3XvWY2lalfFSapvu1CT46UikhGSDXoCrd1zQhGsp88MwZeVyVzJ45E2s1Hn9cUi/lckwoGw25VpUJYaMhqdyVFSEefX3yWlWd1m5HuJ5DENp02sP0Fb7A/OJ7nJr9P/D8BhEGa+t5NrcuUWueJ2P34/tDdBrPEwQ5XM8jCELMVJ25xddpdzqEYex+lsmmcVzvplLPCe4ekojIA4yDM1P80i98ORZzrVawUxaPHTvAFz+3s5hLiVFbrZA33z5Ftb7O3mmd1388RxQVOHrwc2gUCII084sXME2d5ZWznDj5KqOjOcr9I5T7HqevOA5R5irZKBRkUNvYkIlMdb7tTsUoYaJpCglJp8UbRJUTggyIc3Py/yNHJMx8+bIQjiNH5LVnzsj3UCmipMPnrYVqimZZQhxBdAiNRkxEUqm4bHenxna96ZluEmIY24mIKuNOcGtgWULuZ2eFKISh6Do0Ta4h25aoydycGJ8NDcl1e+6cRBYPH45L9FVkU+k/SqW4ckp18QUhN/39spioVGBxMSAMTYgCdM1kZPARMnaeC3N/xhvv/HOeeORnMc0R5hYciPKY9nmiYJIwzOB1ZkgZg5jmKaIootVus7L+NiODAxStDJqm3VTqOcHdR0JEHnAcnJniwP6JGypv6xajVjarXF78MUNDWeqNkE47x/Tk80RhRNtxaHc26LS36Lib1FurZO0BOq006eGnqNcinJZLNmtSLFr098tqeXFRBifLEgJimpKLVlbtSvzYrQc5ciSupnjjjbjcV3UXXVkRQvPEE/L5lbOrqtjp75d0UYJbB6Xd0PXYJyablYkoDK/tM9NNKnpTMt0aEQWV2uk2x0qIyK2FacZkRJXwDl3J0ipfndlZiZqoSrPlZVmkjI1JdDGfFwJqmtt1I6qyTbkkb23J305HoijDwyHz8xfwgzK6ZpLJlvHcOvncGJPjz4Nm8u6pr5PP9WPbP0U23YfvlDBTFXTTIfRHSZmwf+qneefkK1TrVU68d47Z9By5bIbxsUEazTbPP3P0A1PPCe4NJETkIYCu69fNk/Y6ow4PQ6RVcZwW2Uw/m5sZcpks4HJ54T00rYjr1HDcDs32BuW+cYYHjzBYPojnGuSyfXh+B9dz6e8foNnU2dqKfSfSaRn4lEYEYp1IPi8D4jPPxN1vfR9efTXuNfPoo/Dmm0JK8nlR1av245omg10mI9+jXL6th/ahhBKWGkbsIaLswpWrqmXJ76aIiEJ3WqY3UtKdvlHW/0qPUizeme/2MME0JZV59qxci2EYk/bxcfkdz54VAhKGQlDm54WcdDqyTS4n1+2pU3IuKKM7w5Brr1oVMqn0I54H6XSHllOlWl8imz5ARi9imhk8r4ll9jE29BxRaFOtX+br3/lf+MSzv0B/3x76+/owDA8zNU+zaROFFhOjn2B0uEWt+
Rpb1Tpz8yucPH2Bgf4S+/dOcO78QlI5cx8gISIPObot3g1D8se6Ds1WhrSdp9UYwTIjgnCDtfUKTicgpEKjuYLvRwwP7GVs5Elsq0gmXUbTDAIcTLNOq9NgdbUPz9OvpmIyGVlFt9uxGLE7FXP4sJTnqvbgrZaQEJVqmZoSfUizKdu88IIMlpubsh+Vp1ZVAQluLVSUQgkUl5djQqFKOJWrqiqZ7g6+qWjHThoRRVK6yUqnk5ia3U4YhkQ3zpyJu+mOj8tzg4NCKGdnpbw3DGV8mJ+XbdttSbUcOiS/+XvvxdERkOu8v19IiCrx9X1otXWiKE9fHywtncK2ivT17SWKdIr5ccJgkb17Ps7GxhCmmePPX/uXDA8e4NHDX2R4aATbnmBl5V2WV+eZmniOjlPBMj/DVvXPMQyHlKFTKuVYWlnnN3/7q0kZ732ARKz6kKLTkVWMGjSmpmL75CiCMBhiavwpKls1slmdfC5DveEQhAFrGyfYrF5maGCG8dGnMY0M+fwIYRTieg3CoIKGgalPUKuFRJGDnXbIZMKrZYGuG6di+vuFoDz3nBALRULW1mISMjoqA+Mbb8jrx8akwZ36DqmUPG/b8l0SEnJ70GzGQtV8Pm58p5rdKXt309zuqtrbfbf3bzdUeieKhIi2E73hbUV3NVmtJkRDoVSS9OjAgEQcz5yJiUqnI+W8th0vIAYHZVulIapU4h5TmcwV6/6mhWmU8dwSmmZQa65y6fKrOG6LMPTp75sgZWYY6N/P9OTz7Jv6OJ1Og1ff/GfML55jc6uGro8yPfk4qfQCjteBSGdmz6d4ZObLZNJp2m2H0eEBKps1vv7HrxJ215AnuOeQEJEHFGEYMje/wvunLzI3v3L1QowicTi8eFG2KxRkoMnl5H67LYNLu63zzJOHyecsFharGKbBVm2JSws/ot1pMj7yBOXSNJn0AIX8MJoG9cYq9eZldKNAEPTjBxptZ5PK1jJLS+vMzbWo14OrbeKzWRm0slkxN3r88VhVf+6ceIBEkQxyYSjluI4jJYhPPSXP12ryGtXJd+/e+LskuPVQoXflHwFx7yCVilGeIr4fN7HrJiKGcW1ERP1VRFiVBbdad+67PczQdbnOQIj+pUvxc7mcVNQMDwuxOHVK0qfqNzp9WrY7fFgWM9PTsq1aaNRq8n/pvBsS4aNTQNcKlPunyGX78COfpbUTbNbn8X0Py0yTyQyQzw6wb89LTI2/QF/pCO+c+h3ml75LKgX9fcM47TFq1XUuL30bK2WQTufZP/XT1Oo+6xtb28p4E9y7SFIzDyB267j7Ex9/ibQ9fnW77j4rvdbt2SyMjQ3xxKNH+ePvvMbxk99lo7KGphlMjDzF0MBhRgYfwbQyBL7DRvUCnt9mYvQoBDnaTpswdMSqnSIRFkFgAwGWFVEomFfeQ4SmU1Px5/jxj2M7aWXPfu5cvHLbt080Ir4v6ZxyWZ5TTq8Jbh+UANGyYr8X1a25195d+Yp8UMWM2laRlG5diXqPBLcfmiZkQhYismBRfZxsW8iI6up7+rQ8l83KtXr2rFx/hw7J4iablZROoxHrfMLQod5Yp97U0DCxUyU8z6JUMHFdl1Z7i3p9hXptiaHBQ/heh0x6gEZ7nbGRx0nbRfxwiMWVUyysvM+xQz+LoRcZHnwE04o4d/mPKJcOkUmPMth3mOpWjj1TNo5boVZvMje/kvSjuUeREJEHDDt23G15zM5qLC+9xcufsXjiiSH6+uJGUYaeJfAH0TS5MG37Sl+YpVV+8PoPqLfOcvjwBMXCIIXcHtL2fsqlvZhWmo5TpdncIGXnGSzPYBg2jXYN16lTLKbQGQRsNCwMI8APPMLIIZvt4+hRnWPH4q6/QQA//GFscvb001IZMzcXh39VekZ9TlV2ODMTT4AJbh+6LdhVtMK24y6sEJOI3VIz3VqQ7u3V491RdEVuEtwZKDJy5oxEH2dn5doCub4UGVlYEPKxZ48IXFdWpOR3akoWF
9msLBJOnpTXra871Go1/CBFu72EZRYIw4CUlUfXDPpL06SsAs3WGkEUsLB8HMdpMtC/h0y6H89vkc8Pk7JGSaUK1BqLfO9H/4wnj75MJn2YwfKjpFJ5Lsx9A9d5j4nRJ/A9k61KH567yu999TusrW8l/WjuUSRD9wOEnTruBn4faTvH2CgsLq3w7qk/ZXzief7V77zG7Pk5CMexrAxjIwN84sWD9PeN4jiwsLjK//Yv/wmnz54jlUoxPvwU/aUiA32Pkbb7CIKAytYlWu11hgaOkMsMABHN1hae18I0TQK/iK/ZaIaJoflAgGE08DyPmYMazz7b32VwJC3CVQ+aY8ck6rG2JvefeEIGybfflr/pdNzwLukbc2ehCIOqmFGlmiAEUqVmuu3dFRFRBKS36Z26meZ28pE0vbvz6I6M+L78VWkbXZdUrqYJGTl3Tkp7VWO9ubm4Ws22lYg1ZG5+jTBKE4QOaXuIRmMVtAb5LERAPj+MYVgYhkWztY4fdOg4W5w5P8vYyGOkUwVyuQE8z6dcfJS01U+7FbKw+g6Bf5w9Yy9jGHn2TvxFzl3+Jm3nLBEjLCyl8LwSF86H9JdNJsYGaXfcpB/NPYaEiDxA6O24G/hFwjAPRFjWOgMDVd46vsKZsxfxgzSjg4+SslO4rsuFyxepbNZ4+TPPk8sH/Na//ae89c57ZOw+irkjWMYk5f7DWGYGx21RrS3S6VSZHH8Ky8riBw6+36bdqZFN95POlEilCkRRSOh3wPQwNB/NcKnVX2N45FlsW1SplQq89ZZMVoOD4u74gx9IbrlYFGv3rS1ZcakOvErQqjqEJrj9CMO4tXuhIKtgkNSMEj3repzuU4LTbvR23+2G0ofs5D+idEUJ7hwOH5aoRxBsJyPK+Mw0hXgsLMjvtm+fpFFXV+Py3oMHYX2jyo/eXMTQ+3EcCz8IyGUHcb06W7U5UlaBIPBJ20UMI0UQOERRiFY0yWb6WF47Sbm0h83qOcZGjmGGJvn8JOMjJn4wQqM1z4/f+1fsGX+JYmGC/VOfZ6t2gbMXXiWzdI6JkSfpdCwqWwYXLp7n8KGRpB/NPYaEiDxA6O24q+k+hlZDN8R1Kp2xuTS3wv49z3Ng/54rK9smup5jcnyKpeU13nn/z9ioVHjv1DkG+44wUJ6ikJ0ikx3FMm3qjSVqjRVSqSzjY09gGjae18T1mnheh1JhHMvKkrKyBIEHkUPHqWGYUCo18cNZ0C5RLHySMAx588dbnDuvYVsWTz6ZxTD0q+W5g4NSRXPxouSlQSo1SqW4025CQu4cGg2ZYFTFzKVLcvxTqe1VUCrKpYhIb/lu91+4tnxXObJCnO5xHHnfBHcWBw/GHXRPndpO/Gdm5Le+dCnuXTMzI9vXapK6m5mB8kCVVucizeYihMOk7RJuGICmk0730Wxt4HpNgsAhnS6Rz46gaQaWmabj2FhWhrWNc9Rql/CDFtOTR0nbU2TSRdLpEab3HMCySpy/9DrV+hn2Tn6GQm4vjx6a4r0zf8D88usU85Nk0sOEYZZvfPuHvPjco0k/mnsICRF5gNDbcVfXt5ccrK0F5LOHGSj3o2ugGZuEQRmIMIwW5fImx0+scGmuwtjI02TtCXKZEWx7EMdtsFo/Q725xtDAQYr5EaIownEbdJwamqZTKkyQThfR0HG9DmHYIaSFpocsrb5OOhewsnaZx44doNXu8A/+4eusr0f4gU8QneLcxUcYHnwcO1VgakpMzU6flhWW8q3I52WyU/0yEtw5qB4z5bKky5SwOYpiTxjVqBCu7aqrHuu9dT+noiLdr4+imAAluPPYv1/Ihqqo6yYjU1Pye1+4IIuFMBTycemSpHVOnZJxqe2cZX7Jx9AHmRx9hggLIogiCX15fpuwFRFFIZZpU8gP025voWk6mmsyNHCAtJ1nZfV10naKemOFseGnCEKPdmuAcukw9kyGS/Pv8ua7/4ZD+75IJt3HIwe/zPrGKdrOefxgE8sq0Gi0e
PX1E3zuM88l/WjuESTxqAcIquPu4tI6UVdMPIo0PHec6lYKO2XR3+9gphbQMIiikFb7HJWtC3h+QLuZJ2PNMDZ0FF3XsKwirXaFdmeLVmeT4YGDFPLDBIGP77dZWz9DFEXkskNkMn1EYYjj1mX79hZhELC2+edcXvweZ8+/T7lc5MjBvfz2v7zE8opPJp1icGievvx+VldLnD27RKGwwbPPitBtdVUGur4+ISHZbEJC7hYajTg1o1IoqnTXceS+ZcV9gbpJhYpw9BqWQTypKdKiyIeqoHGcxEvkbmN6Oi6LP316e8pteFiik/39kq47c0bSq4o4NhtDjI2MsLJ+hsrW+yyuvkYYtmh11vH8No3mKqaZJgjbVOvzNNsVPK9NOl0kmx2Qv+k++ooTlEp7mb34fTZr8xT6ljEMF9etYRh9pKwZSoUjaKQ4cfqrnDn/bUn3DhxhqPw4ESGut0Uhn6XVavPuidmkH809giQi8oAgDKUK5tDBPZyZvcTp2ctMjA5ipwdxWlkqWxsUi1n86AId18BKZdmszjN7fp7KZo0w0OgrHSFljZHPyZK24zTY3PwRppnGMFPks0Pouonvd3CcOtXaAoXcMLnsALlMP77n0O5sEmk+YRDQcbaYX36DpdW3iKKQY0ee4md++mX+6GtVOo5Pud8mlZ7Fc/ZiGGMU8h7rmyc5e9Fj6PjPUK3q5HIyAKZSkpIZG7vLB/ohRvfko8pqbVtIguPEJlaqz4zSeJjmzl13YffoiPIjCYK4/DPB3cXUlKRgajUhI90i8f5++d3PnxeBeRBIJCWTgY0NnScf+wzf/cE7rG+s0e40gYh8ZopWe5Nifpyt6hyF3DDtThXPb5PLlDH8DLZdIJMq4nRq2HaRdGqQybFnaXXO43oLNDsV6nWdYn4/um4wMfoEnt/i7IXvslVb4Mfv/hsOTP8EQdghbQ3Q8dbRDR1N11leqfCJF59I+tHcA0iIyAOAXt8Qx/VwOgGrehHTCrHMNgf223zx84/zjT/2OXHyHAPlIm+/c5Z2x7miA5kkDMtEYYDrBiyunCEMU2i6gWmlrqRafLhiXNbubJLLDTE0cAjDsnG9Nq3OJhohELG6/h6V2glqzYu02w6DAyWymUFefz3F5maTQt4nZS/gOvsJvSE0vYOZWqS/r8L8/BTzYy3GxvLk8zKRDQ7KLcHdh6bFXXfT6Tgaoqzf1eSkIhu95bm9hERFTrpv3RU0yho8wd3H+Lj8XltbEvk4eDAWEefzMTlZWZFzYnpaXhNF+/jUx/8C33vt26yszjF78YdMjUkFjeNsYqey1FtrRKFPGGhsVRfJ5YZotTexrSzpdAHfbzM++hj15hJRDU6euoimd4iCKTYry4wOP4lupZkafx7Pc7kw932iKGT24p+StotYlkEmHREREQYhpmXy7NNHE6HqPYCEiNzn2Nk3JM3GBlimyQvPPcKLHyszOSGsf2W1womT5/jzV48ThXBg78cIgyyel0HTXMZGBjh7/gREGWxLKm5crw1RRMrKUqst0upUGRk8TLlvGt2w8NwWHbdOGHigwblL32Fp5W06zgaappNKWZRLB7h4ocSifY5qfZaZgTyee5AoNNH0FpZ9Ed1oYRqPEgYOhtmhWMyj62LvrrxGEtwd+L7cbFsmnEZDHs9m5XHPi6MfSmDa3XG3u3x3NygSol6jTNPa7cTU7F7C6Kj8jpWKVNV0kxHVL0rXJXri+5Km2bdP5zOffAbfDzh/+X0qlUu0O+eIwgDHadLfN0Exn8d1Q1y/DUTUGksUcqNUG8tkM2U03ULXDFJmkXLfYdYrZ9A0k7XKa+haP223RT4zTD4/ymD5ACvrp2i1N9CAjlPD93V838QwDUrFHEcf2cexI/vu5qFMcAUJEbmP0esbAga+N0ba1hgfC7k09w6rGz6TEz/HufMLV6Mm6xubtJpQ7jtIu1kiCAIiOtSby1S2lijl+khZGRzXxfM6bNXmGejfz1Z9AYjIZfsZ6
NtHEHo028tXPkxERMDJ03/I0uo7+IFDPpchm00zNf4xsulxms0Wa5WTdFyfiZHPk0qZmNYmhjkPURa3M4PvBmhag/5+A12XQSyfv5tHOQHEFTOZjPweqlw3l5PVcRDEJbaKlKgoxk7Oqt2GZt0RlG5YljyW6EPuPQwPy++2vi5kpNtQ0LJiQevCgghZOx345CeHgOd58+0SC8sTXJp7m8Xlc2gMYFoR2cx+0CPymX42a8uYepp6c4m0XaLd3sQwTILIx7YLOJ0qE2NPsb5xioE+mwtzr1Orr6DrJpaVZXT4GEMDM9Qaebaql9CATMYmCCKGB/uZHB/mmSePJGmZewQJEbmPca1vSAHQ0DQP01phdCTD2XNzfO/Vd/jaN74vUZPRQbLpfYTBOobWz2ZtkU5nC90wydr9GFoOx7Nw3SqtTpWOU8d1W5w88zVm9n6K4cEjZDNlHK9Bx6mi6xYQUa0vsrT6Oro5Rz5vMVAeBDT68k9jGWVc1+P47B8wMXqIkcHH2KzWGR/3sFIr+H4fvrOHCNisLzA5UWBwoMT0dGwfnuDuQnVMHhgQItLpyETTTUQUuVBCVs+7try6Vw/SnbrpjqBA3MvEce7IV0xwkxgclN98dVUcWLtbRhjGduOzuTkhpp/61BCjowO8d+oCgW+Rzb5LuVxkdbVBrTlHyhxhvTpHf980tcYihBGOUwfNoNWp4LhNCrlhyn2jOF6L4aHH2KicZ2r8GVbXT9NxRGNy4fL3mRp/BsvMXCkh19B1Qzp09xWZnh7ji597MUnL3CNIiMh9jF7fEF1voxtNwKNWb9LuOGxu1vjmt39IZbPGwQP7CfwRQt8mndJZXH6ftY1Z2s4mB/Z+gkJ+kDCKaLUb1BqLhGHAxuZ5tAgePfJlSoVx7FSJdmcLx21gXzEsm1t4g4WVt2g7Cxw6sIcgCLFMk06nQBDomIbDxcVvMzn2OLn0OI7rU2++x/lLVcaHj2EYkwSex2ZtgVxe59knj7B/v361DDTB3Ue7LSW6ui6TjBKPqo6qvY3sVCqnW/cBOwtTFYHpTeWoSa3bnTXxjbm3oPo8LS+Ly2p3vydlfKb60yjjs4MH4WvfkBa/xw5/Giu1xJ6JkFrNY23dpdGM2KpdJp8bod2p0ulUSaVypKwMQeCxsXUBO2Vi20Vq9TXy+WGymT50zWBj6zz1xgpEAZcXfoSm6ZimQTaTvmJpoPHcM0f5T/9PLyeOqvcQEiJyH6PXN0TTXTYq1auVMJ2Og+N4zC2s8vQTnyDwRxFDZZ+NzVkuL7yOH7iMDh2lvziN5zt0nCa1xgpRFLGw9BajI48yNnyMjN2H4zRotirksmUs0wYi3j/zCktr7+J6NUZHytjpFKm2yXqlihZ5hNG7OG6NseHnyGVGaXeaLMx/j4+9sJex4ZdYX7PwgwaaXmFqqp9nnjjMJz85lPSNucfQnTZRJbWqOqaboKh0inJh7UZvbxm13+7oSPfzakJTmhPPS5oa3ovo64s1IefPi8Nq9yLi0CG5v7Ag26ytV7m08Dbl8h5Ao1Yt4/oXSKV0Gs3LbG5pFPMj+F4Vy0yx3lzl8tnXOTLzBTLpEo3mKstr75PPlvACDTsoYZopxkefoNneoN5Y4erpGoWMDA3ywnOPsra+xaNH9/Orv/ILmMkAc08h+TXuYyjfkBMnz3FoZorKZo23jp+h3XHI5zN4nk9/fx5Tm2JpsU023aCvv4Eb1FirvE0QeOi6Tr25wuraSUwzj+M26Tg1VtfPMDX+DLnsoLT6Pvct0ukiQwOHAOh0qsxe+i4Ly8cJQ4fJiRHGRwfJZTM0Gy0ajRa5bIjvR4wOPUsuM4rjVJm98CdMjJfQogPsn36Sg/sgk2uTy1gMDJQ4dEhPrLzvYWharNlIpyUyoqplFBGB7RqRbqioiYqKKL8QFTVRhEZFRDQtJiLtdkJE7lUUi/Lbzc+LJqQ3rTo9Lb/d/DycuwCEk3ScNc6eO4fnFqhszYLm4Hs+lxcX2
DP+DGguhlHi8uLrBIHLe6f/kOnJF4GQMNRxg3XaLYOOVieXHyYVhZQKo6xtnLn6vrqhMzRUZnOrzt49o/z8z76ckJB7EEmC7D6Grut86eUXKfcXOX32Mu+9f55mu0M+l6bRaFPum+SxR36CXC5LrbHBmfPfBzxSqZC0nULXNTRNo9XeZLO2gOO22Ng6x9rGKVJWFk3TqNbmefvE79BorWJZGTQNVtZPMXvpu1xeeJ0gcCjkc+zfO04+n2VkuEwqZaFpGpqWo69wCA2NZmuV9868gpWCJx/7EkQDgMfMTIHpqWEGB/s5ciQhIfciPE/IQDotk0uzKUQhnY5TNiAEQple+f61Nu2wu6MqxNERZWTWa4aWeInc28jnxWsExFlVdWcOw5C5+RW2ahfJZNcZKIOuDXHxgsna+hpt9zyFgkE6beN6PpqmcWn+DTY2L1KrLzA19hR2ShTrl+ZfpdFcJWWZmEaBan0JP3RpNZfRcMlmLY4e+hS5TP5Kui9icWmN1bUKqYTF3rNIqOF9joMzU/zSL3yZ//3ff4sTJ8+hmzodx2NibD/79jxKLpdmcfk9tlYv4bgpavUmxUKO4aF+VlYr+EFAJm2D1mBp7S00TUZ/12uyuPwOQdDBsvJMjD5BsTBBtTpPpXqJjc0LaJpGJmNTLOY4e+4yhbxc/K22S19pkr7iBEEYcmn+NdqdOvl8gU997GexjUF8vcpAWb9qdpX0jbl3Ua/HFTOFAlSr8ngqda2ZWT4vxKS7X4z6Czs3vOvWiCj/kCAQMqNMzYIgISL3A3I5iX5cugSXL4PrLfCn3/v+VY8j206xd2qGra0GujbA5NiTtJ05XK+CndIZGS6zslrBCV2WVs9SKlYZ7D/A2PCjrG2cpdneoFa/TCrlkw/30Fccp95cp5AfpO2sYpoGuq4zMvw46xuzBFGdT774JP39BZaWN/jN3/5q0nH3HsQdISKO4/DCCy9w/Phx3nrrLZ588sk78bYPDQ7OTPGzf+nTnDl3mdGRATJpm3xuECIN3Vhk394BKptrrFe22Nqqk8tl2LtnjNOzcwTtDpquXZkcQgJfiEgUhRhGgGFa9Bf2kM8Ns1E5x8r6+7Q7WwBYlomhG3TaDkND/Tz71BHSaRuNEVZXO0RhSLMzSy5vsHf6EMcOfY50eoD1jTkmJvoYHi5iGCJeS3DvolYTEtDfL0SjuwFhsykRE9eVCEk+L/4SqnkdbDcp69WJqHSMMjVT/iGdjhARlZpxHBLd0H2CTAb27oVXX13jW995m0p1hdGRongctR1+/M6PuXhxg+Ghx0jZk9jWHly/jtNpsLnVYKBcolZv0O641BtrdJwGo0OPkc2WCKMGtm3RaG5Qb2wyOf4UI9lJ/CCg0VzFceu0Wi0G+g8yOXYUy/bJ5tIUCzkK+WzScfcexR25tP/u3/27jI+Pc/z48Tvxdg8lioUc/X0FcllRh0P7yg0GykWOHJrm/TMRzXaH8xcXsVMWX/zsx3jz7VOsV7bodBzCUFwHU5aJH4RYlsn0xHO4XkBl6wLzS28TBO4VHwidtJ0ibac4eGCKmQOTlPtLBN4Eh2fA0ue4uPA6k2NlJsb30ld8nMC1WNu4RC5n8MyTB8lmdfbuvZtHLcGNwHGEGOi6kATHEYKQzQoRUYZmuVysEemugukVoe5WvqueU71sfD8WxLZacdonwb2PVCrknZN/TqMZMD35GKa1ia63KOSzTI4P8f7pi7j+eXQ9RxgWaNRNVtbWAMikbeyUhWmaVxY7OmF0kUMHRxkdmSaTsdnaarC5VcM0G4wNP8XmZpOtWo6FpbcJwiZtd5ax0RcAjZQpTW80TUs67t6juO1E5JVXXuGb3/wmv/u7v8srr7xyu9/uoUWvcFXrGt2jKKLdcfiLX/wEf+mnfoJWW8zGJsaHODs7x+/87h/zozffY2F5jXqthWGIG+rw4KMEgVi2V7beZXrPEKPDZVzHY3xiiEajxfBgmbHRAUDD9yYADV0PG
Z9oUWsVGB/bT+DvY2vLB22DyYk+nnnyMIcPDTExcbeOVoKbQa/RmEqRZLOxZ4jvxz1lYHv0YqfS3d00IhBrRJQI1vPkPRN31fsHC4trnLt4gXK5jKYNEfhlMCN0vU3qSqO5er3KU4+1aTVz1JvD6JqJYW0QhRGWZRKGIY7r8cyTRxgeKlMsiG4tiiI2N+u89OITRBG89/5pDh96lGZziMnxEd59/1VKRZ3VjeNMjO2nUCgCwooz2TTOaiXpuHuP4bYSkZWVFX75l3+Z//Af/gPZG+jh7TgOTpd7Ua1Wu50f74GCEq4uLq1xZnaO8dEBMtk07VaHxeUNyuUiX3r540zv2d417vChaX7+5z7H6nqFfD5LPpeh0YhotUrUqg02a8vkcy0+++nnGCyXWFqpUC4X+fQnn+H3v/odRob7AajV2vheE9Ny8Pwlzp6fo1IxGerfT8oyyWU7PP7YEY4e2cvgoM7w8N04Sgk+ClSVi0qTqBJNJUztTseoqAZs77yr9tObpul2WVXRFNXNt9VK9CH3G656HGUNdH0Z3xslCk3QJXo7NNTHhYtLOF6HuaU3IJpgZGiSKBri0sJbjI4McGDvBH/6vR9zZvYyI0P9BGF4zXgGsLS8zrmLJxgfmaRYHKC/dIBWe51MxmN6Tx5dj0VK7VYn6bh7D+K2EZEoivjFX/xF/ubf/Js8++yzXLx48QNf8+u//uv82q/92u36SA88lHD1agO81Qp2yuKxYwf44ude3FGgFYYh3/jj13Bdn2efOkIY9hP4OdrtDuuVM7x7sknKTuH5AZvVxtV9pdMpvvaN77OwtM7S8rp08A3fxfPaNFsdBsuH2DP+FP39RQxti0q1wo/f7rB/X5rh4fG7cHQSfBh0OkIKMhkhHp2OEASVJumOUuh67B2iohq92/RGQ3p9RNTzqvw3sXm/P7Hd48jAtBavPqdpGhOjQyyvVDh3YYH19S3y+RaZ4BiddkB/3xQz+/sZKBd5/pmjvH/6IgtL6ximseN41j3mdZwlLLNEITfMoZln6O/fuvq+URSxuLzBY8cOJNbu9xhumoj86q/+Kn/v7/29627z/vvv881vfpN6vc5XvvKVG973V77yFX7lV37l6v1arcbUVKJuvhkcnJniwP4JFhbXaDTbV1Mwuwmzem3ioyCFpkGxtEmpb4ChwWdYWFrnr/7c55nZP3l1X2EY0lcq8K0/eQ3btsjnc1imztzCFsX8EUr5w6RSKbKZKppuMJYZ5NL8Cb7/WpMnn/i5RCh2n6BelwiIqphptYQY2LY87nlCFtTP2V3Kq4SovdbtO2lEuslJd7WNZW0v5VUurgnubVybKo6fk1Sxy2c/9SxBEPLN+ddAA8d5m/GRw0xOTjBQlqlpfHyItuPyV3/u84yODOw4nvWOeatrm3z9m3PUG20sc4B0boNOu8nC8jp2KsXBmT0sLK5dd1xMcGdx00Tk7/ydv8Mv/uIvXneb/fv38yd/8ie8+uqr2D0+3c8++yx/7a/9NX7rt37rmtfZtn3N9gluHrqu37AQq9cm3rAqgH914MjmMhimwejIwI779P2AKIpIpSzCwGCg7wmKhWk0YKv2LmhPAWBZG4wO24lQNTcrcAAAMaRJREFU7D5DvS7RCFUxs7IijysPkW4zM02LiYhK46i/vfqOnTQicG0URVk/dHuJJKLVex83kir+z3/+S6RSJssrG2SyNv2l4lUdiEK71SFtp5jZP3ndMaN7zHvk8F7GRwf5o1feY36hQ6Vq02qt0my38Wyf3//qd/jaN77PzP4pvvTyzpHiBHcWN01EhoaGGBr64LDWP/pH/4j/+X/+n6/eX1xc5Atf+AK/8zu/wwsvvHCzb5vgNuEam3htux3mbjnV7736Dj98/QSmadJstanWWuyZ+BiF3CSWEVFvnqTtrtLuTFMsBmialwjF7kMooaimSRREmVQpV1XXFT2IaQppUG6qKoqhynK7sZOXSLfTKsR/1bpEmaIlROT+w
Y2kisMw5PFHD3Li5DmKk9tJyEdJpRycmeL/+rcmmD23zptvVnj9xzqpVJHh4fBqGfGJk+dYXFpLfEXuAdw2jciePXu23c9f6eV+4MABJicnb9fbJrhJfFC1zU4DwdnZOf7173ydjUqV0eEBRkfKOK5Ff2EfjcYWLecstu3itwJcbxlN6wMSodj9iN5ohutuL931vJhwdBtXKuLQTS52Ss3A9mZ56j3V6zIZ+b8iOIlo9f7CB6WKbyRy8mG75Oq6zsyBQV751nfxg4ipiQPoGuj6AoV8lkMzU4mvyD2CxCLoIcfNDgRhGPLKt16l2WrTX8qj6Rp2qp9SYRyiGgsrP8D1GkxNjmCaBqmUnGKJUOz+gyIQqpIFRBcCQkS2tuS+Kt3N5WLiEgSxXXs3AdlprO/ef/f7gpAbZWoWRQkRuR/xQaniDyOyv1EsLK5x7sIlyuUiuj5AFBlEURpN6yS+IvcQ7hgR2bt3L1FiBHBP4mYGAiVuPbBvAtf1WFuvMtgnVTDN9ln6+iwWl3zmF1fZNzVKNpumXm9+5NVNgjuPdjuOSiijMscR4tDtIaLSM91iVhXBUKJVhRvxEel+zDTl/ZRpWnLqPJi4WZH9jSIuI7YxjCXCMI2muVefT9LF9waSiEgC4MYHAnVh57JpZvZPUq+32NhcBG1TdAK2hWVZDPSX6OsvcuHS0i1b3SS4s1AVM9mskAzXjStoVOWKIgjK3r3VkscUEemOduyWmlH7iqK434wiMEp/EgRJ990HHTcjsr9R9GrgdH17SC1JF98bSIhIgqu4kYGg+8IeKJd46olDzJ6fp7LZulpBMzLcz//w3/8N9u+buKWrmwR3FqrZXakU95UBIR0KKiJiGHHb9yCItSPK2KzXVbWXiHSLVXV9OzlRniRdXocJEtwQPowGLsGdR0JEEtwUei/sgXKJcn+RWr2J63rML67x/DNH+eQnnkyIx30O35coRF+fkI9KRYhBOr29zFa5qqoKF12Py3i7HVZ3IiGw3RdEeYeo13XbvCemZgluFrdTDJvg1iE5+gluCurCLvcXOTM7R73eJAhDdE2jsllncmKYL7388eTCfkDQTSJURMQ0JToRBNtt27uNy1TVjJTvhkRRgO+7tFotoijcRka6e9RA7EsShhKRMc2490z3Z0qQ4EagNHCPHj1AZavO+YuLVLbqPHbsAL/0XySlu/cCkohIgpvG7VS5J7g3sFPFjNJ9ZLOxmZkSk2paTBRA/q9psFWtsbUFQWBTqzfYPLnIwnKbp5/YDwwCcYO87qZ3iuy0WjERUe/vunH0JUGCG8HtEsMmuDVIiEiCD4Xkwn6w0WwKMchm49SJMjbL5YQgKCKinu92VQ0CaDZbzC2cg3APppXBwMDEYmHpMlvVCoN9P4lhpLaV+EJIp9PBcSJcN4XrGliWnFOKqHQ6CRFJcPO4HWLYBLcGCRFJ8KGRXNgPLup1IRqqxwzEHh7ZrOhFXFeiIbouhMSNqyIJw5DVtS06jksha1193DANRob7WV5ZJm21KBZMDEO/Ql48Fpc2aXXWiCKdlDlEs91mfLQPz8tQ2Wyzvu5TKpUolRLCmyDBg4KEiCRIkOAaqB4zqmLG9yVVkk7HVu6OE1Kttmi2IpqtCMfJAzq6Do1Gh7bjkM9mr+wxAkLQQAPKfXlcz8P3AwxDx/dd2m0XP3RIpww0zSaKDLa2OswvHGeg/1EqW4ucPHOG8bEiX/6pg0kKMEGCBwQJEUmQIME1UELRUgnS6ZCzsxusrZn09+tEUYHFxQ1ee32NrWqLKLI4dXaF989YPPX4o4yPDeF4IVEIhmUSegARaKH8JcJKmTidANfzqNVaNFshUWRiWyl0qwWhjudrBEFErbZBfykkmykQaVkuXd7gN3/7/aRHSIIEDwgSIpIgQYIdEQSwuLTGN7/zHS5cdNDZB9omf/7aEtWtASxjgny2gG6ahGHIxUtrbGz8iJc/8zymkUfXdQIvRAMCAiLfR4tcOk6Ly
/NzDPfP0NDafONPXidlPEo2XULiJeLx4HsBYQR2OsIPAnBDTCtibHSQC5dnkx4hCRI8IEiISIIECbbB90PW16tcuFjjvVNvUm/NMjH6FIZexA8a/Pmrb5PPPsLjRw9imGk0zcPORUxkhlhc2uDN46eZGnuRtJ1hbWOJbMZD16DZalJvrrC2fgbXazE2ZGJbFl6g43U8fCsAPUTHww8hiPJo6NQaW5QKLo7rsbqxTCajUyjYSY+QBAkeECREJEGCBFdxdnaO//j1t5mbD1hablOrLZPJNRgZDLFsnQgPXTPQdY1qrcHwQAm0DprWATKU+wosLa8z2OdiGAaVSpXUsIdhGASBT6vZptFsY1ghlqUTEBKGAZoWEkURTscjZUMYNCAo4/s+jWaDMHTQtIgwjNiobDG/dJGhgb6kR0iCBA8AEiKSIEECQEjIb/72V6nXc+SzwwTBOqmUx9r6Jimzwv7pEfywgabZ5DJpHMfFC5rYpouuu/gBWLaFV62ztLLB0nIDPwgR38SIIPDxAg9D1zA0jXa7g2XqXJ5fYnRwP2EIrhfguhG6YRCGAUEQ0mxs4Xouvt9ma6uF4zZZXl+WZmaZpI43QYL7HUlyNUGCBIRhyCvfepXKZo2piWlMM4vve5iWQ7m/iOO0WK9UMM02tl1A0y2CMCAMPQyzBviAhue4eK7P3NwyrueSsiyMq/arEVEUgg6tToeO0yKMAnI5G02LRBfiB2xu1llbrxGGIWEU0XKaRFGAaaTQdZ0wDOl0AlbXKly4tHQ3D1uCBAluARIikiBBAhYW15g9P8f42CCgYeji79HuVNE0A9s2WVk7AZrLQP8QjhMRRf5Voag0rIvY2KyxtVWn3XEIo5AwgjCKiKKAKAyJwgDP9fGDAN/vEIYum5s1PGXLqulsVOrUai38UHm6h4Shj++7BEGArmlk0gV8P+CPv/MjQuUtnyBBgvsSCRFJkCABjWa7K9Whkcn6pOwG9UYT08hgmgbtTg3PC5gcmyKKDHzPx/U6BEFAvd5kaXkdz/OoNTqEUQRRiK4ZuG6LRnMD1+sIoQiFlBBB4Id0HBeiAI0IQ9fRNIMwDAkCcUjTdZ1qbZEg8nHcNpquUSr0oxs65y7Os7C4dncPXoIECT4SEo1IggQJyOcy2HaKVssjbYNhOOyZyrJRsWk0QMMlDBusb5hUt3RMYxA0WFhcpuMskC+scezwIywsrYBmYhqiC3HcNl7gEF6JbgRRCFGEH/j4oY/ndyBqx9GPK9GYIHBw3CaGYWGnsrSdCmHoMr/0FtMTj2NZWVIpiygiEawmSHCfI4mIJEiQgInxIWb2T7Gy6hBFEZrWZqBc5KknDjM0OEqj2aLRqvP+mUsATO+Z4uD+SSbHhygVc+SyBfZOj+G4DvlMFl3XCAIhF2Eo+pEIJEVDBFFEGIZ4vsPy2inCSKVXNCJ0qaBx67Q7VUDH99o4Xo0wdPF8H8vKk89myGXT5HOZu3HIEiRIcIuQREQSJEiArut86eUXWV76EUvL65T7t8jkUqQsg0KhRH/fAH7os7lV59ihI4RBHoBCIUWpb5KLlzd558Q5NCKKxSJRCI7ToO1UKQPRlaZ2QjgiNE0D5P+maYleJPLRNB3DsAjDEN938H2HVrtCJtNHKpXHsrLomk7atkFL88SjB5kYH7pLRy1BggS3AklEJEGCBIB0VH75J3+CPZOjVKpVzl1YYG5xlf5ikcePHQBgZKhMvdnCcR2kd4yPRkA+X2BhcZW206Gv1I9pGtSby7Tam6Bp+L5DGPgEgXiKGLpGFIqVfKPZod5YwQs8NHR0zSAIfQLfIYoCbLuI53VA08lli2iGhqbpHD64hy+9/PHEWTVBgvscSUQkQYIEVzE2OsRPfXGAtUqOb/2JREe2ag2++4O3+fE7b1EsFBkasND1ZbKZNIVclnpzka1qFd/PYhgebmeLvmIBz+9ciXxoeH6b85e+S6RFmLpGJwpBu2L/HmoEV8Srmq6BpolQV
dOwDJtcpoTjtImiENNIk0lbPPPkEf6L/+xY0msmQYIHAAkRSZAgwTYsr2zwjT/5AZXNGpPjw7hOhkuXL9FotNHIkM92iEKX6laDy16HtrvC8OAIqZRJ4Pu0Wz4aTTzPwTAsAKIoZH3rAu3OJlPjTxEGPlEUAaBf0YT4fofISKGh43kdNN1As3IYRgbdbJMyLWb27+Pln/wELzz7KAdnjLt5mBIkSHCLkBCRBAkSAOB5Ymz21jtvU9mscWhmiihMcXmuguc3KBZzBH6Kdtsll8thGDYdp0GzVaVa7cPQI9AChso2mqYRhh6WmQZkv4HvEIUhQRAQXDErQwNdN4iiED9w0XUTTdOpt1bJM0DG7idlZWm11/HDAD0yOXHyFJVKHSs1zaGDSUQkQYL7HUlyNUGCBADU61CpVLk8f57xsUE0TaPeiGg0WqRSAbquYVkZgiDA81w0TUzPWq0qrU4bPwhIWTaGIeub8Ep5ru93qDWWaLY3ut5NKmcANE3HcRt4XhsQ91Xf71CtL4qexEhhmGBoOqZpUCgYXJ5f5v/3L1/h7OzcnT9QCRIkuKVIiEiCBAkAqNWg7bi0O5tXe7i4fl2Ih1/H9wOCYAPPq+P7PrqWAsAPXNY3TrNWOU274+C4oucIwoBqfYl2ZwvHqcmbaFI+E4YRYRQAGrpucOHyD+i4NRynieu1rmhLQNMMNA2CwMFOW7iOTxS5jI0OUq16fP2PX02cVRMkuM+REJEECRIA4DiQsVPYtkWr7QCQskKqjVM4jkcYRuh6QNuZI5N1MU0N0zTRNZ0o8mk0N/GDgHZ7i3pjhTD0MQ2LRmuN1fUzAGgoghHiOg06bgPHbdBqb9BorhKEV9xUrziydjpVIjTAJ5tJg2bg+g00TWNkaISz5+YSZ9UECe5zJEQkQYIEgGRKBgZKzOyfYnFpnSiKKBaylPvztDsOmgaO65HJ2Nh2Az+o4PuSspF+MhGr66eo1heJCAmjgDD0r0RHhNiEUUAURbheC93Q8Nzm1ffvdKq4ntzXdZOl1XdxvCYaUC6naTsVWu11HKdFFEXYdgHH9RJn1QQJ7nMkRCRBggRXYRhibFbuL3Jmdo5Go8W+6XFs28JxpOlcqZSXJnUamKYB2lW5Bx2nRqu9QRRdqZTZPE+1tkAYBgAEgcvi8nFct4LjrtF2ttA0qX6pNZavOKmKgFVB03QWlxZZXLzM1tY67743y9lzc2xstLFTVuKsmiDBfY6EiCRIkABHAhYUCmJs9ku/8GUePXqAyladrVqD6akxjhzaR6mYZ32jSq3eYqBc5NDMFJm0vW1f65vneP/sKywuHwciXK95lYgA+IGDYRq4Xgvgqh6k3dliZfUUrXYFXY8Fr0EYsLK6ghf42KkcAFvVOmfOXabcV0ycVRMkuM+RlO8mSJCA2hUtaaEgfw/OTHFg/wQLi2s0mm3yuQxjowP84LV3rxqdZdM5LMvgC599gW//2RvU6y3QwHE8WlcqZGy9gAaEkQ+ArmtoGmhAu90mZcVEZG3jLI3mGoXCMPqVKEkQumL5HgUYmk4Q6qyvbZJNi9A1IrqDRylBggS3AwkRSZAgAfW6/M3l4sd0XWdqcmTbdj/xiad46cUnmF9Y4/QZKBUsnn76p/m1X/9Nvvq179JqddB1TVxSNQ1DD9B1nTBsk7KMK66pIZ7nYZo6mqahXQ3MRoShh+vWr6RrNFy3RcrKYRgpgijEcxzRqxAxs3+Kza3zLCyuXfM5EyRIcP8gISIJEiTAk0a5qrr2utB1nbHREVpNKBbBNOEX/upfYG19k9feeI8IsCwT3/fpOA3mlt4gnbY4MDnJRqVKtd4kk04TRQG6pmOZ5tX3jiKftrNFs1kBIiEl+WHSqTwdd5VWc418Lo1paWSyaRqVRKyaIMH9jkQjkiBBAqLoxkiIgtKU2FfkIQdnpvg7/5e/xk9/6ZMUchlcx0PXdQqFHAf2j/P5z7zAx55/DNMyOTwzzUsvPs5gOU8mk8K0r
CsfAjzPodXaZLM2h6ZBvbmC06nhhw6Os46dMQmCkHa7TrvVwU6lE7FqggT3OZKISIIEDzHCMGR+YY25BcjnUoRh6Ya62XY68jedjh87ODPF//P/8V/yk596ht/8rT+k3myxf3qM4eEynbbDqbOXsUyTvXtGuXh5mSDYYnxkkPHRQTY2LxMEIRCxVjkNgG2naHfW2Ni6SBS4mGYOQ9dxghAvaNFotjn6yL5ErJogwX2OhIgkSPCQ4uzsHK9861XOnV8jCkcwzCavv5XhSy+/+IFdbXsjIgq6rvPpTz7DxNgwr3zr/9/evYdFVecPHH+fGWAYrgGiArIo4iULAyHZ1N1M+WnWY6uWdjHNS7ilrdrF1DStViVD20xbs0yrtfWWq3kLI/PSmmmmkHgBUUEcEBEUkOswM78/WCZHUCCZhsHP63nmeZwz5/KZ78E5n/O9nO9+0s5kcjYjG42TI3d3CUajcSTrwiVKy8rxbeEFSikeHu608PEkP78QfaUBB7W6qmnHYKCiopjsi0k4OjhUzdRrMKDXV2IwlOLi4swfAoPQZeUS4O9brwRKCNH0SCIixG3oVFomn3y+mfzLhfi37oCTow8V+hKSj58mKzuXsSMfuWkyUp2IONzgF+RGo25iF3zOlq+/p2VLL6pbghRFoXVLH4qLy1CpqmblNRiM/3t0u4KrixYXF2fKyisoLStH4+SIu5uasvIKdu5OYtf3OwkJDqxXAiWEaHokERHiNmM0Gvk6Yb95hl1DpTcmkwo3NzUdQwJJTcsk/tv9tA8OuGEtQ3Xn1pupbdRN94gubE/YR1FRCQoqHByucLU4h5LScvz9fNE6azifdRFFUTCZjGicqh45Xz1I1+sOD1r5euPt7UGrFt5onFWU6yvqnUAJIZoeSUSEuM3osnJJO5NpnmEXqmfBBVDwb+1jnsOltmGxRqORS3kFlJRW4OpGg5pFutzZjs4d2pJ/uYDikjKKik/ioFbT0teLkOBAHB1U+Pn58MCfIjmboeNi7mWuFFxFpVIR0i4AfaWBCzl5dAwJpFKvQcGEu5tLvRMoIUTTI4mIELeZq8WllJdXmGfYVTtcwmRyMn+udXGm/GJ+rcNiq/uVZGQ4otdXsiX+XIOaRQL8fQm/pxNHj6fRuqUP+spKnBwd8XB3ASA1LZOw0I48NawfgEXTjtFoZOHif1smUErVE1sVpe4ESgjRNEkiIsRtxs1Vi0bjRElpOe5uLihKJYpSaf68alhszTlcLPqVtIxAo3GgvPJKg5pFVKqquWyysnPJuZiPf2ufqueBXC0h60Ie3t4ePBh9n7lG49qE4kRK+nUJVB6K8msb0c0SKCFE0yX1l0LcZgL8fS1m2L2WyWQi60IeHdoHWgyLvb5fibNWU/WckP81i+RfLiT+2/3/62B6c9fPZXMmPYv8K0WE3tWesSNunMxcm0ABqFRlKMqvc9jcKIESQjRtUiMixG3m2lqJ1LRMc61EaUlZrbUSUHu/EkX166R1DW0WqW1UTV19TaoTqOTjp+kYEmieowZ+TaBC72ovzxURws5IIiLEbai6VqL6WR/lF/PRODkSeld7Hoyu2d/j+n4lDo4XgV9rP35Ls0hto2rqWr+hCZQQoumTRESI21RDaiVq9iuxHL/7ezWLNDSBEkI0fZKICHEbq2+tRFNqFvktzTpCiKZLEhEhRJ2aWrNIQ5t1hBBNlyQiQoh6kWYRIYQ1SCIihKg3aRYRQjQ2SUSEEA0izSJCiMYktzFCCCGEsBmrJiLbtm0jKioKrVaLl5cXgwYNsubhhBBCCGFnrNY0s2HDBmJiYpg3bx59+vShsrKS5ORkax1OCCGEEHbIKolIZWUlkyZNIi4ujrFjx5qXd+nSxRqHE0IIIYSdskrTzOHDh9HpdKhUKsLDw/Hz82PAgAF11oiUl5dTWFho8RJCCCFE82WVROTMmTMAvPHGG8ycOZOtW7fi5eVF7969yc/Pv+F2sbGxeHp6ml+BgfJcAiGEE
KI5a1AiMm3aNBRFuenr5MmT5qnAZ8yYwaOPPkpERAQrV65EURTWr19/w/1Pnz6dgoIC8yszM/PWvp0QQgghmrQG9RF5+eWXGTVq1E3XCQ4OJjs7G7DsE6LRaAgODubcuXM33Faj0aDRaBoSkhBCCCHsWIMSEV9fX3x9657UKiIiAo1GQ0pKCr169QJAr9eTnp5OUFBQvY9nMpkApK+IEEIIYUeqr9vV1/GbscqoGQ8PD5577jlmz55NYGAgQUFBxMXFATB06NB676eoqAhA+ooIIYQQdqioqAhPT8+brmO154jExcXh4ODAiBEjKC0tJSoqiu+++w4vL69678Pf35/jx4/TpUsXMjMz8fDwsFa4zV5hYSGBgYFSjrdAyrBxSDk2DinHxiHleOtqK0OTyURRURH+/v51bq+Y6lNvYkOFhYV4enpSUFAgfyS3QMrx1kkZNg4px8Yh5dg4pBxv3a2Wocw1I4QQQgibkURECCGEEDbT5BMRjUbD7NmzZVjvLZJyvHVSho1DyrFxSDk2DinHW3erZdjk+4gIIYQQovlq8jUiQgghhGi+JBERQgghhM1IIiKEEEIIm5FERAghhBA2Y3eJyLZt24iKikKr1eLl5cWgQYNsHZLdKi8vJywsDEVRSExMtHU4diU9PZ2xY8fSrl07tFot7du3Z/bs2VRUVNg6tCbvgw8+oG3btjg7OxMVFcXBgwdtHZJdiY2N5d5778Xd3Z2WLVsyaNAgUlJSbB2WXXv77bdRFIXJkyfbOhS7o9PpePrpp/Hx8UGr1RIaGsqhQ4catA+7SkQ2bNjAiBEjGD16NElJSezbt4+nnnrK1mHZrVdffbVej98VNZ08eRKj0ciyZcs4duwY//jHP/jwww957bXXbB1ak7Z27VpeeuklZs+ezeHDh7nnnnvo378/Fy9etHVodmPPnj1MmDCBH3/8kYSEBPR6Pf369aO4uNjWodmln376iWXLltG1a1dbh2J3Ll++TM+ePXF0dOTrr7/m+PHjLFy4sEFTuQBgshN6vd4UEBBgWr58ua1DaRa2b99u6ty5s+nYsWMmwHTkyBFbh2T33nnnHVO7du1sHUaT1r17d9OECRPM7w0Gg8nf398UGxtrw6js28WLF02Aac+ePbYOxe4UFRWZOnToYEpISDDdf//9pkmTJtk6JLsydepUU69evW55P3ZTI3L48GF0Oh0qlYrw8HD8/PwYMGAAycnJtg7N7uTk5BATE8O//vUvXFxcbB1Os1FQUIC3t7etw2iyKioq+Pnnn4mOjjYvU6lUREdHs3//fhtGZt8KCgoA5G/vN5gwYQIPP/ywxd+kqL/NmzcTGRnJ0KFDadmyJeHh4Xz88ccN3o/dJCJnzpwB4I033mDmzJls3boVLy8vevfuTX5+vo2jsx8mk4lRo0bx3HPPERkZaetwmo20tDQWL17MX//6V1uH0mRdunQJg8FAq1atLJa3atWKCxcu2Cgq+2Y0Gpk8eTI9e/bk7rvvtnU4dmXNmjUcPnyY2NhYW4dit86cOcPSpUvp0KEDO3bs4Pnnn2fixIl89tlnDdqPzRORadOmoSjKTV/V7fEAM2bM4NFHHyUiIoKVK1eiKArr16+38bewvfqW4+LFiykqKmL69Om2DrlJqm85Xkun0/Hggw8ydOhQYmJibBS5uB1NmDCB5ORk1qxZY+tQ7EpmZiaTJk3iiy++wNnZ2dbh2C2j0Ui3bt2YN28e4eHhjBs3jpiYGD788MMG7cfBSvHV28svv8yoUaNuuk5wcDDZ2dkAdOnSxbxco9EQHBzMuXPnrBmiXahvOX733Xfs37+/xpwAkZGRDB8+vMGZbHNT33KslpWVxQMPPECPHj346KOPrBydfWvRogVqtZqcnByL5Tk5ObRu3dpGUdmvF154ga1bt7J3717atGlj63Dsys8//8zFixfp1q2beZnBYGDv3r0sWbKE8vJy1Gq1DSO0D35+fhbXZIA777yTDRs2NGg/Nk9Ef
H198fX1rXO9iIgINBoNKSkp9OrVCwC9Xk96ejpBQUHWDrPJq285vv/++8yZM8f8Pisri/79+7N27VqioqKsGaJdqG85QlVNyAMPPGCunVOpbF7B2KQ5OTkRERHBzp07zcPujUYjO3fu5IUXXrBtcHbEZDLxt7/9jY0bN7J7927atWtn65DsTt++fTl69KjFstGjR9O5c2emTp0qSUg99ezZs8bQ8dTU1AZfk22eiNSXh4cHzz33HLNnzyYwMJCgoCDi4uIAGDp0qI2jsx9/+MMfLN67ubkB0L59e7mragCdTkfv3r0JCgpiwYIF5Obmmj+Tu/sbe+mll3jmmWeIjIyke/fuvPfeexQXFzN69Ghbh2Y3JkyYwL///W+++uor3N3dzf1rPD090Wq1No7OPri7u9foU+Pq6oqPj4/0tWmAF198kR49ejBv3jyGDRvGwYMH+eijjxpcO2w3iQhAXFwcDg4OjBgxgtLSUqKiovjuu+8aPmZZiFuUkJBAWloaaWlpNRI4k0xofUOPP/44ubm5zJo1iwsXLhAWFkZ8fHyNDqzixpYuXQpA7969LZavXLmyzmZFIRrTvffey8aNG5k+fTpvvfUW7dq147333mP48OEN2o9ikl9NIYQQQtiINGoLIYQQwmYkERFCCCGEzUgiIoQQQgibsavOqqL+ioqKyM7ONj8ITgghRMOpVCr8/Pxwd3e3dSjNliQizYzRaCQ2NpaNGzfaOhQhhGg2Bg8ezPTp0+V5QVYgiUgzExsby6ZNm5g4cSLh4eE4OjraOiQhhLBber2eI0eOsHjxYqBqmhHRuGT4bjNSWFhInz59mDhxIiNHjrR1OEII0Wx8/vnnvP/+++zatUuaaRqZ1DE1I9VPWAwPD7dxJEII0bxU/65Wz3smGo8kIs1IdcdUaY4RQojGVf27KgMAGp8kIkIIIYSwGUlEhBBCCGEzkogIIYQQwmYkERFCiNtQWVlZna+G9IfIy8ujZcuWpKenWy9oK3jiiSdYuHChrcO4rUkiIoRoFL1792by5Mm2DsOsqcXT1HTr1g2tVnvDl4uLC+fOnav3/ubOnctf/vIX2rZta1724osvMmTIECtE/9tdH9PMmTOZO3cuBQUFNozq9iaJiKiVyWTiamkFV66WcbW0gub0uBmDwdDse74bjUYyz+dwIiWdzPM5dvN9KyoqbB2CzZiMRowXczFkZGK8mIvJyufs2WefxcPDg9TUVM6ePWvx6tu3L9HR0RZJxc2UlJTwySefMHbsWIvlBw8eJDIy8pZjraysvOV9VLs+prvvvpv27duzatWqRjuGaBhJREQNBcVlnMi4xNGzORxLv8jRszmcyLhEQXGZ1Y755ZdfEhoailarxcfHh+joaIqLizEajbz11lu0adMGjUZDWFgY8fHx5u12796NoihcuXLFvCwxMRFFUcxVxJ9++il33HEHmzdvpkuXLmg0Gs6dO0d5eTlTp04lMDAQjUZDSEgIn3zyiXk/ycnJDBgwADc3N1q1asWIESO4dOmS1cqgsZxKy2TJsi+JW7SK9z5YTdyiVSxZ9iWn0jKtdsxRo0axZ88eFi1ahKIoKIrC6dOnGTt2LO3atUOr1dKpUycWLVpUY7tBgwYxd+5c/P396dSpEwA//PADYWFhODs7ExkZyaZNm1AUhcTERPO2Nzs/tcXTlJsMDOd1VGzaQvnaLyn/ciPla7+kYtMWDOd1VjvmyJEjKSsr4+jRo7Rt29b8cnNzY+/evTz77LPmdQ8cOECvXr3QarWEhYWxd+9eFEUhOTkZgO3bt6PRaPjjH/8IVCWUjo6O/PDDD8yYMQNFUcyfzZ49m9DQUFxdXWnVqhXPP/88er3efKz09HQURWHdunX86U9/QqPRsHnz5nrFAXDu3DmeeuopvLy88Pb2Zvjw4Vy+fPmmMQ0cOJA1a9ZYrazFzUkiIiwUFJeRej6P/KJSNI5q3Fyc0DiqyS8qJfV8nlWSkezsbJ588knGjBnDi
RMn2L17N0OGDMFkMrFo0SIWLlzIggUL+OWXX+jfvz+PPPIIp06datAxSkpKmD9/PsuXL+fYsWO0bNmSkSNHsnr1at5//31OnDjBsmXLcHNzA+DKlSv06dOH8PBwDh06RHx8PDk5OQwbNqzRv39jOpWWySefbyb5+Gm8vTxo3y4Aby8Pko+f5pPPN1stGVm0aBH33XcfMTExZGdnk52dTZs2bWjTpg3r16/n+PHjzJo1i9dee41169ZZbLtz505SUlJISEhg69atFBYWMnDgQEJDQzl8+DB///vfmTp1qsU2dZ2f2uIJDAy0yne/VYbzOvTbd2A4m4Hi7o4qwA/F3R3D2Yyq5VZKRlq0aMGgQYNYsWKFxfJVq1bh6enJoEGDgKqEr2/fvvTu3ZsjR47w+uuvM3ToUDQaDZ07dwbg+++/JyIiwrwPBwcH9u3bB1TdGGRnZxMfH4/JZMJkMrFs2TKOHz/Op59+yoYNG1i+fLl526SkJADi4uKYNWsWx44do2/fvvWKIy0tjYiICEJCQvjxxx9JSEggLS2NKVOm3DAmgO7du3Pw4EHKy8utUNKiLjLXjDAzmUzocosorzDg4eqEoigAqBzUeLiqKCyuQJdbhIeLxvxZY8jOzqayspIhQ4YQFBQEQGhoKAALFixg6tSpPPHEEwDMnz+fXbt28d577/HBBx/U+xh6vZ5//vOf3HPPPQCkpqaybt06EhISiI6OBiA4ONi8/pIlSwgPD2fevHnmZStWrCAwMJDU1FQ6dux4a1/aCoxGI18n7Cf/ciEdQwLN58jdzYWOIYGkpmUS/+1+2gcHNPrEXZ6enjg5OeHi4kLr1q3Ny998803zv9u1a8f+/ftZt26dRULn6urK8uXLcXJyAuDDDz9EURQ+/vhjnJ2d6dKlCzqdjpiYGPM29Tk/tcXT1JiMRioPHsJYdBVVG/9f/1+5aFFp/TGez6Ly4M+o/P1QrDDZWkxMDA8++CBZWVn4+/sDsHLlSkaOHGk+HxMnTuSRRx5hzpw5AHTu3JnPPvuM8+fP4+BQdQnJyMgwbw9VM9ZmZWXh4+Nj/j9X7a233jL/OygoiOjoaFJSUszLEhMTcXV1Zf369RZNQ4MHD64zjvHjxzN+/HiLv7tXX32VKVOm3DQmf39/KioquHDhgvk3SPx+JBERZsVlegpKynBxdqiRaCiKgouzAwUlZRSX6XHTOjXace+55x769u1LaGgo/fv3p1+/fjz22GOo1WqysrLo2bOnxfo9e/Y03zXVl5OTE127djW/T0xMRK1Wc//999e6flJSErt27TLXkFzr9OnTTTIR0WXlknYmE3+/FrWeP//WPpw6nYkuK5fANq1+l5g++OADVqxYwblz5ygtLaWiooKwsDCLdUJDQ80XPYCUlBS6du2Ks7OzeVn37t0ttrHH81Mb06U8jLosVD7etZ4zlY8XRp0O06U8lJa+jX78vn37EhQUxGeffcb06dP5+eef+eWXX8zNFBkZGezatcui6QNAo9FYXMxLS0stzhfAkSNHalzwMzIyeOedd9izZw86nQ69Xk9ZWRlvv/22eZ2kpCQeeeQRiySkPnFkZGSQkJDAf//7X4tRMAaDwVwbVltMAFqtFqiqORW/P0lEhFmlwYjRaEKtrv3OS61WYSyvpNLQuJ3o1Go1CQkJ/PDDD3zzzTcsXryYGTNmkJCQUOe21Xf213amvba9uZpWq7X4oa/+4bmRq1evMnDgQObPn1/jMz8/vzrjsoWrxaWUl1fgotXU+rnWxZnyi/lcLS79XeJZs2YNr7zyCgsXLuS+++7D3d2duLg4Dhw4YLGeq6trg/dtj+enNqbSMkwVehTn2s8ZGg2m/MuYSq3TP0tRFMaMGcPKlSuZPn06K1asoEePHtx5551AVcLu5OTEXXfdZbHdiRMnLPqQtGjRgsuXL1usk5iYaHHRz83N5d5776VPnz68++67BAQEYDAYiIyMtFgvMTGRadOm1dhXXXEkJSXh7
e1d4+8Lfv3/fn1M1fLz8wHw9W38ZE/UTRIRYeagVqFSKRgMRlQO6hqfGwxGVCoFhxskKrdCURR69uxJz549mTVrFkFBQezcuRN/f3/27dtnUXOxb98+8x1y9Q9HdnY2Xl5eABYdGm8kNDQUo9HInj17zE0z1+rWrRsbNmygbdu25mrfps7NVYtG40RJaTnubi41Pi8tKUPj5Iib682TsN/KyckJg8Fgfr9v3z569OjB+PHjzctOnz5d5346derEqlWrKC8vR6OpukD/9NNPFuvU5/xcH09TpGidUZwcoawcXGo5L+XlKI6OKFrnmp81ktGjRzN79my+/fZbVq9ezbvvvmv+TK1WU1lZSVlZmbnGY+fOnRw7dszigh4eHl5j1MnRo0d59NFHze+3bNmCwWBg9erV5puCJUuWoNfrzbVkhYWFpKen15i4sz5xODo6UlRUhL+/Py4uNf/+a4upWnJyMm3atKFFixb1KjPRuKSzqjBzdXbE08WZkrLKGsN1TSYTJWWVeLo44+rcuJPqHThwgHnz5nHo0CHOnTvHf/7zH3Jzc7nzzjuZMmUK8+fPZ+3ataSkpDBt2jQSExOZNGkSACEhIQQGBvLGG29w6tQptm3bVq+HE7Vt25ZnnnmGMWPGsGnTJs6ePcvu3bvNHSknTJhAfn4+Tz75JD/99BOnT59mx44djB49usle3AL8fQkJDiQr+1Kt5y/rQh4d2gcS4G+du762bdty4MAB0tPTuXTpEh06dODQoUPs2LGD1NRUXn/99RoJRW2eeuopjEYj48aN48SJE+zYsYMFCxYAmC9g9Tk/18fTFIcwKy18UAX4Y8zLr/WcGfMuowoIQGnhY7UY/P39eeihhxgzZgwGg8Gi/05ERASOjo5MmTKFM2fOsGXLFsaNGwdgkYj079+fY8eOWdSKGI1GUlJSyMrKoqCgAB8fHwoLC9m8eTOnTp3i3Xff5c033yQgIMB8Q5GUlIRarTb3EWtIHFFRUXh4eDBy5EiSkpJIS0sjPj7e4lky18dU7fvvv6dfv36NVKKioSQREWaKohDg647GSU1hcQX6SgNGkwl9pYHC4go0TmoCfN0btaMqgIeHB3v37uWhhx6iY8eOzJw5k4ULFzJgwAAmTpzISy+9xMsvv0xoaCjx8fFs3ryZDh06AFV3QatXr+bkyZN07dqV+fPnmzuz1WXp0qU89thjjB8/ns6dOxMTE0NxcTGAuSbGYDDQr18/QkNDmTx5MnfccUejd/RsLCqVigH/dx/eXh6kpmVSVFRMpcFAUVExqWmZeHt78GD0fVaL/5VXXkGtVtOlSxd8fX3p378/Q4YM4fHHHycqKoq8vDyL2pEb8fDwYMuWLSQmJhIWFsaMGTOYNWsWgPluuD7n5/p4GvJwrt+LolLh0D0SlbsbxvNZmEpKMBkMmEpKMJ7PQuXuhkP3CKt0VL3WuHHjyMrKYvjw4Ra1CX5+fqxYsYKvvvqKrl27snLlSp555hlCQkLw9vY2rxcaGkq3bt0sRkTNmTOHTz/9lICAAObMmcPAgQMZO3YsI0aMoFevXuh0OoYNG2bRZygpKYlOnTrV6G9Snzi8vb3Zvn07eXl5/PnPf6Zbt27MmDHDohP69TFB1RNmN23aZNEZWvy+FFNzelLVbe7kyZM8/fTTrFq1yjyc7bcoKC5Dl1tEQUkZRqMJlUrB08WZAF93PF2tV0UsGseptEy+TthP2plMyiv0aJwc6dA+kAej76NDSNMcwlqXL774gtGjR1NQUFBn/x57ZDivqxo9o8vCpNejODqiCgjAoXsE6jYBtg7PzGg00rt3b3r16mUxYglg27ZtTJkyheTkZKsn6zeLo6GWLl3Kxo0b+eabb266XmP9voqa7KPxW/yuPF2d8XDRUFymp9JgxEGtwtXZsdFrQoR1dAgJpH1wALqsXK4Wl+LmqiXA37fJ1uTU5vPPPyc4OJiAgACSkpKYOnUqw4YNa5ZJCIC6TQAqfz9Ml/Iwl
ZZV9R1p4WP1mpC67N27l9zcXMLDw7l06RJxcXFkZGSwadOmGus+/PDDnDp1Cp1O1+jPbGlIHA3l6OjI4sWLbz1I8ZtJIiJqpShKow7RFb8vlUr1uw3RtYYLFy4wa9YsLly4gJ+fH0OHDmXu3Lm2DsuqFJXKKkN0b0VOTg7Tpk1Dp9PRqlUroqOjOXjwoEWzzLWsNbdPQ+NoiGtH/wjbkKaZZkSqDoUQwjrk99V67KeuVgghhBDNjiQiQgghhLAZSUSEEEIIYTOSiDQj1aMianvEuRBCiN+u+nfVnkaf2Qsp0WakepbRI0eO2DgSIYRoXqp/V+1pLiN7IcN3mxEPDw8GDx5sHhMfHh6Oo2PjPo5dCCFuJ3q9niNHjrB48WIGDx6Mu7u7rUNqdmT4bjNjNBqJjY1l48aNtg5FCCGajcGDBzN9+nRpmrECSUSaqaKiIrKzs5vkRF9CCGEvVCoVfn5+UhNiRZKICCGEEMJmpI5JCCGEEDYjiYgQQgghbEYSESGEEELYjCQiQgghhLAZSUSEEEIIYTOSiAghhBDCZiQREUIIIYTN/D9DH+fQlwn9igAAAABJRU5ErkJggg==", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAiIAAAG/CAYAAABlpLwqAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy88F64QAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOz9d5AkWX7fCX5ch47UOkuL1nJ6erqnR2EUBsTOgVyC5C2wIGlY2i45xILkmd0St2Zn2Ns9gmu0u4OR3DWQXBx5WAIEDjyCAEEOgJE9qrundVer0iIrtQgtXd0fv3rlnlGZ1dWi9PuYhWVkhLuHcn/v+37SiOM4RqPRaDQajeYmYN7sN6DRaDQajebuRQsRjUaj0Wg0Nw0tRDQajUaj0dw0tBDRaDQajUZz09BCRKPRaDQazU1DCxGNRqPRaDQ3DS1ENBqNRqPR3DS0ENFoNBqNRnPT0EJEo9FoNBrNTUMLEY1Go9FoNDeN6y5EFhcX+fmf/3lGR0fJZrM8+OCDvPTSS9f7ZTUajUaj0dwG2Nfz4JVKhU9+8pN87nOf4+tf/zrj4+OcPHmS4eHha9o/iiKWlpYoFosYhnE936pGo9FoNJqPiDiOaTQazMzMYJpXt3kY17Pp3d//+3+fH/7wh3z/+9//QPtfvHiR+fn5j/hdaTQajUajuREsLCwwNzd31W2uqxC57777+PKXv8zFixd59tlnmZ2d5W/9rb/F3/gbf2PH7Xu9Hr1e7/L/tVqNPXv2sLCwQKlUul5vU6PRaDQazUdIvV5nfn6earVKuVy+6rbXVYhkMhkA/t7f+3v87M/+LC+++CK//Mu/zG/8xm/wV//qX71i+1/91V/lf/gf/ocrHq/ValqIaDQajUZzm1Cv1ymXy9c0f19XIeK6Lh/72Mf40Y9+dPmx//a//W958cUXee65567YftAiohSVFiIajUaj0dw+vB8hcl2zZqanp7nvvvu2PXbvvfdy4cKFHbf3PI9SqbTtptFoNBqN5s7lugqRT37ykxw/fnzbYydOnGDv3r3X82U1Go1Go9HcJlxXIfJ3/+7f5fnnn+cf/IN/wKlTp/id3/kd/vk//+d87Wtfu54vq9FoNBqN5jbhugqRJ554gj/4gz/g3/ybf8MDDzzA//g//o/8+q//Oj/3cz93PV9Wo9FoNBrNbcJ1DVb9sLyfYBeNRqPRaDS3BrdMsKpGo9FoNBrN1dBCRKPRaDQazU1DCxGNRqPRaDQ3jeva9E6j0dwdRFHE4tI6zVaHQj7L7Mz4tkZX7/W8RqO5e9FCRKPRfChOnlrg6994jlNnFuj1+niey6ED83zli09x+ND8ez6v0WjubrQQ0Wg0H5iTpxb4zd/6I7YqdWamx8hlPdqdHm++fZ
ql5XW+8Nkn+OZ3X9z1+V/8ha9y8MCstpZoNHcxOn1Xo9F8IKIo4p/+s3/Lm2+f5siheQzDuPxcHMccP3mBZqtNoZDj6KE9Vzx/4tQCM1NjDA+XOH324jVbS7SbR6O59Xk/87e2iGg0mg/E4tI6p84sMDM9hmEYxLEJsY1h9jEMg1Ixx5tvn+bpJx/CMAzCYAjDiDCtOoZhkM14fPO7L7Jv7zSHDsztaC0ZFCPazaPR3HnoZYRGo/lANFsder0+uawHQBiMEwQTxHFMrd6k3mjR6fYwTLGERFGBKCwCYhFZXF6j0+0xNzNOsZDDsiyKhRxHDs2zVanzJ998jiiKLr+ecgO9+fZpRoZLHNw/y8hwiTffPs1v/tYfcfLUwo3/EjQazYdGW0Q0Gs0HopDP4nku7U6PYiEHsU2z2eLEmbfZqtRpNFu0Wh3efPs0ruNQKsQoP3C90WZ9o0ohn8V1HaIoC8SYZhfDMJiZGuXk6QUWl9aZn5skiiK+/o3n2KrUt7mBlHA5cWqBP/nmcxw8MKvdNBrNbYa+YjUazQdidmacQwfmWVreII5j6s06p88usbZeIZNx8VyXoaEi6+sVXnntXZrN9uV9e/0+zWabUjFPr+fTqOcIg5HLz2dzGXp9n2arA1zpBorCImFQALhCuGg0mtsLLUQ0Gs0HwjRNvvLFpxgZLnH85AXOnrtAr9+nkM/SbHbI57M88dh9jI8Ps75R4cLCCmEY0Wi0OHHqAr2+z8ZmlRdeeot3T5znxKkFNrdqAHTaXTzXoZDPAle6gaKwRBQNXX4vg8JFo9HcPmghotFoPjCHD83zi7/wVfbMTbGxuUHf96k3+0xPPMCjDz7Bwf2zPPbwUeZmJ6jWm1y4uMr5hVX6fZ9yuYBpmQwPFXBch1qtyauvn2Bjs8rSyiaHD84zOzMObHcDAcQYQJLwNyhcNBrN7YMWIhqN5kNx+NA8H3vsHgwzwjJNwsig041ZWOixsVlldKTMU08+yPTUGM889TAH9s0yMzXOp59+lFIhT6XaxCAmn8+wVa3z7A9fxXFsvvT5Jy/Hewy6gQBUMnAcx1cIF41Gc/ugg1U1Gs2H4uSpBf7jn/6QRrOGaQwR9GOazQ71eou3TrzFkx+7n2w2g2lMUK83OXVmgbHRIUZHSjz68IMsLvrU610azRabWzUs02Rjs8qffvMFiCGXy9BsdXjkocMsLq9J/ZGJcdyMSafRYmllk5GREj/5had0oKpGcxuihYhGo/nAqGyWza0aURTQ6/l4Xg7Pc4ijiPpSi289+xK2ZTAz+TFOnVnk3MIbjI6UmZ+b4sjBjzE8BCur5wmCAMOAMIpYWd3gm9/5MX/6reeZGBvGdW08z2WoXGR6YoJGq0OvUsW0Gjx4/0F+8gu6johGc7uihYhGo7lmVFXTeqNFs9Wh2Wzz+rETtFodDDPEsmwMY5QgCIhj8H2fIIgwDBgudzEMi3anR39lg3qjTatVJgxser0+hhEThhFRKK9xcWmDjOdgWyZPP/kgnW6fpeUNhocmefrJh5ifd5mecnRlVY3mNkcLEY1Gc02oqqavvn6c8wsrdDpdTNOgUm3i+z6O4zJaKlEq7mVj8xStziZBIAXJ4hhMw8I0TEzTIIxi2p0Ola0Gtp3Fsix8P8BzSuTKRRrNFbq9PoYBm1s1VtYqeK7D5MQwGxs+Z88v8eUvPk2hoAWIRnO7o4WIRqN5T1RV0wsLK6yuVwjDkFIxz1alTrPVJopi/CAkjEJs2yOfG6HZ2by8/3B5D0OlPdQaS5iGRRgHBEFEGMUYUQSGQRSHTE3eh+vY1BrLuI6N7wdUa/Diy29j2xa2bTExepTTZy+yubVOoTCpe89oNLc5WohoNJqrcjkOpFLDDwLCMGRstEy706PRbBGGYvXw/YBut4NpWDhuZtsxxkePYNtZ9s4+SRB1uXDxxUvZLyZgQBwzNryPbKaEH7aIQ4iNGM8tM1TaRz7XIpcL8IOQTteg2VrnnRNn6f
f7uveMRnObo4WIRqO5KqqqaamQ5+y5JSZGD+AHPRaXFghDG9uyCcKAQn6cMPLFyoGNaSbDS6k4RT43hmnaFHMTWJZFEITYloVhmFiWgetlcN0SQQdMK0u7U8UrZojjmDgOMU0bzzXJuBlq9SZ/9q0XcGyLSrXBzPTYNTXN02g0tx7afqnRaK6KqmpqWiZBEJLNjtPr5uh1DcZH78N1Szh2ltHhA2QyQwBkM6OMDu9PDhKDYVqASRzHRFGEaRoYpoVlWkRRTKvVwfdDwjAgCPoAOHYG0zBotcG19wHQ7fqMDJV4861TLC6vc+TQ/DU1zdNoNLcmWohoNJqroqqaRmGEbVs0mm2abSmlbgAZr4xtiyvGMi0AbNslKTkGMRGunSOTKRPHBlEUY1kWhmFgGAZhGBFGARgWUQxh6APgOFkcx8ax89QbbTa2qriew+joENV6k5HhEoZhEPSniCJ5D7r3jEZze6GFiEajuSqqqmm92WKoXKRebxJHEBs+YDI2cpCJsSNYpotpOhhGem/5J45jLCt5zjAgjsVa4bkZHMcijiLMS0LGMAw8t4DnZrFME9vK0Ov2GSoVOLhvFvPScUqFnBwfmygcuvyquveMRnP7oGNENBrNVVHN7ZaW19nYrBJFISA1QkaHD4AB/a0WI8P78dw8ANElkeHaWfywgwSkinqwbZtsxsP3A2zLYmpqjFbbodU0sUwbyzSZm3kEy7QxTYPh4RKmadDt9Tl6dD/5fJaVtSWGygUs2yKOZRgzjN7l96x7z2g0tw/aIqLRaN4T1dzugXsPYtsGlmURhiGmaeE5eSDGthwcWyb+OApxrCxzM4/j2B6lwhiZTB4D2TeKY6I4xnUd4sig1w0oFefIZcpgxDi2g21ZmKZJt9cHYhzbxvctllc2GB/L8oknHmR5ZZMolI68htmV19a9ZzSa2wptEdFoNNfE4UPz/Fd/9ausb/xHjLhAbFzEsS0wjEtxHB6WaWEaFoYBlmXj2BksywXDRrlpbFvKtQdBSL/vU6k2yWWnKRcmMEwb4ogogiD2yeeylIp5arUmmYxDFDrsmZ/iiz9xgFwu5Dd/6484v1BjfCRLJt+m1Wrr3jMazW2GFiIajeaamZ+b5Oihac6e6/D0kx9jdTlHrdElDEMM08K0XLJemXa3QkyM43ipNN4YDAloDfwAg5i+79Pr+RTz9iXRENH3+4RhQBTHdDp9Go0mszMTfPaZRzh6dBzPHebQIRPXhV/8ha/yh398juWVTZY3LuK5ju49o9HcZmghotForhnTNPniTzzC7/zea1RrFSYnhgmCTTzXlkDTOCImJo5iDGIs28U01ONgGCaeO042lyeb8TAA0zDp9n3Khk0cxUSRT3zp9cIwoN5sY61vsVWt0u9P4DoxK6urtNpSSfWnvvQ0W1t1Rscf0ZVVNZrbEC1ENBrN++K+e2f5wudcXnvjDVZXIjAgjMC1PRzHwjTAtCyiKMRzypiWRxTHEMeYhoNtO0xPTNLrNwn6FqZpY5kOlpUhiqWOiGEYZDwbMLFtk61KjX/zb/+YH79yGMe0COKzlzvy7pl5kscfPcq9R/fd7K9Go9F8ALQQ0Wg07wvThJnpcWZnPseJE02+/9xbbFY8MhmPOA6J4/hymm6xMMHE6AEc27z0uCEBrm6eQn6ESsXAsm2cS3VH4ijCMGNGhsYJAp9mu05MjOc5bG41WVpaw7ZsesEqH3v0XrJZlwsXV9iqVJmbe1i7YzSa2xBtv9RoNB8Qk0w2xsDg3qOH8DyHIOyjUnUty8RzM3hujiiKL5c3i6KYQn6WQm4Ky7bwPId8oSC1RYwQz3PIuqNY1hAGFlEYEvghnV4Tvx8xNFQgDEPOnl8kn8szPTVGrbGpK6lqNLcpWohoNJoPRBwDRpcgDBkqjzI6UmR8fALHyWLZNq7r4NguYyMHME3rcgEzA4MgiGk0O5QKefK5wiWhYhKFAf2ejx9GlAtTjI0evFQa3qXbbar+eAyVhu
l28jTqYmUZH3V1JVWN5jZFu2Y0Gs37xrgkCEaGbWzLwu8bxHGH0I8xDQ/bcumEMYbh4Dg5MpkMSVlV8P2QoXKe4aEiMESznsEwDfygRxhGWKaJaVlYhoHjZClkp7GszCUxE5DLTdLrufiXaohkcjG9dV1JVaO5HdFCRKPRvG+UENm/b5iM57JwcRWMCgZZcpkstuVg2Q5gXKorYl6qA2LgOjZjY8P0/S5nLyzjWBGF/BxZ07yUtiuVWw0sMGxsI4NpmpfiSCCMOjhmHssEx5YS791OU1dS1WhuU7QQ0Wg075tMBtptWFpeot3uAi69Xo1iIYdpmPiRCbGF7TiMF0fxPAeLHJ1eSDbOsbhcp9NtYdsWnmPh2h6mYWLEIaYBQRSR8UpgmPR6W1imiedJQGu/1yQMMpTLRXLZ7OVKqg/ef1BXUtVobkN0jIhGo3nfZDIQRRHf+M5LRMTMTk9RKNgEYYBpmtimhWFI35h8LotJTK/fx7VthspFxseGePjBQ5SLeQlOzWWxbYuQkEw2IzVGTAODGNN0cFyb8dERbMtgcWUR0zSZmhim2+mxvLKuK6lqNLcx2iKi0WjeN5kMbG3VOH9hg6H8XhzHY8/cKL1eHsIcUQyG2SQMQ2qNHo5tkst6ZNw81fomUxOjRJGPaQ6T8bKYho1h2mS8DNUaFPNZMhmXKDIZHR0jjiLCyKFULjI2McFIeZhmu4tj2+zdM8VX/9wndOquRnObooWIRqN532Qy0On1CXwLy7GIiTDiPLZZJsbBAkZGRmh3YmamhxgZytPtZqg3fMJImt01210MI4PrFDCwsAyHfLaEbXt0ewG5XIxhxowOleh2fe675yif+eTjfPWn9/LCCw36fg/X8ZibK7Nvn7aEaDS3K1qIaDSa943jQNZz8bwCYRBg2xExkmarmtvFETiWRblUIJfL0utJOXfLtOj3Q2zTwTRDSd01TcAEI6aQz5PP5QkCn8D3CXyHmekxnnziPqanxoljGBsbxnHA96FQuJnfhEaj+bBoIaLRaN43hgEjI2Wmp6ZYXm4zVM5cCjgzLm/T64fkclmyGe/yY7Ztkc3m2Kg0mBwvkc1E+EGAYVhAjB+EFIsFZqYmqNXFqvLwgwcYH8szMSGv0G7LseJLDWny+RvykTUazXVC2zM1Gs0HwjRNHn/kHjKeTbVWxe/3iWOIEXFh2w7D5SJqmJEyIiYT4yPkc1lW1yoU8llM0yQIQsIowjINSoUc9UaHbMbhgXv3US4Vcd3kGJ1LpUJUEdVM5oZ/dM0dQhRFLFxc5Z3j51i4uKor894ktEVEo9F8IAwDpqfGeepJi2NvrVCtVjENH8tycByXsdEinpfZtj1APpfj0598lGNvnWBjAzKui2na0pnXs8EwKBeHmJqYZGwsTxSBlxhVLltE1JxhJEYYjeaaOXlqga9/4zlOnVmg1+vjeS6HDszzlS8+pQOfbzBaiGg0mg/Fvr0jeO4QjUabxUWLMLIwDZtMxrzsPlFi4VITXiYnRpmceJJz59rU6xGVSoZuz2Rs1GV8PKbTyWMYJrYN/T5kL9Upy2YTi4hG80E5eWqB3/ytP2KrUmdmeoxc1qPd6fHm26dZWl7nF3/hq1qM3EC0a0aj0XwgHEf+5vNgGCa5nASleq6LYWwfWixr+74iUEyKxQLlcgnbdrEtm1KpQKFQJAi27+9KUVUymSQ2BLQ1RPP+iaKIr3/jObYqdY4cmqdYyGFZFsVCjiOH5tmq1HUDxRuMtohoLhNFEYtL6zRbHQr5LLMz47pAlGZXMhmxVqgYjSCQv2nrx+X2MkZyX43vaddKWlyAZMM4TvK4EjKD8SBKoGjufD6q8WlxaZ1TZxaYmR7DMAyiKEscZbDsCoZhMDM1ermB4vzc5HX4JJpBtBDRANpfqnn/ZLNQr4N9aRQJQzDN7ULENOWvEhLKNaPu74Z6blC0KCESRXJsnTFzd/BRjk/NVoder08u6xEGo0RRFoixqACQzWXorW
3pBoo3EC1ENNpfqvlApEUBiEXEsra7S0xTnk9bRBSDlu+drCaD2w6KFy1E7nw+6vGpkM/ieR7t5iSZbAaIsZ3ly8932l3dQPEGo+3udzk7+UtN09P+Us17ojJZej35G0ViHVFWkLRrRlnQ00IiDK88pnLTKPGi/iq3T7ebvBYkQayaO5PrEc8xOjLOnpkn2Ko2MIwejruIYcj+qoHi4YPzuoHiDeSGCZF/+A//IYZh8Hf+zt+5US+puQYG/aUAgT9J0J8FtvtLNZo0SlwocWBZIkQGXTNRtF2ADLpbdjqmYtAColJ3d9tec2eRHp/AJAzGiGNVU+b9j08bG3DhgtS/yec7nD73Mo1GiyAMaTRanDi1oBso3gRuiGvmxRdf5J/9s3/GQw89dCNeTvM+SPtLFYYREsc2gT9LJnuR3uomp85c1EGsmh1Rlg/TlABTZRFJWzUgsXao2051QAbdN4OBryp1dzALR3NnosanbCZP4M8ABqZlAnLyXGs8RxzDmTMSBA3w9NPjzM1/Nok7WdvCcx0evP8gP/kFHRd3o7nuQqTZbPJzP/dz/It/8S/4n/6n/+l6v5zmfSL+Upd2p0exkAPAdlYIgyGiqMDGWoGFxQb/+ve+jm1ZOohVcwVKJChxkI7z2EmIpPdL35SYUZYUdV+lCUMykewUc6K58yjks2QyQ3Rao2SyYFkVDCO4/Py1xHMEAZw6JfcNA44ckb+HD81z8MCszhS8Bbju3/jXvvY1/tyf+3N84QtfuN4vpfkAzM6Mc+jAPEvLG8SpWcKyq9QaZzlxegHXnqWYn2F8dIjhoSJvvn2a3/ytP+LkqYVreg1dRvnOJR0DYlnbU26VwAjD7fEiivRpoJ5XYkUJEUiESPrYkGTraO4MBseJIAioVg3K+f0sr25immuYVuvy9tcSz9FoJCKkXIajRweDqU3m5ya59+g+5ucmtQi5SVzXS/l3f/d3eeWVV3jxxRevafter0dPRb4B9Xr9er01zSVM0+QrX3yKpeV1TpxaYGZqlGwuQ6vZ4dvfe55G0+fQ3idYWOiB4WPZCxzcP8vmlgSJHTwwe9WLV6cF39ko4eB5EiuSDlRVpN0wg7Eig8XJoihJzVWka4j0+7LCtW3I5a7f59LcWAbHiX4/gGiUTHaEIAg5fuo5Tp6NePC+g8zMjNNpd1la2bxqPMfSkqSXA8zN6S7NtzLXTf4tLCzwy7/8y/z2b/82mWvsSvVrv/ZrlMvly7f5eT1R3QgOH5rnF3/hqzxw30G2qg3eOHaKb373RdbWKvS6Lc5ceIE4cvDcEUJ/jtfeOE02475nkJhKu3vz7dOMDJc4uH+WkeHS+7aoaG5d1KXteSIYBjNhlAgZFCaQPJ52zcSxCA1lYYHtVVUhcc/o1N07g8FxYqhcolEfYW29z+ZGldHRKo8+fACAl18/zutvnmKr2uDB+w/yi//llam7UQTHjyci5NAhLUJuda6bEHn55ZdZW1vjsccew7ZtbNvm2Wef5R//43+MbduEO+Tu/cqv/Aq1Wu3ybWFBT1Q3isOH5vnb//Vf5C/+zOfJ57MEYYBlWQwPlchkMqxvncbvWwyV9pLLHGZ5tUG31981SEyXUb47UOmznpek2e5mDUnHiAzGh0BSDE0VRlMoQTIYoKotIrc/g+NEPpdlfb0Iscn4WJm1rdc5c26J+dkJvvLFp9i3Z5qD++f4u1/7K3z1pz5FEIbb3L3dLpw4kVjp7rlHu/BuB67bT/T5z3+eY8eObXvsr//1v84999zDf/ff/XdYO4S9e56Hl26zqbkq16Mk+2tvnKDd6ZLxPLJZH8N0KBWmyWbGaLRWiekzOnyQzQoQ+7sGiQ2mBceRB8QYZl+XUb6DUFaKdHXVQbGhHlekLSJRJM+FYZJx0+ttr9Carsqq9ku/pub2ZXt6rkWjNkm7dQ7HDWm0jzMz+TDtboN6o0WpmGdyfJh33j3L//rP/39sVmt0Oj1yuQwP3X+YZz7xafK5aQ
AmJmBk5OZ+Ns21c90u5WKxyAMPPLDtsXw+z+jo6BWPa94/1yP2Qg0KI8MlLi6uMVyeIOPNYpo+URgRxxHV2gqeB547w9RYnigcZeHi6hViaDAtOAjytNsGfrCB47bI6TLKdwRKiCixEIbbM1oG40XSDBrDlDsmCLYHFEaRPK7qlei1yp1Dkp5boN89QhRF9PwWhrlIqXCvBDs3K6xvVHj3xHmWljdYWtngjTdP4nkuhXwW13WobBU5e+ZH/B/+3Kd5+ulxPC8Jfj1zbgmAA/tmdEDqLYpeU9yGXK+S7GpQGB8dwrYtTBeIpHaDYfiEkQ8xLK8sU8jVGR35KX7v919jdWOBZvvdbWIonRbc7/ucPnucOJwhDGOCqAZsMDxc0mWUb3OUVUKJDd/fOR5kJzGihIiyoigx0+8nrp70a6RriOjU3TuDQj5LNjNCp3kEx/EwzQ3anQXGR+8BoNPZpFo/S/ukQxiF1GpNgiDAdTzCMKLXi5ifeYQojDlzfpHv/vDf86lP/XWe/f4b/Kvf/o+8+c5pur0+jmUxOlLmqScf4r/8K1/RgfK3GDdUiHz3u9+9kS93RzLoU1XVUFXsxYlTC9eUzbITSjxYtsXIcImz55bYqlxgavwRIJTB3zCJohjTsnnznefJetMMD88wMlxiq/oOP3rhDU6cPM/X/uu/yKED8zz342NUqnW63T6FQofRofsIQo+N9Zj1cJ12p/uRf0eaG8dgb5jdrB/KUjIYG6LSe9P9aNS26vRVr9HvJ/9rIXJn4NjjTI59io3NCuOjbfLFTWanH6RWa2I7VdY2zxOGIYFpks9luNBew7ZtisU8I+UjmEaJTqePl+mzsvYm3/iOxcLiCj947g3anc6lIGgDx7HpdHv8pz/9IevrFf7eL/0XWozcQmgb1W3GTi2sw2CcODY+UMnjNKqmyPLKJqPDZSrVBn7gs7z+Kt3eJjGSu2+aDtValdNnL7K6fpoLi4ucvxCysOCyudXnxy+/zf/tH/4mRw7NU2+0WF+vUMhncByDreoxup0uIyPjzEw8xp9+8wWiKNK1Ru4QlIAYFChpoTKYNaP6yFhWIkzUsdRxlIhRz+mMmZvPh71mz56FY8dgamKYnn+WE2ffoN3MMzUxTLN9mlNn3pFFj2mR8Rw63RyzU49TKpbZP/9FpiefxHGznL/4Ou+efJlao8XC4hrf/O6LtNsdwMCyTCAmCELanR6NZps33j7N17/xo8vvV489Nx/tmrnNuLIku0EUeUTRDJZVJZMNqJxb4o23pIrP+wlgVTVFLi6t8r0fvkYQhhiA7/sEYUinUyWbHSIIfCzLxjTBD31q1Q5x3GF4eA+5XJ589hynz57k9/7dN8llM8zNTtBqd+l0Y0aH7sGy1zmw72Ecy+P8+Tbf++HrvPHmSV1r5DZFiQQlQpQQGawfMji+DwafWpbsq4RJOhhVHa/fl0wdnTFzc/kwMWpxLALk3eM1Wcxs/JhuF1pNh3dPXcCxl5mZLjMyksX3fc6eX2Zq4mEs02Gr0mVu5mco5qcJgg7nL/6YenOTUiFPFEZEUQwk/kDbyrNv3ydY3zzNVvUsnbhHpVLjtWMnWFxap9vt6zpHtwBaiNxmDJZkN8022DFhMEKt5nFhIeT0uSV+9/f/jD/95vM7XlTvlW3j+yGbm1V8X2aEXHaUYm4Mw7QIo4g4DgmCCNvO0Wp1KeUnsCyHfq9HsTSFabpgFNjYvECr1eETH7uferMDWIyU9pHNHcW2awRhwOJqm3/370/T7Z9jZnr4I4t30dwcLGt7lku6Pkg6gDX9VwmRdDVVZQ1R+2Qy0vBOCRFtEbl5fJgYtSiCV1+F02dqvHHsOBvVF5gaP8L4qEOt1uDEmR/iuln+2s/9Z3zyqYf48csn+K3ffo0YA8ccxjR8splROr1Nzl14jnpjVc4vDHp9//LrjJT3cfTQlwCDOI5YXjmGAYRhRLPVYXF5g7fePct3v/fyRx5rp3n/aCFym6HcJ2
++ffpyjIhpdqg03+HMaag3uhza/xT3HC2xuXVRYjZOneeX/pu/xNHDe6+6kgH4zd/6I5aW1yVY1ZC1xdjIQcLYwDE9iDoEYUQUhRiGDXFI4F9gZGgPnjuKYw4R2QblgoWbGeVHL/wnvvuDV3FdB9u2GBupcWj/ExQKw3Q7a6yunmZ68hD79z6B465jGP5HEu+iuXG4rqTcQlLmXTFYxj19P53am7aoKJdNWoioY6p9VJEzzY3lw8So+T68/DI0GhFnz59lo/oce+cfotXqcfb0SRaXjxEEIUvL6/zG//vfMVQe58I5i+WVLUzGyWYMHHuIxeVjLK28RK2xShhFTI4fJZsZplJ9EcMwOXrwixza9xlMw6HTq3Jx6RXqrdXL7yOOYrY2a3z9z37E5laNhx84dPm96rHn5qCFyG3GTiXZM1mPt945zcXFNWan9zAyvIdTp9s0mx7rm1XOnLvIr/6D/42//vM/zbeffWnbCqDV7vLjl9/izXdOk89laTRamKZBu9MlujTo9/pNxkcO02itYJkOQdgnmymTy44QE9HurLJVvYAfBOTyJSxziEzGIAjyTI49TKd/nuGhPH4QsbK2Sr3xbR598CfY2PTxvBGmpiIMw6Df3YdpdbCdi7rWyG1EJiNCxLYTi8hO5doHA1l3KmgGVzbKS++ra4fcXK6oDxQbELsYZu+q12ynAy++KBatOG6ytPo8U+P30GqZXLhYZ3n1NJmMh2NbeK7N2hr8v/7pt9nabOK60+Szk8REnL/4HOcWXiAIe5imzZ7Zj12KlYtw3SKPPvCXGBs+iOvkaDSXOXH2O/K+Jh9mafV1AKI4ZmOryvd/+CrFYp5+3+fQgQOMDOcwjFiPPTcBfVnfhqiS7MqyUTm3xNp6hfm5SWamS5w++woZby+e57J37mNU60ucOn2Cf/Tr/zvjY8M89shRDMNgc6vGqTMX2dqqsbFVo9Ptkct41JttwGV0eJat6ln6/Sat9jpjI4epVM9j2x5B0MO2pYjEcGk/G5XTRI0len6TffMPk3FHafqbzM3cT6s9QqV2CtvpMlQusLFZ5bmX/hMP3PNZhsojeO4UlrVOFO0j9EeJIw/HO33NLb41N5dMBmq1RIioeiD9vlg7lDAZFB7prJntjciS+0p4KGGiLSE3l3SMWhxbBP4UhuFjm2sAO16z1apYQuIYxseh268TR6OEYZELC2eoNyoUChZ9P6Db7TM6/CDdjs/aWodsZoZSYZJev835iz/m7MKP5HUyw4yPHsY0TTqdGpbp8KmP/02ymREc26Pfb7BZOQPExLHUQEoTBCE932cqnyPwZzh7FqDL6Ii36+fQXD+0ELlNSbewfuOtU/zu7/8Z99+7n1deP0EYZcjnPUzTI459yqUZiIssLL1GLifiYXOrwdmzDq1Wjky2Rz7fZ6siabbEYFoO5dIM+dwo65snCMMAA4ODez/NZuUcm9XT9PtNRob20epsMjZ8mGrtPJ3OFksrrzI9+QiuM4RjF8h6Y2S8Oc5f/CErq2fJ5zLYtsVnPj3GD3+0Rr8XYJrjOO5Z/N4horBAv3uUvv/ae7b41tx80kXNlOhIC4/BGBHFoDBRj6VLwadri4C4aHTq7s3jcoxaOyLjTQFgmklH3E67u+2aXVqCd96R33N+XoKMX3ipR6tZ5MLF16nX66ysvQIG2JbL7PTH2disU8zvIZ/Lks0M4YchQdCn1lgGYHL8XjJeCcv0sG2P8eGjTE7ci+fksZ0sldp5wtCn060SxxGN5iqV2vltnyOKYgI/Q+jP4hUz9Ht9Tp1ZZGT4XgzDuOJzaK4vWojcxqgW1gB/+s3nWd+osVWpk830iWMfYk8G9MgBw2R26mHiuEWt3uLUmQVCf5bxkXkw5lhd+yEABoBp0O+3sEyH4dG9uE6eXGYYy3KI4pix0QOUS9OsrB6j12/g2nnCyKdQmKbba7BZ2aDZ/BF75h4n4xXI58pks3lKhZ9iafVV6q23GRstc8+RvS
wurfPu8QvMzx4iDCZwvNP4/b1EYZZ6/RCH9m/t2uJbc2ugKp3atlgz0jVDduszoxi0kChXzqArxveT57J6brhpzM6Mc2DfQc6c8ZmeirGdCqYpVoM4jlla2eTB+w8yOzPOyZNw4YL8lvfeC80mHD+xzmuvblJvnqPbrbO89jL9vo/njnJw35ewTZMoNshkRrBMhygK6XQ2MU2brFdmz+zHMQyDcmGGTHaIsaED2E6WrFfGMKDV2SKKQs5ffIG+32Jp9Q0cO8Oe2Y+ztvEuvX4dy7Q5uO/T+H6fVrtLrXGBQkHywuuNNqVibtvn0Fx/dBTOHYAKYF1cXsf3AxzHpNk+RRi1yXoz9AMoF8pkMzlMo0irOU6t1qUXnCaM2vh+xIG5r3Bwz6eIgVx2CNfJslk5w+bWGcqlWVw3TxD2qTUWLtUSsZmdeZx2t0ouO0QxP0khN04uO8TI0AE6vSaLS68TRpuEURfLzJLJDLN/zzOMlB9la6tNIZ/lK198ilKpx4WLb9LtdOn3R+n3TrCxeYFcNs/0xOfpdPRpeiujXCnppnWwsxsmfX+wM28YJp131WNK5KjUXdAZMzeTTsfk4fs+SSGf4/zC67RaGwRhSKPR4sSpBUZGSnz580/xxhsmFy7IPo8+Co0GtNsRL7+yQr11nsOHyvT983S6PqPD93LkwBcxsHHdUfK5CVxbfvhmaxXTcqnUFji0/7Pks6NMjt3LxNg9HJh7Gs8rkssM4Qcd2t06W5VznDz7LRqtFS4svsjk2D1MjB2lVJhieuJBxkeO8OiD/0dGynvw3AxLKy+zsXWRSrVO3w+oVOuXP8dPfuEpHah6g9AWkTsAFcB64tR5zpy7SKvlkstlqDcu0Gz2KJVmmJ2a4cLiO/iXuqMWcofIZALa3fP0egYZ53EmJ44wPLSXpbU3GC5l6HQrLK29ThQHlIuz5LJDhJHP+sZxhsp7ME2bqfH7CPwuhhmTyZTxvAKN5grjo0eoVM+yun6GuRkPbAtiG/AYH3mIfLdMu21y7z1JvMvpM6eI4ykcu8C+fSYP3LsPgzLPPw+PPQbDwzf7m9ZcDeWi2anB3W7/Dz4XhmIFSTe8U9aUXk9eQwuRm0O9Lq6W6elxfu4vP8y3vieW1d7aFp7r8OD9B/nSTzxFrTpPvS6/40MPweqqCJGNzSZrm6cYGelRLBocPvBxxobH6Pdb5HJj5DJDBKH0COj7LVqdCnEc0e832Tv/CWr1pUuiYoZ8fpQ4DMlmh4miANt0OX/xBda3TrK28S4Ae2afwLZchof2sVU9y+zUw0RRQD43zPL6a5y9+D1c18LzXIkFiaHT6fHwg4f5yS/oOiI3Ei1E7hAOH5rnl/6bv8Sv/t//BafPLtLr+9h2m5Fhn5npaXK5LuXCHmyrSrvTk1ofOGS9Q2xsHuPM5r/loXt/GtctMDPxCGEYsL51kimvzNLqG0RRQKdToVyew8sUabY3sEybUmEax8limS5+0CabGYY4otFeZ2T4AK32GpXKFpbZolicJwx9XGeI4SGX11/zGBneHu9Sq3epVkqMDJcpFk2CAM6ckWC3+++H6emb/U1rdkMtHtPBp4NVVAdJP66sHoaRxIakm3SrFGHd9O7GU63Cyorc378fPG+Oe+/9i9vqEY2PjfPyyyadjojFw4dFhKyvy/+O06HVPs3ExCS97jy2WcBzG5QLc0CMaXkEQZ9mew0Dg26vyWh5D9nsEJ5XJjNWxnEyEMvJZVnOpZMnptpcJpcdZmPrNMPlPbhunkJ+kij0aXcqzM88jmU6WJbN0tqrnF94DtuycF2X6alRlle3+MSTD/F//uWf043xbgJaiNxBHD28l1/9v/wN/slv/H/Z2KoxOz3G+LjU6zh/8Syjw0f42KMPc+58la3Nl2k1DRzXZ276ITxnhdfe/COymTH2zH4aDIM9Mx9nceU1ZiYfotlao1JboB902Df/FL7foe+32KqeY2LsCJbtUihMEARdrPw4luXR69
eAmHanjesGGK0lysU5grCNaxVYW495/nnxHx8+LPEu88jkdOIEtFoyKd1/P7z1ltw6HThw4GZ/05pB0jEg6n91U2m86nHFTsKk399enRWSuBMlSnSw6o1lc1PEBMDBg0lNl3SMWqsFP/qR/M7DwzA1JfssLcHEhOwzOgael6HT3EMUz9LprlDMTxETYWDS7TaoNhbJeCWiyOfg3mdotTcpF+fAMHCdLN1uHdvOEBGCadDrNWg0V2m21ljbPMHk+L0YwPDQPirV88xMPkQYhdhWhmZrlbXN46ysvwFxgOs6uI5No9GmWMjxl//C59m7R690bgZaiNxhHD2yl1/+W3/lcmrv2fPLeK7DA/fNc++Rcd548yT1doex0f1UazXanVXAIJMpMTf9BAvLb/Dqm/+Gew5+BZBiZq5boFI9RxB2OX/xecZHD+M5BfK5MXq9OvXmKkPFacKgi+cWiKKAGHDsDLadxzDqOI5FGHXBqGEYDlHYplIpUS5FvP66SbcrgkOlf95zD5w6JTED1ar4mV99Vawj7TY88MDN/JY1g6SDVNMiAhIhMhgnov6mM2pU513X3e7iAZ26ezNYXYVKRe4fOrRzHZfNTXjtNfkdJyYkmLhSkX2np+X//fthY2Oc6fHPUqmaQBfPGSYmwrayGEA/WCaTKZFxCpRLs/T9DuPDBzFME8t06fsdMpkycRQRhRHV2iLEMe1OlWZnkyDsMTV+P77foXPZCuKBEXLu4vNsVs5Qb1zEc10sy8QwIJvxcFyHTz39MM889fAN/GY1abQQuQNJuzqU2bTd6fIv//c/lmJmU5OMjhTJFwpcvGhQqS0xVBrGNJvMTNxPEPZ49/TXKRVmuP/IT+PaWfbNP0Wv16BUmGFh6WVGhveRz45TKk5img6dXpNur85QeR7bcslnR2l3qxSdHLGRxbIiTFw2KsfJ58YoFUfo+S3a7TJR5PHuu2J6f+CBJCvi0CFZUdXrsLYGjz8uLpqVFdn2scf06vhWIZORVXG6gJliMCh10BKSrr4ahrK/be8sRPTvfeNQ1x6ImyXtJlMsLIj1Mo5FbPi+nAebmzAzA2NjUjvk3Dl4/XWTsZF9bGxcpN2OiAHPLRIEHeI4xrI8Rob2AzGGYZHPjmA74q4Jwj6uk5fiZQRUqucxDKjWLmLaDt1OnYfu/Qu0OxVsyyOKAjyvwGb1LBeXXmF1/XV6/d4lt5/Jnvkp5mbGCcKI2Zlxfv4vf0W7Y24iWojcoaTNplEU8U//2b8dKMu8TC5nEUZVxkfnyWUzlMsHqVZ61NuncZwM6xtnuLj8Kntnn6TbreO6eSbGjuI6eVqdTRZXXiOKH6BcmMbzivT9FpXaBcrFGVxi8tkRsYJgEYQ+URwwPnqUfKGP60CjsUUQSC2CSkUmpGYTHn4YRkbkc8zMyCS3tiYC5PHH4Y03ZPsXXoAnnth5gNTcWHYSIoNumMGaIXBlOXdV7t22rywB77o6PuRGceGCWB4Bjhy5UlwCvP02LC/Lb3PPPXLt1usiRmZnYW5O6oa8+aZsW6+3WFhsXHK/Obh2jl6/iW1naDbXGR3aTxSHOHYW4hjHydDzW7hOFgO5yDvdKs3WKr1+gzDq0+lVGc7s5f6jPw3EFPPjWKZLTMzF5Vc5cfab1OoXGB4qYncNTNPA81zCKKJYzHPk0B4dmHoLoIXIXcAVZZkjB4yQfrDJxtbbzExZNJse5dI4ET2GiochLtDvB3humfOLPyafG2Vq4n7AYGriflqdCqPD+3n31J8QBn2ymRKjI4fw/Tat9gZxNo9pjpHLFjEsIGqBCZZhgtEnClqYZouRkfhyNkStloiRBx+Effvk/Y+MyAS0sCBi5L774ORJ2e5HP4JPfGJ7fxPNjUdlzERRUk8kjkUkqmyYnWJC0uIknfprWUkXXhXE6nm66+6N4MyZJFX66NGdC9G98oq4TEFcqrWaxIRkszA6KrEkYSgWzLNnodFocf5Cg2arheMUsG2bdquGYV
iEQZfR4X2EUYBjZTBNE8Ow8C+5elWg0Vb1ImHUpVq9QCYzxPrWWY4e+hJB2MdzcvSDLoZh0u3XWd88yY9f+/9gWy4zk/cTRGv0+z5HDu3l4IE5tip1fv6vfIUnHrtXW0JuAbQQuQtIl2UGCMMCcZzDtTNY9jssr71GoxHT7tXxnBny2SlMI8/8zOMsLb+BYRgEUZ8LCy+wd8/TNJqrZDIlspkSD97zM9SbF9msniMI+4yPHsK2M2QykCt0CH0b18qAWQQupT1EBp1ei2Jhnrm5DLWarL5MU8SF78NLL8n9++6Tx/N5CVI9cwY2NmDPHjEd12rwve/BU0/pSepmkq6u6jjbV9AqYHWn8T4dI6JESxQlsSZq/25XhIhO3b2+nDiRfP9HjlwpQoIguTY9T67JalWuxbExeezIEbkuX3lFrtUwjFhcatL3e7hOiTj2ieM+lgWulSeOTUzTxjBMHDuDH3SxLBPHyWJgEAQ+rc4a3d4mPb+DlymTyQxx9NCXMYCsVyaMAwzDoFq/yKtv/h5b1TNMjt1DNjNENutycfk8uVyW++87wFC5QLvTpVTMaxFyi6B/hbuAy2WZOyIELKsJSL+GybFH2Nzs02iu0+qeIIovsrrxKqbhASYT4/cwXN5HFAZMTtwr2TL9BsQRrdYGI0N7GRs+yv65T9BonSKfbzA8bJDJDDE6MoJh1un5DWL6QJY4Nun5PrZZIpvJc+pUj0wmYmhIJhnXFSFSq4k599VXk9WZ64qvGqQuwfAwTF7qR/Xcc7KP5uagLFLKGgLJJDYYqArb76uJz7blvrKKpN0w/qUO77qq6vUhjuH4cfn+LWtnS0ivBz/8oYiQYlHcppWKiJCpKSiXxUVz4QI8+6yIkFYLqlWffj/AtYv4fhvbyhCHEYX8NK6buyQ4wLGzRFGI5+axLEdKrffr+H6dMKjgunmi0Gdy/B7y2VFymTKOncM0bcIo4PzF5/n2D/9nOt0t9sx+nEymjO2YrKy/SS7n8NQTDzA2Utbl229BtEXkLkBVXn3z7dMSI2L6OO4iYVCCGIbLB3FdG89dIYgXCeKQ5bUGWW+S0ZGDEMUMlR6h22vgB1VGhvZQqV1geGieenORrDdCoTDG2MgMhdIWGC2q1Sam8QizM7NsbK7T6VQwKQEOBnnCqMnmZptKpc3SMuyZLzMzU8AwJNANYGtLBsZWCx55BEqlZJA8cUJWZ44De/fC+fPS3fOhhyRyX3NzcF2xXsD2lN50QOqgCFEWEZWRoVwyysoCiRjVC9iPHiVCQMTf/v1XblOvy/UVx+IqzWZlMaCCUsfH5XEVDxKGYimxbRVrksGwAxwnT2xYeJkxiGMMw8QyDeLYAuJLjTRj4jik1a4Q06PXa1DIjxGEUCxITJnn5jFMA8s26XS3ePXN32N57XX27/k4lmkThiE9fwPDanJg/wT333OAsdHyFWXoNbcGWojcBajKq0vL65w4tcDM1CjZXIbV9TOcOv8G0xOPkM/niKNJDMPD771FvXeSZmudTm+T+ZlHsEwHjAxhGLNROUchN0a3VyMKAxwnhxXFZLPj+L08YVQl5nXmZh/E9wu4zhyG0aNW67C55WPEJhlvGIMeMTHdTpdTp1fxfZO9e3OEoQxymYwMZlEk/z/2mKQDGoaIERVQFwRiDj5xQgJZDx8WcaK58Xhe4mKBna0hcOXzqmNvury76yYiRafuXh9UzR4Qi+T8DjGbq6siMOJYrj/LEmHS7UpQ6vy8CI4XX5QFQRiKddJx5Lo1TROMGAMHx3GIQqkbEhNgmQ5+EGBfsoBggIFPv9ek290ikyliGBaeN4Yd9iC2iSI5QdqdLZqtRV469i8o5se45/CnKJcKFApZCoUNmi2TdqfEwX0z5PJZGo0WSyubunz7LYgWIncJhw8lpdRVWeZut08+l2F2tkuhkCUKRvDscYJoL+fOv81G9Q1WVl8jm7HIZWYpFefoRnWKuTFa3Sp0IsZHDuD763juNLXmIl
lvlDDMY1uPA31mZqBWM1la8mi2KgRhk6w3BPhABoMA143p+SaLyxU8L8PUlInniQnYMGTQ6/fF/XLvvWL+NQyJE1lfl1VZqyXxJG+/LYGs7bZsq7lxGIaIRyUmVGxIOmNm0NyfLnSm6o+ox1w3CVL1PJ26+1EThnKtgLhVdqpafOaMBJvGsQSPB4EIk0xGLI8HD8p1+txzch32eklRulpNLCfttoVj2YShh2lGxLFJGAWYpk0cg2MrH1yEYXaxjAgv0yPTyxPHMFTeA1GAbXqXAp+71NsbvPzG/06zdYG984/hOC6e62DbFUZGe/zif/kXAC6Pdyvrlctl6HWWzK2HFiJ3EYP1Rd5+9yy/8Zv/jjffPo1hGNi2xczkPUyMPkgmM8WB+Rky7muY1jKmNUarvYbjFHDdDGHkkc8ViNjANkt0ulUwQkzDwjQdsvYIr77aY26uxZNP5un125y90CPjFcDogBFDZCJhSllcJ8LvQ63mE8ce5bLEgDSbMmB2uxIncOyY3H/wQVmFjY/LJKUCV5UYWVyUKqyPPqonsBuJ6hOjRMduabsKVewsHVei9rOs7am7OxXT0nwwgkAKBoK4VHZyZ77+usR5gAiOXk+uK+WaOXpULCCvvirPqZojvi//iwiBMDQxzSJh2Jd6IaZNFIWAKVl8QBT1iOMOnh0AHpZpUixMSO0PDGJsICYIG3T9BU6d/1327NlD6M8RRhG2FVIoVnnskaPbhMZgPaXZmXFtCbkF0Zf2XYaqL3Ly1AI/fuktwjAkDCPGx8oEQcSFxbdYWj1JuXA/+dwse+c+zux0jm5/jSAwiaI8cWgTx3vYv2+CE6cWqVQ2McyQYmGeMN6imJ/FMloEQY7l5YgXX4wYm+jS6y+S8SaIoyGIYsQq4gEBBgVi2vR6SWl3x4GhIZmIajX5u7EhE1e7LSIjl5PYEdeVokm1mlhMTp6UGJMXXoCPf1zHFtwIXFcmICU81He+WyGzwcdU0CokVVrjOIkF0hkzHw2+D6dPy/3xcUm3TRNFknZbq8lvuGePXJOrqyJYSiVxxxw7Bu++K7/P1haXrZiqIJ1aPACYpoXrOoShRRiGl3pdARZk7IBs1qPVbtHrWcRxgGWO4jgxjlMGQny/ix/V6Uff5fCRMn/+q/89nuvQ7vbI5SsMD3mUivkrhEa6npLm1kULkbuQKIr4+jeeo1Jt8OTH7ue1N05SqTYp5LMMDxVY3aiwvv4sw0OTPPPkV4njHJ6zn2y2CvRYWdlgcuIApWKebCZDdnoE08xgWiG2OU2Mj2lEWBj4gcH6eo5Go4zrjBGFq9hOn9AfuzTTtAGXmD6Wkb0kdmQQLBbF9TI5mZh6LSspftZoSEGz0VExFR86JKu8RkMGz+Xl7bVG9Ir6+pLJiBBRFg7T3N5nZqesmbRrRmVspMu7x7FMaMWiTs/+KOj1xNUCkukyNLT9ed+HH/9YrImZjFglGw0RGtPTIkSKRdnmwgUuLxw8T+K5cjnZV6Vcp910Mt1EWJcqEJomFAoGhYKD70O5PMnaWgyERJF1yUprks9HjE9E3P9ATLPxVYqFMqZpptxJUzfo29NcL/Q68S4kXeBsbHSIRx9+lAN7nwZKVKpNHMtmaKjInvkSa1vP4QdniOKQXjvHxrpBvgCf+PgE2VyPvt8gkynjeS62VQRkSRvHWQyzRhT3yOX69PsWGWeWXn8e02pgu+uYZuPS9n36fhfLAsuyLseGtFoymC0tyeQ0MSGDoyp+trIiNUTU6s62xVwM4qceHZVBs9uV7VT3Vs31QWW5qDLtahK6mnsmnU3j+0kdEpU5o6wsoC0iH5ZOJxEhs7NXipBOB77/fflbKIgLptkUgTE7KwHgliWpuefPi0BptxOLZS4n12wQJJlTKh07itRvagIWmYxFuWwxNGTi+3KMtTUT27YA91KAqsnICPzsz5p8+Ut5LGMf5dIwpmly8KDuxH0nodeIdyGDBc5GhjMU8xPSeyEIMaxV1j
dX+c//s89x/NQFTp05Rb9/kkL2XsbG9nFw/yOUimVsuwrGGlFoYzlFjBhiowtxDowecZgHWpRKMZ5nYpoeYThGs5HBdeqY9hoxWfp+CdvKUCrZhKF5Odre82RycpxkpaxWaCpupN8XM3KrJX1qVMM8VR2yXJb9KxX4wQ/gySdlkNV89KSFiMqASZMOVh1segfbhYgSKKaZpO7qUv4fnFZLKhODuFUGRV2lIgXI4lgESqEgrhjHkQn/4EFZELz8sgjDrS15rtuV38p1RZQoV4yybikRojBNESz5fOJ2azSS88L3ZUFRLMqi4tOfloWGOkcmJ2UM0NxZaCFyF5IucFYs5DCMAMddwrLGiaIs3Y5NMTfM0SP7+eLnP74t2CufG+f4cZNqFeK4xMTEGBcXzzA1sYeIPMRZoAuxR7fXJZfLEIYZxsfBslxcN6RaLROELt2+QxieZmTEZLg0ThR5WJYMbmolbFkyMFWr4pv2fRmIVCn4MBT3TTpuRFV8XF5OVmqeJxaUF16QmiSDfnHNh0cVINvJIgJXio9BK4kSIiptF7bXItF8MOp1EREgVo3BonBLS/DOO/I9T07K9ba4mAiSQ4ckfff4cRGFlYr81rWaiE/fl9+810t+r0G3HMg+mYwcs9uVbTc2tpfz9zyxxPzMz8hxVUCtbcs1rWO97ky0ELkLuaLAmWFIiW17HSMy2Ko67JmfptMeZ2nJZG52clvmySc+IRaHc+dM9s3fS60asbJ6gZGhEpY9SRQ4dHodHMdkZGiGbNa8LAjy+SyuG9Fs2gRBiWx2jv37LUolETcrK0l11SCQwarRkMGrXk8GvrEx2a5alYFMFT9rNiVuRPmPs1k5pmVJ3MiFCxLlf++9Ym7WfHSkM1+U9eJqQmIwpTcI5BiqSquKNdFddz84lYpYNkAKlQ02DTxxQiwlcSyFyaJIhMn4uFxDU1Mi3i9eTFyljrPdFaOsHmmhOdg5WVlBTFN+ZxXUCiJuXFesIA88INfvuXPJvjtZcDR3FlqI3IXsVuCs0+5eLvjz5S88jWGYNJuyEkqbRA1DTLVzc/D222UM7ufU2SE2Kxfx/bO4TpbhoXmmJ+fIZTOXVz+ql8zoqIllmQSBDGhnzsixDx6UAef8eRmsCgWxcriu7KeCGF1XahZksxI3oppvVSoyqLVa0qV3bk5Wda4rAiQMkyqs77wj2x05crN+hTsXJRYVu3XdTf9NN8aTWCE5B1THXb0Sfv9sbGxPv003hoxjEeSVivw/NZW4XKam5GZZ8J3vyDWqtgsC+W1dV66ffj8JNE6X9FdYllzT+bwcPwwTEeL7sk8uJxbKn/kZeS0lQgoFWSxoEXrno4XIXcpOBc62F/wRc8Hmprg+Vlflljbtep64Qubny0xNFdnY2EPf9xkfM5icLLG2ZtJoJKveel32bTa3Z0B0OvIa3a6sfvbvlwF0a0tWSZ2OmGZ7vaQCp+or4/siNup1eZ12WwbHH/1Iaorcf7+81sGD4msOgsQyoiqzPvLIDf3q73g8T34zJSzURJKeUAbdNarzrprQVAaOsojojJn3x+pqIh4OHdqeMRZFkvXSbMq1NDYmoqLZFKuIKhT40kvyG2xuJq4Yz0sCT1XszmCVXEUmIwJExYq0WslzSsyUSlIx+b77xOqiOHBAV9O9m9BC5C5msMDZTgV/RkfFZ7uwIJP2+fMyURw6lJjfx8bgk580OXGiwMWLMhhVq7Lf8LAMapWKiArfT0zuzaZsoyqnVioy8A0NyWBYKsnrqmDV9H75fCJsfF+OY5rixun3YW1NtldFzRwnKQMfhuK2WV4WwaNqjeiV14dHiQj1e6nHBlN3d8ugUSb9bFZ+R1UYS5vmr53FRbkOQM75tDVJVSj2/SQ9d2NDtpmdlUXAO+/IddLpJOJDpeaqlgpwZQ0YhWEkVpB+P+kXpawgqpv22Bj8+T8v71eJkLExuWnuLrQQucu5loI/qpx6ECQR7CdPbjedqmyVvXvhrb
dk4FL1H1SsRq0mA6TrJr1k2m15TmVcdDoiIjodKSl94IAMVPW6CJNmU0RFq5W4ajxPVm1qcNvclMFPFT9rNkVo5PMSia/KWk9Pi0hqNKSr6FNP6cyMD4tpyu9pWVdaQNL/D/agUX+DIKmk2uvJLZ/XFpFrRVn54MoOuq0WPP+8fL+5nAj+pSWJBSmV5Np9+WURBdVqIhpUbJYKDk+7YuJ4uwhR1it1bXY6SdCqcquWShJnNj+fCJDBxY3m7kJ7XjXXjKrToRpjqfgRZQIGmYQ+9jEJOlPNsVZXZXDau1eEgloRqch605TBU9X98Dw55vHjYrWYn5d4jyhK9rMsWW2p6o2qpkGrJfEs2awcZ2tLBudvf1sEjmHIKrFQkONMTsogq2qNpGMbNO+fTEaE4uBklWanrJm06yAIkjReVY9Cm+nfmzNndhchGxuJCCmV5La8LNfj5KTcvv1tuVZWVxNroooBSbvalPAYTM3N5UTUgPyGyl2qgs7zebmOf/7n5TxZX5dtZ2bk/WoRcveihYjmfZPPi/VDmVBXV6XUc6eTbDM1BZ/6lAxwcSzColaTffbskdWYbSfm3kxG9leDWT4vk9D6ulhhstnE121Z8r8qJa18z7WaHGNzU4RGJiMTWL0uVpXvfS/pNDo3J5kBIO8pn5eB9XvfSwZzzftnsOmdIj1hwZVZFent1MSVDq7UXB2VWmsYSVNIxYUL0jdG1QhxHLlmJydF5McxfOMbco2urMjz1ar8Ve6xIEiEyKAVxLZF2ORySX2fRiP5LVVK7k/+JHzmM4kVJJMRAVIq3chvSnMrol0zmg/M2JhYMS5cEAFw/rxMQgcPJpkPDz4o7pU335TBqduVAWt+Pik0VizKgNXpyKBVrydVHbtdERadjjy2d69YOdbWZABrteQ47bYMiKoQWhDIcR0niUFZXk4KKD36qLx3z5OBcXg4scQ895wE0OnCSe8fFcisTPjpxncKNZGlH1NxDKaZiBTH0TVE3os4FnGtso0OH97+/FtvibhQIkRlqk1Py4Lg5Em5NZtyXSgRks3KtZXOblIWzDSeJ6I/CBI3KCRF7QoFWZR85SviBlKxK/v2Je5YjUYLEc2HwjBEHPi+mIajSAa2YlFMrsok++STMuEfPy4DXKcj26hgua2tpE+Fav+u3CvKNKxiR2ZnZd+FBdlWrdZUaqGqVaBqUqi4kXTxs1YrqbK6f7+Uvs7lZJ/NTfGV33+/LiP9ftmpzHvapL+TWyadVaPSdnUNkfcmjuV6ArlO9u/f/txLLyXNIicmEhfq7KwsBF59VcTBxob8RmGYVDZVImSwF5BCVUhVWTQqNVdt43myUPj852VbVVBtaEiEiUaTRgsRzUeC44iZVZWSbjRkkEw31pqbk//feksEQbMpA9b0dGKNUCJCCRKVWaPEy+amWElGR2XgXVqSFZyqOQKybS6XPB5Fsr1qU765mcSVPP20iKHDh0VAZTLyeltb8j47HbHoaK4NFeuhVsTpku7pbrxKlKTrTxiG7G8YSWl/z9OBqjsRRYmbMZ9P4rZAvvvnnkuql05OiiumUJBzfXJS6oM0GiLuVU0QZUlUAcPpGJ+0ZcpxkusqDJN+M+o3V+/nM5+R61xZxw4e1I0nNTujTwvNR4qKH1HFlFZW5KZMsbYNDz8souDNN2UQ6/WSsuyrqzJ45fMiAhxHJiXPSyapZlMe63QSobO4KBYUtY8ahBsN2bbfl8HTsmSbalX+fvvbEly7d68IqRMn5DgjIyJYVADgAw/c5C/2NiOK5PdS8SJKeKSLXqXdM+pxlaqt0j51DZErCcMk8yvpQCt0u1JDR/V6GRsTsTEyIjFRUQR/9mdy7qt+MdVqksGWLjC3kysmm5VbFCV9ZpTQ8Dx5P1/6kvx+Khh1py6/Gk0aLUQ014XB+JFz57bHj5RKki577pxM9qqB3fCwiJDNTRncgiBJ8W235XnVYGt9PYkdOXxYYlRUtUY1yamiWKr4kiolDf
KaFy/KPtUqPPSQiBFV2XFyMhFSnY4IFu0muDaUCEmvqHfrNaO2UQGqti2/VxAkgcsaIQiS/isjI+JyUdTr8OKLSbzI0JCIkIkJcZNeuCACplJJAstVbJVyxaSF4WBAqmpUp6wg6v2oWJADB+QaUcUGdX8YzbWihYjmunEt8SP794vP+s03ZYVWr8sApqo7rq5ur66qovAdJwmwU9aR6Wl5bG1NBkZVSlpl5dRqIoDiOHG/+H5S/KzVkj4X+/YllSlnZ5PmeT/6kYgnPbBeHbU6VjEicGXQqYobUSg3gGqMpjJATPPK/ih3K/2+XEcg1o1048bVVbmGVA0W15Xze3parrVjx+Q8Xl6W71Q1lFSxVXBlQLEik5Fb2gqiREomI4Lny19OCqCBXL/akqW5VvSQqrnuqPgR5cdW8SOqR4zrSpbK448nrpiNDXn88GEZ6NRgqDq0qgqs5bIMpBsbYhGJ48Tqks0mk5myuNTrIjhUto4SNRsb4pb51rdEwExOyiBumjKQm6YMtM8+mwzcmt1RrjTY3RqSXnGr1bhtJ7VIVPaMtkLJ+a5EyPT0dhFy+rSIkChKYjdaLTlvJybg+9+XuK2LF5PaPqplgqqbk47VURiGHE/9Fqo4mQoEL5fFivilLyUWlmJRXLNahGjeD9oiorlhqPiR9XVxvQzGjwwPS+2RkyfFjKy6dE5OysS2sZFUVVXZGK4rg2qzmaTptloielRpeddNBkrVqlzFIHie3NptWc11u7LtJz4hA77risCZmZHVZBiKGPnkJ3X64W64bhIPlGZQjAw+ptJ9s9mkrbwmSY2HJGNM8dprci0FgVhJtrbk+1Pn7re/LdeG6lKtKhurANOdfguQ6yyf3zkjJpuVRcBP/qQcRxWd0/1hNB8UbRHR3HDGx8VCoibyc+dEfKigtyNHRJAoN0q1mkTdq3of+Xzin+73ZWBUvWw2N+WYnicix7ZlZQeJdcT3RbioYk3KOtLriQh69lnJmlGF1EAGd1Vk64c/TMzQmu1kMknBuXQMCCR/d0rpVfeVlQq0NaTZTETI/HwiQuJYMmM2NuT8V+m5Q0OyXasl8SLr63L9BEEi7JQo361Gi0rLVVYQ5b4xTTn+44/Dpz+dxImMj8sCQ4sQzQdFW0Q0NwXDEJGg4kdUJkCplKTzPvGEDKRvvinPVyoiKHI5edyyZDBUDdZcN3msXpdj5/MSp7K0lPS/iOMkqLVWk+OpwMhaTQb21VXZttGQ96Ea5k1OSkxJvy91Gh56KKnQqhHS9V92a3C3U9l3SFJA4zhJ371bqdeT+hvpAmBBIELY9+X+xIRYQlS59uPHZb/lZfkuu10597vd7b1iBlGpt1Ek53e6G7LKJPv85xNBo/vDaD4qtBDR3FRU/EizKT7sel1u09MiDMbHpR7BO+/IwKpcKqrC6spKknqorCPlsqzk2m35v9+X4xQKIiI8T15PuWhUV1Dl6lHpv+vr8nirJa6Ye+6RjIWJCVllNhpSOvvIEQnO0wiq++5OabtwpQUknTWjitIpt9ndmjFTqYgYhu0uj05HgqZVD5dyWc7FqSkRCi+8INfP2lrisrRt2W8nF4wi3TG510vSch1HrDCPPCLXkBIhgy4ijebDoIWI5pagUNgeP6Ii/NVK8P77ZUB+/fXE5w3irtnYkPvdbpKloawjrVZiHclmk26+uVyyfa8ng7Xvy+CqXDnttkwIqnfGpz8tK8DFxeQ1VJBrqwX33nujv7VbE2XFSBcxGySdNTPYk+ZuryGiavCAnN/KTVWpwCuvbK9e2m5L/JLnSVBqtZoU7kuf37C7K0aJPSXG1fFzOQmKfeaZ7fEhe/Zol5nmo0ULEc0txfi4mJjPn5eB9Nw5ERQHDsgg+IlPiNn5nXeSgmXj4zJoKutIs5ms7oaGZLButxNT9vi4CIe0m0ZVk6zXk34pxaI81u3K+/mzPxM3zcGDSb+bqSkRJouLcsyPfexmfnu3BumiZSoANf142g2jHk
9PbCo4slBIfou7BZU2DpIxptwei4vSWDIM5TtR6bcqZV2l56oy7emicLvhOEk/H+WKUS6xYlEy2UZHk2Po/jCa64UWIppbjqvFj8zMyG1yUgbfjY0koO7QoaSnjKpDAUlmTdo6ksslsSO2LUJFuWgMIwnQUxNhvy9C5/nnZdX52GOyCl1YkOOcPSuPq1ojesUopC0i6aqd6VoUg9sq14xl3V01WxYXk6ZwR44kn/3dd+U53xdXTKsl18L4uAj1lRV5Xp3Hqnrw1c7BTCZxhanePoYh18X4uAhqlfWkysJrNNcLLUQ0tyzvFT/yyCNJnAbI/UxGetqsrYmYULEgcSz7tNsySPu+3KanxbqhXAIqbiQIkhWlqhzZ7cpx031qDhwQsbR/f1KR9dlnJetHB/HtLiSUEBnsRaNiS+62rrsXLiRZKEePJuesalzX7YqlsNUSK8XoqDRmrFZFfEMiptV5vNN3aFlJvIlyxSgrSKkkGTGq67TuD6O5UdxF6w3N7YqKHxkZkf+Xl2WV2OuJCfmZZ8SMrXrR9HpipRgd3d6jBmQQVkWf6nVZSRqGBKCqLA2VVdDvi+BQroJ8PqlY+e678Cd/IhPE4cMyAezfL/8HAXz3u3d3HQxVSG6wsqr6O9jZFWSSVL1mBt01dzKnT18pQqIoSRFvtcTF2GrJeZrNimVueTkJqFYdqFWBsp1EiEqL9v0kbR3ECjI/D5/9bCJCpqflvWgRorkRXFch8mu/9ms88cQTFItFJiYm+Jmf+RmOq77VGs37ZGJCTNYqGPLsWcliCUMRHp/5jAykmUySJbBvn6z0CoXtMQmlUlLorFaTleWePbKvOr7qBNztimWl05FJQJWXP3sWvv51mRCOHpXtDx1K4k1+8APZ7m5ElWZXPWdgu2smXVlV3VdVVdXkp4I072SOH0/cgffck1jkvvtdOd+aTRHbvZ7EI7XbUsRsYSHJCoMk02Un1G8x6IpxHBHrn/qUxD7ZdmKFLJdv1Deg0VxnIfLss8/yta99jeeff55vfOMb+L7Pl770JVqt1vV8Wc0djLI8HDiQDKwnT4oYsG0xLT/xRFLds9cTk/bEhKz8VLCd6muSzyf1QhYX5bGhoaRXCshg3+vJNq2WHFtV/1xcFFeMSuPN5ZIiamEo6ZTKdH43kc0momK3SqqDLhjLSiZMx7mzM2biWESIKmt/9Kg83mxK9osqQFYoyPcxNSWxICdPighRDR2VcNkN1URQxU0FgeyTz8t5+rnPJZ1x9+wRV8zdYonS3DpcV8Pbn/zJn2z7/1/9q3/FxMQEL7/8Mp/+9Kev50tr7nBcd3v8SK0mNxU/8ulPi8n73DlZ/fm+DLxbWzLQquqSpimDfa8nj6mmerOzEg+iLCKqroJy0TiO7NfpyASh3DhPPy2vAUkTv1dfldXu3NzN+rZuPKo3UNrFMmgRGRQlppkIF9e9c2uIKBEC8h3t2yf319fhjTeSKqaZjHwHw8OSJba5mcQzpW+7odxcKhZEPVYuS7C1csOoIHCN5mZxQz2AtUs1sUeUs3+AXq9HL+VYr6uEeI1mF1T8yNqaDNKq/sj+/eIm2bNHTNkqdsTzZNBVIkMF+KmVY6uVZG0UiyIw1OoTZEBvtZIeNSqVcmtLKsA2GrLKVO4d15XXevddES2HD9+0r+qGkhYiu1VWTdcRURVVQe7ncnemRSSKpO4MyLmrxOn58+Jm7PXknFKp564r4mRlJRHKcPXGiyrbSJ23qlhcLiev99BDibUvXadEo7lZ3LBg1SiK+Dt/5+/wyU9+kgceeGDHbX7t136Ncrl8+Tav2rVqNO/BbvEjtg0f/7hk2Ch3Tb8vFo+RkaS9OcjgrAqatdsiLlRVVtURFmSibLfFGqPqOSgRc/Ik/If/II/v2yeTzeysbH/+vFhH7gZUv5m0RSQ9eQ66ZdLZM76fFKS7kwjDRISUy4kIOXZMztVmMxEhY2MiPN58UzJqVH8k1f12N2xbbmo75foZHZ
W08kcekfNc9YfRIkRzK2DE8Y1JlPubf/Nv8vWvf50f/OAHzO1io97JIjI/P0+tVqNUKt2It6m5A+j3RYioM7tcFpdNHCc1GVSrdNcVk3evl3QRVdUlVS0G1Rm41RJBka5UaVmy0lQDuvLbT05KvMqhQzLJ+L78LRbF3P6JT9z5vvh33pHGa0tL8puoInOmKROjasamrFFzczIBx7Hcv+eem/0JPjqCQH5/kM8+Pi6f84UXkkrBpZJ8R2NjUthseTnpefReAkRlKcH20vkqZun++5NtDh26u+qzaG4O9Xqdcrl8TfP3DXHN/O2//bf54z/+Y773ve/tKkIAPM/Du5u7XGk+ElT8iApAVfEjMzNShn3/fimVrcpfDw/LwK1iPSARGP2+CJSNjaTWQqOR1BhRgiaTSYJY+32ZRJ57TqwqTz4plpKjR2VyjmP43vckW+FOnhCUNWRQcO3U82RwuztJpPX7UmsGxHI3MiLi4gc/kPOvXk9EyNCQnCsbG4lQgyTGYydUZpLKhlGPqU65Khh1bi7pQq3R3EpcVyESxzG/9Eu/xB/8wR/w3e9+l/3791/Pl9NotlEsyqpalc1eWpLb/v0SVLqyAm+9JYN3rSYDdaWSlIRXmTUqYFUF/Y2MJF16lXBRJeRzuaRuycZG0qfmc5+ToNr77kvM89/5jgTV3snm8bR7JS080um7O4mQO0Wg9XpinYMkkLrXExGiYpTK5aRj8enTcp6o2jXKunE10taSnawgqk7InSTuNHcW11WIfO1rX+N3fud3+MM//EOKxSIrKysAlMtlsndbEwnNTWNyMimHrdw2jiOCZGJCggFVKnA2KwJmbS0pTmbbMpj3enLb3EwKo7VaiavG9xM/v6pg2WiIO6hehy9+Udw9R4/KezBNsYw89dSdGZipBMVOAatqglUuhHRmjWneGRkznY7EBUHSrbZeF3eVEhqlkjzeasn5ubSUtBe4WkaMqkKbblKnLHZPPpl0xt2/P4mb0mhuVa5rjIixiwT/l//yX/LX/tpfe8/934+PSaO5FtJmchCz9dSUTASvvCIThCoLr4qdqVRftfJUAaqqOZhy1ahVqWmKCHHdpOS2acrrPP20vObqqkw6vZ5sl06nvFM4flzqq6TLlyvRMTws1qc4Ttxas7NiHXBdePjhxKVwO6LSykEyt1RTxrfeSjK1Mhn5vGtr4spTbQnS1qLdMM3tFqV8XkTHfffJYyMjIrI1mpvFLRMjcoPiYDWaa8Z1xV2j4keqVbnNzEjMxoUL4qMPQ7GITE6KBaTbFUGh/PG9nlhAVLde100qXaqOvVGUmNx9X4THd78rJvP775f3s7Ulx3/lFXlsauomfjkfMao42U6kJ9p0d94oSgrN3a7U6/JbQ9Kx9tQpsY7Uakn2VqEglrGVFTkf09aNndipJouygjzzTJIufejQnZdxpLmz0Z0ENHclV4sfmZ2VNFvTFEExNCQD/+amWEdMMwlKVT07bFsmlnRWjWqtrtJYTVOKVr38clL8DGTfxUVJ1Wy3pWrsnYCydOzUryRd1CzdayYMZZ/bNW6mUpFzCuR3dF0RmaoYWbGYVO09c0Z+d3WevJcVRAk1dT+fF9GhqrKqGBSN5nZDCxHNXc1u8SOPPSbm9ZdflomjWpW0y2YziQuxbZk8lRhpt2XytSwRMKoSqyqApmo81OtSO6JahS99Sd6H6yZBrK0WPPjgTfpCPkLSfXtgu+hIFzODJJbEtm/fmIaNDblB0rX2Bz9IatIMD8tnU12cl5aSZnXvRVqEqOqon/qUnDeeJ5YXHYyquV3RQkRz12OasnpV8SO+L6JgaEiyXc6cEYGismby+SS7Rlk8er1kZWtZiXXE9xNXjeru67qy/blz8Id/CD/xEzKJ3HuvpPeqlOBPfOJmfzMfjt2qq+5U4l116lVi5HZjZUWEJUj13CiCb39bfn8lYnM5sYitrIgwuRYriIoFUSKuUBALyMGD8vzevXL+aTS3M3dIkp
xG8+FR8SOq70a1KhkvY2OSZjs8LPeVX35kZHspd9dNMh46HZl4lFsGZFJqt+V5VdxraQn+9E9FmAwNwQMPiMWkVpOMmts5zMrzRIgogQE71xBRmTKOc3uu6lWsEUh1315PfjsV7Kwq8y4siKjd2Lg2EQLb64KMjsIXviAipFyWc1WLEM2dgBYiGs0ApZIM8iprY2lJ6js8+qi4bEolmRQ8T4RJoZCkU6qJQblqQCwoKngwihJLiQo+XFsTE/7p03K8+++XbVotWVVfraLmrYyqYZGuCaIESHoSVmXIlWC5nVKZz5+XeB8QS0W1KoXsGg0RIqpS7IULsm2zeW1Bqen7xaJ0lP7850XsHjwo8SAazZ3CbWgE1WhuDFNTkgKp3DJnzshE8NnPSmaNacpkE0Wy4lWTj+PIZKPcMmEoz/t+UgBNuXFUiq/q0lutwic/KW6a06fFpfPss/JYJnMzv40PhuoAe7XmdyBCRFmKbpeMmdOn5TcFESGLi2JB29qS33V4WH7XSkXSc1Vq7nuR/k5GRsQaZ1lJVVaN5k5DCxGN5iqYpqxAVYXMfl9EyPCwZNi89JIIhEolqR/SaCQWElVfRLkfslkRF0qgdLuJ+6LbTZqf/cRPyOtevCj///CHsiq+3crpDMZ8DKafKmxbJvV8/vYQIsePJ3EbR4/C22+LEFlfl9/I80SQrK2JuLyWgFSFsgo99pi4CS1LzoU7pdqsRjOIFiIazTXgeeKuUTUiKhW5PfSQWEGOHRPRsbUl2zYa4l5JW0dUV1TVAVitkJUVxTTlscVF+JM/kYqrc3Myua2uSkXOBx+8vQpVGcbuZd7Tj6VTd133xr2/90scSyCzcicdPCiN66pV+Z3GxuSzqAJlymJ2rdi2HOOZZ+Q48/O3hzDTaD4MWohoNO+DUkluKkticVEef+YZWSU7jogQNVGpnjXKOgIiStIWk/RjKmBzdVXiQx5+WIIdPU+ybF5/XQIi9+69GZ/+/WNZV2bBpDNmFCrb6GpF0G42cSy/MYiY3LNHglJbLREhMzNi/VpakvNCuW2uBWUFeeIJSSnP50WE3qrfhUbzUaKFiEbzARiMHzl7VoJWDx4Ud00+L9kRnpcEniqLiIqFUE31VAG0KJL7SoyoviSHDknMiOOIW+j4cTnmvffe3O/gWlB1VQZJuyrSBc1uVWtIFCV1XgoF+e2/8x1xm6nKvJWKWMRUVsy1YttyPj31VJJKfqt+DxrN9UALEY3mA7JT/MjCgrhwgkACF9ttqapp2zJpBUHSsl31FHEc+asmL9+XSVkFwx4/LlaWJ5/cXmuk2ZQV9K3MYPquIm0VUcW6VP+eW40wFAEIkklVKMD3vy+iw/clg2VlRX7nev3aU64NQ2KGnn5asrBGR8X6pdHcbWghotF8SHaKHwH42MdkAstkZJIyTZm4Gg0RJqryqsoocZzEnK+ec5xE4HQ6smq+/35pnhZFMiE+88yta8IfrK6qGJyslQvnVssMCgIJIAYRCnEMzz8v8R+uK266ixfFlabcbNeCbUv8xxNPyHmh+8No7mZ0HLZG8xExWH9keVlqQDz+uJjyVSv4UkkmXNtOrCGQuGzSKGESBDL5fec74gpQQbLV6q1da0QJkcHPNShEVGXVW6mGSL+fiJCJCRGQr70m1o9iMQlKXV5+fyIkn4cvfhE+/nE5J44e1SJEc3ejLSIazUeMih85cyYREPv3i+A4d07Ew8ZGEswKSQaNSglVGTSQBLIGgVTqfOEFmbxUrZF6XWqNPPPMrden5VqsHOmusrdKhki3K78ViOvl3DkRHKurktVSq4lrZmvr2l0xKg330UflO9m799a1ZGk0NxItRDSa64Ayt6v4ERWQet99UmGzWJRMC9OU53q9JJDVMJIsEiVGlNUkDMUS8vbbMhnef79Mjpub4qb5xCckhuFWQvWbSZPuIpsOVr0VSpa321IJFcRi8cYbIjhWV+X/paWkE/O1ks9LIbxCQRrU3WouKI
3mZqKFiEZzHVHxI7WarKhrNXHdzM9LMGuxKBPcoHUEdq4/oR7r9UTQdDpSW8Tz5DjPPSeFsEZHb8jHuyZUwOpuKLfMrZC622xKzAdIJsyPfywxP/W6pNVeuCAi5FpTcy1Lfv8HHpDffWrqur11jea2RQsRjeYGUC7LTYmRRkNW16p778aGTHD9vogMVY1VkS6RHkWJ1WR1NREje/bIRPnyy2J5mZu7OZ91kGz26jEiKpX5ZqesKrEIIhh+/GOxWvm+WDIuXBDLyLUWKCsUpEJuNivWsduxq7BGcyPQl4ZGcwOZnk7qj6jg1L17xWpQLouwqNWSipwqCHUwDkH9r9rMv/yyTHaHDkmA5ZtvSu2So0dv6MfbEdPcfRJW8TA3O2OmUpHvHqSfy4svJpaqKBIriWpu915YllhA7rlHrCjDw9fvfWs0dwJaiGg0NxjL2h4/EkVSP0JVUFWTYq8nQZOq5shuhKGIjnffFReCCmI9eVIef+yxG/fZdsLzdhci6QZvNys+ZGNDbiCxHK+8IpkxhYI8ruqFXAulklhB8nkpTKb7w2g0740WIhrNTWIwfgQkLkGVkV9eFnfNTq6aQaJIrCiq3shDD0mmx4ULEnvyyU/evPiLTGb3QmXKInKzuu6qUv0gouiNN+R7Hx6WoNRa7dqqpFoWPPKICEzdH0ajeX9oIaLR3GQG40eKxcSKMDQkfUva7USQXI1+X9KFf/xjyajZ3JTJ9jvfkayNm7FCv5oQUTjOja8hsriYuFvqdfl/c1Pex/nz1+6KGRqCz39e/t4qcTkaze2EFiIazS2Cih85c0YCN2dmJIsjlxNxsbIiYiQMr24dCQJZ5b/yiqSK2ra4GL79bfjMZ258GXVVvG0nlEXkRhczUxlHIN/r6mrSrPD8+WsrUGZZUj133z7dH0aj+TBoIaLR3EJYFhw+nBTUKhRkgnYccRdcvCgio9+/ussgiiQ+5PRpETdTUxLr8J3vSOGzGznpe97VJ2nDEKFyo6qLnj4tMR9hKFYQFY/TaokYuRZXzMiIxIJMTkqBM41G88HRQkSjuQXJZLbHj0xNiTgxDHEBLC3Jit73rx7I2u3K/t2urNqrVWld/+STNy6bQzV32w3bvnHWhOPH5fvq98USsrQk/9frYn16ryqpliX9fubmdH8YjeajQgsRjeYWplxOAldBSsWvr4ulZGVF7vf7V3fV+H4S9HrokEy6zz0nwZUzMzfkY+wqROJYJvPrXZo+jkWEgAi45WURIYYhbq9rccVMTEiczdycxPFoNJqPBi1ENJpbHMMQwTA5KfEj4+NJJ9jhYcmUaTSuHsgahmJdeecdiWloNOCllySg9eDB6/8ZrmYRud6BqmkRUq+LCFlfF2F2LQXKLAs+9Smp96L7w2g0Hz1aiGg0twmD8SMHD0pcg2mKyFhZkeeu5l5ot6Xg2fi4bPf663KMhx66vu99NyFimuKWKZWuz+tGEZw4IfdXV+VWrYob5lqyYqanRYTs36/7w2g01wstRDSa2wwVP1Ktivg4elTcDKWSBLPWale3jqj4iGJRBMm778rE/PTT1+895/Pby9Sncd3r06gvDKWoG4hw29iQz1mpvLcrxrIkGPXgQbFEaTSa64cWIhrNbcrQUFJ/BCSGIYoko2NxUWIhdrOOhKEIGd+X/S5ehG99Sybf6+F6KBQGhUgMRMQxRMRksxbw0b1wEIjlJwzlb6UiFpBK5b0DUufnpQDcwYO6P4xGcyPQl9lHRBRFLC6t02x1KOSzzM6MY+r6zprrTDp+5PRpaXZXr8sEurUlVoCrpaO2WiJaxsfl/z/7MynO9VFOwHEcE+FjmBYMxGNEccxWs8nb57vsmy4zVPjwdd77fYml8X35TjY2xErUbF59P9uGL34RjhwRkafRaG4MWoh8BJw8tcDXv/Ecp84s0Ov18TyXQwfm+coXn+Lwofmb/fY0dwGWJROoih958EEJYlUZN1ebhJWrptcTa8E3vgGf+9xHEx
NRa3VZXG+wttUHJoDt4twwY4LI5/xaldVakwf2TTA79sEDRtTnb7dFjGxtya3Xu/p++/eLFeTAAR2MqtHcaLQQ+ZCcPLXA//Zbf8ji0jrDw0XGRoexbZM33z7N0vI6v/gLX+XwoXltMdHcENLxI4Yh9UdsW/5fW9vdOhKGYjnodGB2VsTIpz8trp8PQhzHrFSanFmq4IcR+ayLbUO4Q+yK50HGten2At46t04h41IuZC4fp9X1CcII2zLJZxyMXZRCuy29dapVESOVinymq2XFWBZ85SvSKPBGl5jXaDSCFiIfgiiK+Ne/+3VefPltDMPg4uIatm0xMlzi4P45Nrdq/Mk3nyOKIv70Wy9oi4nmhqHiR5aWJCNmaytxU9Tru+/XaskkPjcnJeGffvragzWVaKg02ixvNFmttgiiCNc2iaIYw44YtIhghARxn27fx7ZMOn2fMysVHjk4Rb3dY3G9Qa3dJYpiTNOgnMswM1bAtqxt4qTVMrh4Uaw/CwvyeWu1q7/fQ4ekLsjs7LV9Po1Gc33QQuRD8IPn3uBbz75ETMzIUImRofsIQ4NOp8e5czaz0/fyzjttTp/+AX2/zcT4NNksdDqtKywmGs1HjWHIJBuGIkKGh8VdsbgodTR2y6zp96XfyuSkVGF94gmpPXI1as0uZ1YqrFVbNNo9wlAiQi0TAgMa7R4QMDjkGGaE6fbo9EMsM8S2LBrtHiuVJhfX6/T6IbmMjWWZhGHEaqXJwkaNrGtjmSamaeDEOQy/yMqiy8qKiK1ud/f3atvw0z8tVhDdH0ajufloIfIBiaKIb3z7BTrdHnvmJjBNi15/lYw3S7GYpdHosFmp0Wy2KZeKHDxwWLIGIsh4sH/PIZZXNvjDPz7HT//kLIZh4rqXzNQZ+as6sGo0HwYVP9LpJOLk7belpsZu1pEwFGtKqyVxI80mPPDAztsubtR569w67W6fnh8Sk+S/RIAfSnaM6frA9sAT04qwPVFEYQRhFNLu+ixtNOj1Q0p597Irxo9i+kFItx9gGgYTQxnqdYPl1Yi1iz5h32Jj/eo11++7TzKDRkev7bvTaK432m2vhcgHZnFpnaWVdQr5LH4QUciN4dglgqBGGPWIaHLq7DqdbovHH7mfWqOHa+3Hy5QwsYhjm5FyibW1FufONRkfL2HbUmXyaueg6laaFiyZjKzsdJCd5mpksxI/UqnIObO+LhVHV1d3t47UaiJgokjSX596KnkujmOWtxq8cXqVrh8QX8qLNS+l6cYAMcTEhBG4XkB74PimHeO421+80elJT51CIlriOKbe7hFEERlX3DKVLZNOw2HlQoZmw6DTvPSCO6QBOw589asiRO6yMV5zC7NTosPB/XM8+vARxseG7xphooXIB6TZ6mCZJuPjQ2xs1CgVPCwrJwNvHOA6ZRw7j1eErc0iG+t9stl1ykUYGS6SyThYjonf7OMHfUxThEQYSoS/7yc3FWxnWTKgOk7SKGy3pltKlCgrS1q4aCvL3c3wsMSQFItSc+TECTh7dvdKo/2+FAYLw5hKNeSpZwK6vs9mtc35tRqtbh/TMAjjmJjtdTriGC55aTCdK9WOZYUY1vZo0jCCarNHPxDRMVTIYBomPT/EsUwMA6obFrV+zMr5DO2Wjd9VA3UEbL8oHnpI0nKvR9E0jeaDcvLUAr/5W3/EVqXOzPQYuazH4vIGf/gfn+X3//232DM3ydjo0HvGE94JFhU9JX1ACvksmYzH7PQErWaXpZXj2JZFuxPQ60MYWhg4ZDNFSsUcxWKeIGyysrbFVtXh4P4ZbMvENLsUi/IzdLuJ+NgtuyGOLw3uYVKwKgjk5vvJNkq0KCuL48hjO52faSvLoGtIW1nuTJSLZmpKzo29e+HYMSlstpN1JIpCTp2C0pDPxbUeU4dXMQ2DIAwxTYMoircJEINLFpFLFLY2yJ05y5bxU9uOO7l4nOJUk+b49ojYGOj2fXp+QKsbMJT3iOIYI4LNdZtG1aGyVK
DftrgiAPYSrgt/6S/plFzNrUcURXz9G8+xValz5NA8hmGwuVXnxMkLhGEEMfh+wPBQ8arxhLtbVI4yPjZ02wgTLUTeB2nlmct6HNw/x1vvnOGRhw5z7O3TnD23hB+EOI5Nt9vDcWyC0KZ66gT79kyTzWaIjZjltTphdIZyucCD9x/lsUdLBIGsPHu97d1UB6tAhmEiVpT4UAJkcDu1fxgmPUiUcAnDpNKlYSRiRYkX277S2qIGc8e5UrA4zkf7XWtuDOn4kVxOsmXeekvcNwmJKq5XXbpd6PUnmD+yTp+IKLqyVOmgCPn4v/vXrFvzvLpvuxAptzb41O//Gy7c9zBvffYr256LInBs6AcBW40Q0zDY2MzRqLpUl4YJ+7tHmj76KHz5y9e/q69G814MzhsAp84sc+ytBWam9hBHRaLY4My503S6PcZGy/R6PpVqgziOOXJonhOnFviTbz7HwQOzl0WFsqhsbtUoFnIElsnq2havvPYuv/8H32LP/BRjo+XbIkNTC5FrZCflOVwuYhoGFy6u0uv2yeczFPI5mq0OcRwzMzWGY9s0GkXCYIiMW8a2bVy7R7PZYW5mD4888Ay1mnlZBORy8ncnt4sSFUqspIVLWngoomi7WFH3B0VOHMsxgkAmpPTrRdGVq0llZUlbWwbdPYYht0HB4nnaR38rouJHJifFSnLsmGTaBMGVIqPfdamu2oSBwej+ZRw3INuoYft9AselUywnJ00c8/gf/S5ev0vJ2Ro4UkwuEH/Qnrdfp5/JcvITn728X6ZRw+73KPR7+G6Gre4ElW6O+mqJONxZ+Vp2xF/8Sz5zcyau6/BRlo3XaAZRY2q/v/2vsmovLa/z8qvvsry6SaPZo9XsYxg2rpeh1ZrFNoaIRkcxDBOTKnPTs9i2TSEXsVXZYnVNVgTTkyOcPL3A4tI683OTly0q5xdWqDdaLC6u0mx18IMQw4CM51Iq5Tl8cO62yNDUQuQa2MmX1+70OH7yAqvrW3Q7PdY2KpiGQb8XMDd7mOnxEYaHCoBBNtPHNEaIQoOQGMfOUy4FPPLAQ5RLE9RqIjquxXyc3kaJgEJBhEvaqqFEjLK0DAoXZX5PCxd1UaXFy6C/3zDkeVWFU1lV1HaD7h/T3NnaorZRn8e2twuWTEYe0yb168+2omGewZ59YLkwN2/y2qsmm5tX7hOFJrW1ImGzy1O1/8DepdfINeoYxISmxbl7HuLkM58nu7VBrt3EADLhYKhqTMnfuCwVDrzyPCc/9gyFepXJ428y+84xMt0WAKeHPsba+GepeTlgZxFSnF5n7z0VVttZKmctSlmPkXKWrOu8ZzE0zd2LWuClRYQaLwcXbunFnLIsK9f4oPV6a6vG68fO0ekZ2PYUUVDDsnyiKCDoh8RxyGZllUZzg7GxPL1ek2Ihi9/3aTR6rG+t0u6YLC37lMtlPKfMqdMx7RZsbNZ4592IbnuSTqtFsTCKH1yg21uCOKYV9jh1eoFDB+Z2tajcStyVQuT9BPfs5MsD6Pd9KtU6tWqDkZESEwyTybr0uj69fo1Cdoxao02j0aLXD4iiNRzbIpfLMjM1Srlc4MCBUTwvuRAGT+wgkBNeneCGIRO9uin3ibqpgNfdUM8ZxpXCRf1vmlzhJlIXJCTvRb3PdFBt+mJMb6eEjxIsKobFNBMBpt77oJUlLarU+x8ULNrK8sFRJdhr7S69fkCnH2AAnmvjlm0O3J/DvWCxerFAFFx5cjUbYxzrPEk5WKGIVBAzo5DDb7/KgbdfpZPLXxYabnRlcY/h/url+xZw+PlnmX3nDbJ+73KcyaniY7xR+jwXzUMQuhISkn4rps/+x87j5nxiDDKuTRTHnF+rcXa1Qj7jknFtyrkMs+NFyvmPoHa95pZjUFCkxy4lHtKu6cHxNgyvXoU3jRqLbVvGn3x++yIsDCNefv0Ezc4mE2NDnF9YodVpkc9liKOQWrOG7/ewLJMghM2tPpZl0u70qDda9Po+w+UpSi
WJsG7WezSNHhsbbUaHoVL1qVYCgiCD52XIZi2CoM9QcRrTtAmjiFpjmbfeOcOeuUlmpka3WVRuNe46IXLy1AL/6c9+xLG3TtFqd8nnMjx4/yF+6ktP72i2Wlxa59SZBWamxzAMgzjyiCKbs+cW6HZMRodHaXd6ZDMlCvksI0OwvrHF2Y3nqNakwYdlSqR/NuuxvB5xfsHliz/xNPfdWyaKrnSdKN6rSyjIhaOOkb6g1H3Y3vX0akJmt2BWdQzYbuFIu4/SYkpl/ajH4/hKS0t6paFQbqcwTG7qPSvRko5dSbuIVBZR2oqiBonBWJY7fVF8rWXRa60uJy5u0uuHWJZB1w8JI6n5QT8g69rkhhtMWD5jEz4XThepbaasESGAwUr+fr5plXlq84+4p/7C5adNIN9uXf7fIMaIe8SG+MmtqEs5JUQADr7x4uXQ0xiDE8XHeXH0p1jPHuCy+kglxpRnV5g8UME0Lj0bQ98Pafd84ji+5F6M8WyLrUaHVq/PkblRLUZuYZRYGLRQqL9qzFPjyaCouFYxAcnix7KSEghxLOONck2nEwfSY6a6rxZR6e23KnXW1rcYKhTodg2CfpZiPodhBpiWw1CxQLfbxXE9iC1M06GQtak3WpSLIQYGpmVgGiYxYGVjXMfmxIkA145YW7fJZffi2DGWZWMaNp5TZKtymjAKME2LKPRZXm1Sq7coFLL01rZotjq7fhc3k7tKiJw8tcD/45/8DidOnZfI5EucObfIO8fP8X/6pf/iCjHSbHXo9fqXg4zCsMTaWpNKxcWxZ4ljKOULDJWnZdDP5xkpBWxs1YjCCNOUwdOyTGzbIQoD/DCk0ypx7Fi8zRqh/npekna7U8bLoDVi8H76wrmamFEXTVo8KAGwW12JtFVmJyGzk4tJZeRks4lQSFtH1EpEPb6Tiyj9ftT77vW2D0DyPSeiZXCwSFtblIBRomanWJbdUqNvZdIWjnRZ9EFLQBzHLK5L0bBizmGz3iGMIjKODQb0+iHtXsBoMUOj3SMubjB3b53MUo7VsyMQbB86qpk5vjn1CzSdET62+XXgyswZgC9c/Od8f+Jn6dl5DjVeIRttd9ckhdBM3il/gh+N/QxN98oVnONXuOfRUwSjBS7Hyl6KS2r1+oRRjOdYRLEUQYuBUt6l3uqzuN6glPO0m+Y6MygoBi0Vg9aIQUvw+0XFpaUtw4aRWELUGKD+qvtp0aEWdZ1OYhFOj0Fp8ZMewwbfb6+XxbMexsCBKGK43MOyTaKof2lBG9LNtCnmc/T8CL8f4jgulukDBoZpYhomYeQTBj5RHGEasLIWUMg3cF0P1y4Qhj05VlDFD7osLr+CH/UuzW8xnutQqdSxTAPPdSjkP3x36+vBXSNEVF+Y14+dwHUdSsU8jmPh+yH1RovXj53gt3/vT/i//sovbnPTFPJZPM+l3elRLOSoNc5w7J1zrG82KOYLGIZNu7OFYfgYhks/KOPYLr7fxXM94tjAc4uUS3NiGTHBwCCKQo4dazEyUto2eaYnUXWRqAvsvdwu6iJU+6cFjhI26rFULOG2C0z9HXSxXPl9bh9AlPtlUAjtxE7iJS0a1CChSFtH1GurgUUNNOlsovT7j6LkvalBJj1oDFpa1OCkviclXlw3GcAGXUO3ipUlbeFIl0XfyRLQ6vrU2l1yGZsgjFM1OuSDOLZJzw9o9wIMEwI/ArvL8FyH0e5Z+m9GnCo9AUbyQ/lWge+P/jRW3OfRrW8BV4aK3t98ifuarxACDlfONgYQGhanC4/wnYmfxbdHrtjmMxf/Gfe3f8yPH/4F+nFSHMSyTBzLIgxjHFs+i2lAEEaS+msY5DI2tXaXVtenkNX13a8FZdHcSUx0u9tFRHox817jQHqMUSJAxZ2lrbiDLty0FUJdd2nLclpApMWNEg/qddRrwvbxcKf3lx5P3mtsBIhiC9PMEmOAGWIYYqEzDYcoDgmjEMu0sV0bx43pWhG5nMWFi6uEfkAQBcRxiO
93aLVXsGzwAxElfX+SL33+MSrP/ZATJ88AITEB7U6NTr99ubCgYUDfD3j35HkmKsM89eSDzM6Mv/cPfhO4a4TIwsVVnnvxTUzLZGy0jGUVyGX2ADA6HFOtNXjneMyPflRlYmKISrVGr9clm3HYO/cQZ85eIOsVubCwShQZuI5J3+/S7YZEcY9Od4Nmq0GMj2WZNJsdysU8xWKOmcn7yYcmYWRg2RZxFNJotrHdEuPjpcsXRNoaoS5w9Xj6Qt1ptZBeAaRXBVcTOGkLxm7xJYMCJ21NUG4aZSVJB6+ms20GL9hBEaNWILuthNRx0+9pJxGTzSZFq9LHVgNietAatMao+2r7RmNnn/FO1hYl9JR7SH0/O8WyXC8rS9rCkS6LbtoWpbx5hSUgCCXt1rJM+n5IFMc4RiLATdPAD0Lq7R5xxOUU3RiY6Zzi6PK3Odh8jR+M/wVaaYuFVaBtFokxMK6wh4jQMIh2qfwh1J1xltx9V4iQTH+N/+rMf48T94kAu5/EnMRAHMXYrkkURQRhRBBEqHdtGollMuoFBOEOJ9odjhIUO4mJbje5JgfdHbsdC5JxKX1LX/9q7EgvLNQ6b3DMSwsGtWBIX5/p46v90+9np3Fm8P2m/x90Q6v3OzhGpq/5wXi8wbEoGUtNjr21zMbWJvmcw2ZthXanRTHvERNSq9coFh0mp2a4uLTM2EiRxdUNjr1zXBYB/YB2p0mrs4ll2QSBZGIahsHqZpF68xiTk8O89Op5/CAgjmLCHQZP27JY36gSBCH3Htl3Swaqwl0kRM6cW6JaazA+OgwYZNxJspm5y8+bhk+n0+X7P1qGeJlqrU4QBsQROE4exzrE4mKMbRxl//y91OoNLLOEabnYpkEQBYRBSBSHokjjGMexKBaz2LbM0pfqTuIHEZZpk81kL1+Igxdd2mSYDlZN39Q2g7e0OVJN+mmrQfq1BlcJV2MnsaNeL32BXotlZ3DgUBexCphVx0y7UNT3MegT7vWkJ8rgwHM1K4V6f8rSkTaxpgParmZ+VdaWZnP3AXsni4uyTCnRksmIiEoLlvdbsj9t4Rh0OexkCZBiegZhGGEaBqZhEMUx1qV9oygmjGLavR7hQJ2Q0LFx4oD76s9zoHWMl4a/xEujXyI2czj+JrOtd68QITG7J9IOPlfurzLWu4AEg5hAzFfO/SPu6by1bb/A3V4kJIhiOpfcMpdXrYBjycDuOpZ8XtPAtm7NAflaSK/+lajodkXMdzo7uzx2Ir3ASd/fSUyoa1ZdE+pvOshz8Kbea7q0wE7XUtrCkLaEwJXXwKBl+Grj0KDrOC2K0uPS4DWatsAoV7LaN/050gua9GNyMxkfK3L23AVWViUuIwodavUYx3EpF+cZHSmxsRkyXLoHYgsjLjA/M0QYhpfdjZXqWYLQp1pboN5cxjQhk/F4690znDmfEdEd7Dz4xDF4nsORQ/PYts07J87xE5/92C0pRu4aIXIZQ37hXn8N0/To932azRbtTp9ev0+1foZCPsvY2DD1RotGo0PfD3Fsm3xWaoS4rkev5xPHdTJeCcOQ6o6mYWGYFrblYBo2hmkT+A6uk8Gy1VI4xu93GR/N4bkjrKx0CIIQ27bIZj0MY/tJkp5QdxMK6Ytz0GQJ2y+Y9DHVxTZ4UQ0eR63iB32v6cFKTcppS0P6Ar3mn2cXsTMocAaFzWDciRJzOwWVpbdPC45Bdrte1edTxx8MahscoNJ+7yi6enZTOnhOCaVsNnEB5XISoT/YGDFt4diJQUtAPuNQzmXYanQo5hw8x6JzqZmcihHp++EVIoQ4ppMvXY4XzYQtntn4Ax6sfY+OkcMhoOQP1guBnuvh9Xu7ipF0X16TmHuarzBx6r8niiMmglXMAWETAoFzZbUyP9y+nQlEccxKpSXBqxiMFLPkMze/Ap8SFGkx0W7L33T802CMFOwuIgZFwU4LnfR9uNLtsNPkupPLIs1OgiF9P/2/sqQOiofBazV9U8dR99V1l7
b8Do4Zap/0wu5afhO1ePswbG3VOHd+Cc9zMS2Dfj/EMEz6/YAgiLAdGzAZHy0zMT7KsbdO0utJlck4hk6nQj/oEMcGpmnheUUmvBKWZWOZDtkMWIbNzNT+y52u05gGtLprjI+bfPzx+2i1Ojpr5lbgwL4ZhkoFarUW3rhLEDZZ23iN5dUNfD8gCEPCIGRouMj6Vswrx5qYpoHj2NiWSd+XtMYgCMlmPbq9Pq7j0O35ZL0pSsVZHNvBtj0ynkccRfSDEDAo5LMMD5fodfu02j0yGZcHH3iIC4uvs7FZIwhCLMtibKzMfUf3MTkxsm2yHDRNpq0YOz02OOAM8lG5B9RFPzhI7eZeGRRDgwPDbqJlcD/FTvEz1xJLM/j/Tvum/w4Kh51EkHJVpeNMBk3Q6eDg9OQyKGR2shSlV33p7029dhTb1NpDZDKQyYLjxmRyIdlMhOPFYAYYRmIJMAyD2fEirV6fRtsn6zn0g0ia10XghwHBgAgpbG0w/9arzL35KoOnUNnfpMwOBUcAH3j9i19l9t1jTJw9iRMlJ2XX9Xjr01+muLnGkVefvyxUbEIm/KUdjyfPw6P/7l/zxp/7izQnpnbdzrJMLBP8IGKt1mZ2rMjsePEKq9GHIR2flLZMqIDHwVoTOwmCQSvGoHskLSjSAsEwtrsed3JBXI3B813dT48/6XN50Pqw2+Sfvm7Sqfk7LQ4Gz+33uoZ3YnAxNejOHhwv1T6DY1d64ZcWPbtZngfHHzlOxMkzS3R6ARMT44CB7wdEYYxpmdQbHUrFAocP7CWbzVKtNun1Mniuh2OPEgQxpWIP329jGBZ+0Lm02Ivwg86l92biOTY9P7hsbY9jqNUv4gdtbNshn83TbVdYWtnEskyWljf41ndfZO+eaQ7sm2F+bvKWsY4Ycfxep+qH53/5X/4X/tE/+kesrKzw8MMP80/+yT/h4x//+HvuV6/XKZfL1Go1SqXSh3oPURTxq//gf+Mb334Bz3Mo5HOsb1RotDrSMyMIyeWGmBgbYnFpHdsqMTa6l2wmgx/4tNs9YiIcyyI2YsIgIIxioiikmJ8kmymTz42Sy5bJeEWiSEzA/SCAKBaJGkWYloHnuUCMY5nk81kMU6KoO70erm2zd880pVJ+V0vH4IUyaKnYyfWx0/l2NWtFeuIfvJB3ei79WNosO2hmHRzw3us5ZW0ZHEh28hun/9/tMwx+7p1EzuAAc7Xb4Hc++N2mjzlI+jjpIN30AK++x/QKdjBNUV4jptf3CaIYxwbj8oAZy3cYh7iORbngYdsGliWWltj08aM2sd3DtHqEpo9phvRCHz+MLguD/NYG9/zwW4wtnL1qfMcgMXDx8H0c+8J/BkC2VmV4eQErDGgOj1GZngPTpLC1wRP//nfIdNvvqxZqDCwfPMprX/7zyfcK2JYh348Blmkgw5zBY0em2T81vOOxlABQYkJZJzqd7XEVO4nKdKPKtKDYaUKERETs5JbY7VqF3Vf1u1kkdjpn0+6GQdeFOv+UxSItGHZ6TzstlHZ7XH3u9L7piX+3RUL6O1Hbq23V/+mxZtAVlH4fg9f9TqLkamLuWiy8YRjS6XQwLRPjcu5YdOkGMRFhGFEuFXEch2q1TqVegygmunTwIOhJXAjg91v0fCkFUW0sEUcRruPgODaNVp1et0YYRxgYLK2+QbdXw7YtHEfsDCNDJWqNJt1OD8e1yWUzTE6M8vTHH+Tn/8pXrlu11fczf193IfJ7v/d7/MIv/AK/8Ru/wZNPPsmv//qv8/u///scP36ciYmJq+77UQoRkPTd/+c/+R2OnzpPt9dnc7OGZVnYtsXYyAyF/B6CIKTRaFEsjJPNDOO6Lo5VII5NYiQdUmotSOBQFIUoJ7dpWJRLk1iWROO7jkMQhkRhhOM4ZDwXz3VotTr0/ADHscnlMlhGsr7s+X08N8PE+BCkhvz3Mo1e6zaK3QTBte4z+NiHWWCmj5EemGDnwWHwsZ1Ehfp/0JIx+Jrq/u
CksNtxB1deg8/vJoLSr7XbYLrTex7cb/Cx7e8/xL8coKkOEhNHIkwcy8Q0rW2TkNyPiOIIgxjz0qq13esRxhGWCYYVMrZxgbG1cxT9LUr9DUr+OoWgghd1dhUOEVCZnuWtz3yF5sjYLlslFLY2ePw//B65VuOaxYj6Si7c+zBvfU561cSRQRwaEDoEvoUZu0ShRadtMl4s41oZut0kUFMFbaZdaGl33eBve63sdB5fi8UuvdAYdDmkLWRqm/R+aUGz03tPWwzS7+1qon23zzH42ODjg8fe7bGrjVUfZHa6lrHoIzSIXT5e8l2H+H5/wE2aCBGI8YOQQiFHGITU6nXCUGILxbIREwQdwkuWwzDo0+lV5fk4AAwMA9qdLYIwYHPrDFEs5lUVS5LGdR36fV9iwUwD07JkMZ7L8rHH7tuxbMVHwS0lRJ588kmeeOIJ/uk//aeAWCbm5+f5pV/6Jf7+3//7V933oxYikBQ0++Hzb3D8xDlK5QJTEyNMT47x7sktqpWWNK6zbcIoZmriCIXcvKTcxmCaNvnsMIZpEV9SsMalE0MVoTEME9uyLje/wzAYGS5hX+oMV6+1LwUKxriuTSFfRE0ccRzh+yFTk2M4jrttVawYXEmkHxtkt8eutu1Ok+FOk+DVVg7X8npXY6d9dprE3+sY7+f1BlErrcHndxMsux13p0H7Wia1D3ZlXi1ncief3HvkWF7eLIJLwdYAZhxixiH76q/z08u/sU04xEDHdli55yEuPvDoNYmQZOeY7MYa933rPzK6tbbNd1yzh3hz6DMsZY7Sdkv4Zoa+4RKaNrFhE9ouXJOEuQ0LxNxGXG3Bci37vNf9QQG+0/M7WY53cqMMLk4GLZ3v9R4Gjw/Q7/e4sLCM49gpMRJf/huGAX0/4MD+Gd559yz1Rgu/3yeMI+LULbr0t99vEYRdiCIiYqI4wjIMao1F6s3VSwIlJo5Dao2LRLsMLq7r4DgWgR9imiaOYzEyUuYv/4Uv8Lf/65/9yN0072f+vq4xIv1+n5dffplf+ZVfufyYaZp84Qtf4LnnnrueL70rhw/N80sHfpZPfPwB/td/8W8ZGxliemoUgNPnLlKpLaAKQVl2hiCYZ2X9TYjViiRPWJ4lm83Q68ZYdm7b0GcYJo5t4+QytDot+j0fy7apVAMyXhnDNJEAJAfTiIkjmzC2sS5ZPwwAw7/cywWubUJ6r22udVLbbSX/QY51tW3fz2d6P8d4r9XXh33N99rv5mORmIIVJjtP0NcoQi4fIyEybCIDTpUeobuaJRslFRtj4PWf+s+pzO59/0tPw6AzPsnbP/UXeOa3///s/XmQJOl53gn+/AqPOzIj76uyjqyjq/o+0UCDAAg0DlGERC41HI1mKGo5nNGMtLO21KyM2DGbWZrNGk0makcrmY1M2uFKpK7hSCRFkELjIAgSBNBodDe6q7u6uo6sM+8rMuMOv/ePt77yyKjM6qruussfs6isiPDwiPBw/77ne9/nfd5/BpF8Dx+D/zD+t6jkDt3c/hLccewUIdktmrfT9fZB0cEPwgcRoVsVDdl9PxYpaxQ/CK+kZqA7QhkEIbals7RoYltTDA1wRXAaXUnJOHhBe9cxRTwy5cntMq6IRnOdKNpORDy/Rbuzia7prG+eZnXjhERggFqtyfF3z951EettJSLr6+sEQcDIyPYvODIywqlTp67Z3nEcHMe5er9Wq92Wz6XrOs89/QjPPPkIJ06eY4wBNE3jwN5JLl5aot1xcL2AjOGxvPYWtVoTwzCwUiatZodCPkOk6fQXHiFCVM7d52TKTpFJ27Q7DlEYMTJUJoxsWo6F60C77VEsZrBt2WY6O4mdEaMpz3HxAo+ZmScoFlO7hjB3C292P9ZLjHd7zW6r8+u9x04XyW4i1d7Pdb19XC/asFs6pPf/vZ+nN9y80+MflFq5ke98o8Toevvo3e7DERyND1713yrmFFKzhsg4l7e/exh8pBHf9Fy0rgE1IqJmla
7/ooAbDHZE3FjkJMFHxYdZiNxo2ubehQ5YYs4XRBIlR6XDoitnnmiYTDMLgGnIF01ZWaJIWi2EobfTzq9GhqMoILiaY4uu7CeF67W6HyIIi5hmBi0K6ThVKltnCYIWlmnieh6Vzdpdt36/p6pmfv3Xf51f+7VfuyPvpes6X3r5RRaX1jgzO8f46ADj44NMjA1z4dKC1GcHAWYQkE6n8H2fVqtNGIU0Wi2CIKRe/xGapolPCNIIT6IpOpm0RalUwE6laDkF/EjSMmEYcv7yApEWsWdiBMf12L//cdLZHFEUcWlhjseOHeCnf/pFdH33kjp1+6CSu97HdivzU9v3Cux6lfrqvO8uz+0Vg/V6k3wQabjZ3PsHYbf9936e3R7fbZudPv+NkJhe9H7fbrFx92t7xXW93++j4cMc9OjqX42IVNDi+fU/ZMBdunarj/gZfSu1bRcWIR/b/DavDn2ZQM/SG6G5OYTcz+mZm1nx7ySq3m0fN7Jvdf/DRit6sdu5/FH3fat0Ih8+ImPgeS6tVgfXc/H8gCiM0HQNyzQwzBSmqdNo1ImI8K8qbOUf12/juI34MU0iIbqmERERhiHNVgXXaxOFkjaNoggv6IhukQjXa9FsrcefC6g25nHcFtoVvUgUSg+bu239fluJyODgIIZhsLKyvbHVysoKo6PXltx95Stf4Vd+5Veu3q/VakxN3R5FL0ia5pd+4cu88q1XmT0/h7NaYXx8EMsyCEJRNjuOS73eotLuEEURuWyGVlv6eEBEuW8ffcURSsUxDN1E1w3QdAxd48D+Ker1Ns1mm3y+hI7YkA6XfdptF93QKeSymFo/9c2QVttlpPwshcwkv//7MtDe6kn6VmKn6MWHIUi7RTRu9H179Sq77e9GxLa75ZV7ywpV1YN6j26B427VEteLrNwu7PQ91C2MfFJ2iJWKsLMR2WxILh8S6h1qnQaa6WOYIYbp8+K//xeU11eu/2ZdSFW3PtJs1TZT+GhYXXTkuco3eK7yjR2399H5/hf+Kusj+/Aci6CTwnVNAs/CCFNokUXg6bgemLp5xS32WsK523m4k1boVmM33UHvrXvbnXQSvfevF5X4oO+jtuneZ++xuBGys5sWo/e20z6jaOfXq8+12zW82/jQjZ0Evt2v/TCIX5sinQ5YXqkCPum0hWmYRHg4bgvfN4joiAlfl3pYzkcf121u228mnSKXz1KtNvCDAMtKYxpp6s0V/MBF5J4ScfEDj2Zrg8Xl4/iBs20/slBO4Xk+uqHz6NEDd936/bYSkVQqxTPPPMO3v/1t/vJf/suARAS+/e1v87f/9t++ZnvbtrHta02KbicOzkxxYP8EC4trNJpt8jkhGt/449eYPT9HZbPG5bkVUrbFYLnE0vIG9UbcqEvTdHQjg2HkrpQNmgRRSBBCpdKkr1TA9zTCMIdhGui6hk1E4HewbYu0nabZ9NB1jVKpyMTYIIV8/uoFdj3Pj+4B4YNW492T4E4Xqvrbe1M+IddbFan3Vv/fKVKy02e+2cn5RgRv3USi+/PvdBwUWehuFrhbOfBug1Tv4Lnbdh/0/XabULoN3JS9fndjPtUYURmfZbOxAVomE/+/uysxQKMd8O6FVWzLwDK3n2SVWhu30h2q1TjxqS/yyd/9rSv3rg8deOJ73+LQG9/njb/0V2kM3Nwg9/Hf+U1KG2s3VTnjptM0902Q0X0yeR9oo2tgWwYpy2S4T8rhHS/gsX0j5DPXj4hEUVzK6zjinttoiPV/qyW37sZovQ3bdiPeat/XOx92Om92ek3v9XA94WXv9t3XyU6fZafXq8e7icFOpONGJ/HbScp3Ike9Fgi99gfdpoe9z12PAO5OBEPePTmH69UolnJ4Xh039DF0nWzOYHOzimnqbK2vUa82CQmukonoSmRE07Qrn0cjnS7Tdkw2ayJ5MHWDjtthq7a8o0B1df3UNSQEIIpCOo6LGRjs2zvOf/Kzn7vrfiK3PTXzK7/yK/z1v/7Xef
bZZ3n++ef5h//wH9JsNvkbf+Nv3O63vmHoun6NUOfggSkWFteYPT/Pv/6drzMxPkQ+l+Gb334Hz8sDBrpukLFLeG6breo8umaSsm0sQ4wcao2AIOgw0N9Hq1Oj1fKv1okbhsFTTx6jr5SnWq2DBv19BYqF7I4TbvdkuFOKoXdS77Vi7t3X9fZ5vQl/pwux14xI/e12T9zJuGgnp9OdoguwvalWdxO7bqvp3QyLdtLK7IbdVsTdj/eu1rqPR69LpLKo7+6s3Nv5s/f5dPpaIqE6MN8qdDurFnNxszsi6LiSm9Y1uR8CjZExlg4cZuzc6RvavwZkOy1e/D/+Oce/+DOs7jt4Q6/7iX/5v5Kr126KhITAyc986RqzC13TSKek+i0IQ1wvvGFXVU2LbfgBhj7iglGdy93kRjmpNhrbyY0qJw7D7QRnp2t+p2tYvd+HieLsNFH3kuLec3e363unRcZOj10vYqHQ65TaO17sRvZ6H7veoqL7dbcCnh/QbpZJ22WaDR/f9/ED8ckxTQPLzBMR8cjMfhaX13AchzAMiQiJQh8/6KBpEuHI5TNkMyn8IMQyfSwTiEKyRp52u4bnO1eqbCKiKCQI2qC5GLp+TQ8audY1ivkc//f/7j8nm03z/umL5HMZJsaH7gopue1E5Od//udZW1vjf/wf/0eWl5d58skn+frXv36NgPVegyInjWYbw9DJZdNUqy20aJhSUUJpxcI4xfwIIAO5ho6m61e6gFqYlomuGTiuzUC5iO+JUtnzAvpKRXy/xKlTW7Q6DlEQoRse6XSTgf4S2WxmxzDlTpP+TpO9Gix2WgnsxOSv/f7Xvk+vh0GvjXv3wNDdCVMNvt2Darfp0/W0G9240VVk7/2dVnXqO3avhrrNnBRZSKW2P7eTy2JvAzxlv95LJNRvci+g21m11nSvduttdzw6XoBl6oRhRNB1IN/+ws/g/ekrTJ08fo1Co2H2kfVr6D3aEzMKOfad/0ir9NdolMqMnjnJnrd/RLG+SahpbA2N8eMv/AxkMgyfOkGufnMCdc80eeflv7Qj0Qmj6GrfmZbjk0tbt9xV9UahafE5lblF6XhFVJQXiuPEBEeRnGZTyM1OnWh7rz+49hrsvi53IkQKOxH4naIIveaLvWNXbxuHXlO1nRY8u42B6rHecbQ3ytG7+Oomdt1p1u7Fzk5jXi/pqTdcFpfXcV1xKzZMgzBo4HgOTscTOwjDwPf7KBYMVh3vyufSMIwUYdQmigLSaZtUyqDeaJFJp9F1EyLQTZMw9Bjo30MI+L4v0SoNLNMknxsiDB3CSJ5rtTfxgyXyuSx9pTz5QpbXf3ySr33zBziOi22nmNk/xZdefvG2mZzthjvirPphcTt8RHZDGIbb0jOKGc7Nr/D3/z//Ck3TOPn+Bc5dWMYwLHw/YGLsSexUkSjyr9Z826kipcIQOhFeKAIltIiUaV7pLwC5bJpHDu9lY6NK23FEJ3KlvrvebJGxbZ58/BCDg6UPvNC6H++dSNUF3R0x6E5DdA9Mytyp2yFyp1bavY6eu63CunGzZ9hOk/VOUZTulEV3A7udml7tdtzU+/U2peuOSigiod7nLsxjtw3VZoeFtTrVluiegjCk2fbI2Abr1fY1Nu8AuC4f/99/k75GlZZRYDE7w1pqglOFZzGDNj8//w9IRXGzjghYH5+ib2kBK7r2BImA6sAQ2Y01Ul2P/8HoL7Oe2cPh2ht8rPI1zCiuImjpJse/+JfZ3LN/V3anIa6q+UyKqeESk0NFSrn0hztQDwHUOKEiMorYqP+7rpAaRXR6U1I7ucl+UOVa73O7LSi6NVnd6F1g7URyuhdQvc05uxds3fvfLXJ7vahu92tNE+r1Kv/m332NVqdBsWAThj6dToOVtTUcp4XjtjD0kE9+4jHW1ivMnl+gVCzi+gG+G6AZLlGos7axhdPx0TQDTZOeZqZlkbYzZLNpLMsi8CPqjQ6apmNZKTLpDI7jY+jSB800TTx/ha
eenKTcX6DjuPz5D44zPTXKkUPTZDM2rbbD4tI65f4iv/QLX/7IZOSe8RG5X3B2di4WrPYwwwP7JygV83z9W69KTxq/g+93iICFpbexzDRhFBCGPmEYYOgWk+PHME0dx3XwPBffd6+enMNDfRhmlu/+8IfkczaPH9uHpkuAOQwjMnmdCxfXmFte54UXvoDv61cHht4BoptEdKcsejUPvRUu8MHkQT3Xm4KAa3Oi3flUlT7oJgrquW7C1Pu3+zW90ZydelJ0o3tQUemQ7qhEN5FQ6aIEglIuTTFr0+x4+EGI6wXMLm5Qa7q7vibTaaEHcK7wFA2jwOvlL1C3407Wfz70n/CTq//2agdeDRhcnLv+59hY23a/oeU43/8JQOP19B7eKf8kX577R0x2zgFgRhGb0weuywqFpBoc3jPI3pG+uxIJuZ+g0qmWJY0VPyrU2KI6ZHePXWrB0/1Y7xjW3XNnN4Kj3qc3ItM9rnWPfzt9xusRnN7nuwmPIjjd9vjdZKfjWJTyh8hlA9J2iigKqQRLjAxMYloGnhdgGDr5zBTl/RaWPk8UwdTkCGEQUtmqs7i8Tn9RvsiVTOnV76FrIW1nlSBoo5kB2YyO4zjksiaW5VOrb6ARYqctwgimJstMT40DIcffPUsQ+Bw8MEUhLyXEhXyWQzNTnJmd4+t//CoH9k/csTTNQ09Ezs7O8Zu//VUqmzXGxwavMsMTJ8+xsLTK48dm+OHrJ9jckpCx6kZq6Bp+0CIiwjRSWFaGTLqPfHaIbKZILlvE8yPE3CZFIZ8nDCPy+Tzl/n4WFjfJZfN0GgOEUQqwABMijdEBqKzCv/23IZqm73iR7XRh7ZRr7b2IYOeUTfdkv1N4tPt+L3aKNuxEJHabB3qJhGFsj0r0Eole4WWCjwZNk6gBQBRFLKzXWNwQ3ZJt6Thel59HBM3NHHP2ES5lHud08WnQs9v217QK177H9d4frnH28IwU3WW2jtnHv9v3P/Bo5U/45PrvYwZNMvUq7WLfrvvN2ha5dIr+fCYhIXcB3ZqpW1WDcCNRm+5ITTeh6X1MRXd3Srn0vudOOhfP2z3aq+vgeQaF/BR+4F8p3YVSPgN5iNDQNY0gbENYJIosRodzVGt1Wq0U2axFu9Umm+4nCH2i0CeMXHzfR9Mk7Rihk830k7Ll+mu3O9QbbSZGhxgdHWBhcY16o4VlGdi2zYG94wR+llazTeBNceTAUbLpfXiuhmkto2kBmqYxPjpwxzv1PtREJAxDXvnWq1Q2axyambo6WBXyWQbKJb77g7f4o1e+j+9pZDN9RJGBacJg/z7Sdol8bpBcbhgdDU03yGb6AfEVkT2Z6Loldu+pFDomIRFRYDJYHkPTNLzQwsCCrjJFeW2A71+7gt9JC9KdjtmpWZWmbW8EpWnXilN3IxPXIxAKOxEJ09ydSNxq4WWCWwdN0yhkbYi4ej3omji8dxoZfMckdHx+MPgzNCzRR3UjFdb55MrvXY2G9OJ04XmWrEnqZp6UBl9Y+W15357t0pHHwdrrnC1+rOtRgxPllzlfeJpPLf0rDHf3qI2uwVApQxDFHYcT3P+4HVEbRUpUelpp2xSx2Ukgr6IsOzU6VH/bbTBqIYZp4vshvh9gWrkr84WOrmuEYRp0izAwsC2LfNbAzqRotzqkUmVSlniEBKHDVvUii6tvs7r+PmEQEoQhmXSKkZEBMmmbXDbD0FCagwfH2ajMY1hN2k6FMLSZntpHLh/Q6Wwyt7hOpLlMTAyiaxqa3qHbVyiTTeOsVu6oydlDTUSkKmaO8bFBNE0jigx8b5RGo8XFCy0yqUNYZWmfjiYiUztVolgYlTydacOVkwrAceqARhDqRKGPrmfRdRPNtjCCgBANz3fQdYtWu4HvOwwM5DAtHU0LgQBdD/FcF9d3efqRxygU8ttCkTuVxEbRhyMQCrsRCZXi2IlI3EvCywS3Ftm0hWHoVyNsXjtFq2ETBhq1tTytSg5SJtu5RkjO3+RnLv
0D+v21HfcbovOD4S+zlYpzzy+t/XtyYeuabTNBg59c/tfYYYcTfZ9AIoaCljXAK3v+W/oW1ynnm1jpax0o0ykTx4sYKN5YlUyChxPdIuJ0Gm6FFDFOR1n8b//iDc6eW2HPxATrm23mLq+SSefJZPM4nUAi5AOjaGi4jgu6z7FHjnL6TJXLi/OYuoVlWjKPGBYdZwvV2T0IIZ22efToAYYH+1le3eDxYzP8N//lz7K0vEGj2WZtfZO3jp/h3IV5Ls4tY6cs9u/vB62DnV7BTNWv+fztVgc7Zd1Rk7OHmog0mm0cxyWbuRI3jEw0PBaX12m2m4RRB8dt4bodLCtDX3EK13PY3LpEEHiEoX91KSc/mo7jBGTSGUwjhR9CLpNlcGAAxzeFTfsRheIgvr9Jo9VhaGgcw1AukuKOV2+1GRzop3CllLc7JbITueit3NgpxdFLJB5E4WWCW4NiJkUmZdJoRjiNHIEf4TZMausF3JaQb3S6WtV49Dsr/Mzl/zclf2PbvrpTLmvpKerW9jrY2cJzPFH9sx0/Rzao8dLqvyMdNDnR9xIdoxQ/aVhsbY1Re8tjaP8KxaEGui5XkalDGEYYhnbXqmQSPLzQdVX6rfMXv/QEv/nbX+X85bcYGymjWRe4uLCBYRhks2km9xwmnV0jiiIuXnHV/is/9xz/8Rvr/Nbv/DPCUDq3W6akTj3fQdO0qyW5mbRNPptmZbXCQLnEFz/3IqZpXk2pPHJ4Ly+9+MS2Qoyx0QH+1//v73Hi5LltmQCQ1Ozi8gaPHbuzJmcPNRHJ5zLYdopW26GQz6LpDs3OBS4vvAMRLK+u4XoB5b4DZDMD2KkMKatAGIWYhnllsDNIWSnSGZsojKR189WyLB/LNNA0C9CuCInSpCwD09TIpE2arTo6WcyUhu85bNVr5LIpnn5ylNFR/aq3xE5EIhFeJrgdsIwUTq2PerWN60a0trLU1nMQ9gwXBhA4jLQv85cv/y9kw+aO+1NYSu8n1Lbv43LuyDYiEiLERQ2NmaDBc+t/hB00OdH3CaqpiW2vDz2LldMTdLY26R+vki3Kas7UdfYMl5IqmQR3Fb3u3alUCjQNw9A5dGAPpVKOer3J4vIG5XKRL37uRXRd5wuffYGR4X7mF9fQA/9qWa+u66Qsk1bbwTB0CvkMrufz2LEDfPFzO5fd7uST1dveJJNN0251rvkcdwoPNRGZGB9iZv/UNmboeh6+H4h5jB9QyI3SXxrGcT0ct0MuO0AQhKKrMMA0DIaGStQbDfzQIZsxqdeaaFHIQDlHpbLGwnKDIHCwbYP+/kEWVt6j1JfmL770BKfOXuDchTkc18NOWRw8MHXlhBq424cnwUOGIIALF6Da8PB9aNcz1DayOI0UvVoQALSIfL7Fx7yvkgqb26IfEeDYNo6dpVjbBGA9veca5cimtX2ArBX78fIFyouXr3aDSYctHt/8E4ygw/Gxl6kaY70fhOpKmXatyIFjNUolF8sO6cvf3f4ZCRLAte7d3emS8xcXsVPWNUQilUrxN//PP8v/6zf+Ba22QxBGmKZBGIZ4no9pGhx7ZD//3X/z8xw7su+mjch2am+y0+e4U3ioichOje8MXSeKIqq1Fum0Tb25jGHWCYOQzcDC0HM4nkMYemj4WLZGtjjDM88c5snHDzE02M/a+hZvHT/NuQuX0Kwqna1N0DT6Mn0Ekc7RR6au/tgvf/aZHf1LEiS4U4giWFwU+/IoiphfcthaT1FdyeJ2djkX9ZC+wSbjj6xyOvwSl154kVxlnf6ledA16gNDLO87zIG3X8M+/jq206Ft5umVpXas/vhzAMuHH+X8My+SqW4ycv4MxcoagW5QHZ3A2XOQodAguNShsXVtpMNtm5z6cZnN1SYHD4VkDifakAT3BrqjEjulS3Ya93/hr/0UAP/4n/071tY2cRwRZ2fSNi997Am+8t//4kciDDu1N7lb809iaMa1PiIXLi3h+wFTE0O88dYpogjSmRSGpl
FvtNF1jdGRAfZPj+MFAf/tL/8czz39yLYfsNsgTWlQWm0nIRsJ7ilsbMDaFW2p60K17vH6mwH1TZPdim41I2RkosGjz9Zpux4d18fxfLzg2qEkX1ln5o3vMXDpPF8d/78xnz1Cd3TFCtv87dN/kwiojE7w3qe/RKM8ePV5XYOUZWCb0sPJ0E2W59JsLZeob6aIgp3Lr0ol+PSnDaanob9/x00SJLgv4Lour3zrh5w6c5FSMc/nf/J59k6P3/NzyM3M3wkRuYJu4rC2vsV//Pr3qGzWuDS3xOLyBkQRrieaj317J3js6H42KjUeO3aAv/Vf/dw9f1IkSNCNRgPm5+X/USQkZGEB3j8V0LhWSH8VhhVSHKkwfWSTlGVc8dMJCcJIDP+CiF5/vHxlnZETJ/mh8yUq5iTdRESPXP7b0/8VG3sOcObFT18lIZmUNOMzDOlk7fshTcfDNHT0yCBslpk7n6Fa0/DbKa6FgWXB0aNw5AgcOCCaqgQJEtwZJM6qHwLbQ2cwNjLAK996Fc/3qdVbuK7L2OgAh2b2SBfelcpdEfUkSPBR4Dhw8eL2/hvtNrz5Jqyvy/M7QoOU7TM4tUXfxDop08LQNbwgREO63GoaRF4AQYip63hBSAQ0y4O8/+QXqb83CN72KEuIyff+01+m3dd/tYSrkLZA00hZBhoaURTR8Xyi6EoDsKzJwLCPnXK4NGtTb7g0q4qM6KhIjufBO+/A3Jz4QwwPw/j4rT2eCRIk+OhIiMgu6M6fvXfqAq+/+R5r61s4rsdmtXHXRD0JEnwYBAGcPx8b2+m6/P/sWRGobm0pcqLDtphGhK5r2DmP4T2bDE7UiCKTMAJL17A1A8cPMHWd/v40jZZLLpNipC/H+eUt1rakksZppwnYKd2j0ywNoGuxHXwQRZi6huNeab4XhXh+iH6FnBSzKXQTxqYkZz5/KYVpuFQrqWv2H0VQqcA3vgGHD0t0ZM8eyGZJkCDBPYKEiFwHKkoyNTnC53/y+XtC1JMgwc0giiTl0mjIfU2TSXh1FX78Y6jV4ueubBH/zwjR0CmUPAb3rFMabmKZBoVsinrLvUoUTEOj7frouk4+azM+UGBxo46GRiZlEUQhnmMShTvrObTAJJUKSBlCaoIwwjI1bFOn4wW4nrCnfCbFQDGDnZJhS9dhfMoFDeYvG+QyKdbXJM3Ui04HTpyQdNTHPy7GVft375eXIEGCO4iEiNwgdqrFTpDgXsb6utwUhoZEmPrGGzIh12riAHktDFJ2QBjq5EsOew5voWcbZO0UpZyNnTJJmQa1lovj+YRhiB9GFLIpDoz3s7jewHED+gs2YRTSdnz00GTHEmBAiyyytk4QRRRSKSCi44ZkbZNiysQydFqOR18ujWVtJzOaDkNjbRzXIhVmsFMGm5sS4elFEMDmJnz72zAzI999eBjK5Q95gBMkSHBLkBCRBAkeMNTrEgVRGBoS0nHunGgmGg2oVnd+rXKF1HWD8amIFz9hki4UOLfokbOtq0TATpkMWSauH0jb8iDk8OQAaBrVVods2kTXdYpZG9cLcRydXdrPELo2QdTENHTKhQymqVNtOOwf66e/kCFrm5y6vEGl3qZo6tc4QbYdn5kZC8MxyWfFMTiblZLka94rhFYLTp6U5194QaJDiZg1QYK7h4SIJEjwgMBxRO+hUCzKhLy8LGmJpSWZhDudnV9v25LKMQyYnIQvfEGjvz9FFFls1TvXEgENLFOn7fgMFLLkMimqTeeK47BEP+yUScHOQbj7LN/YtBmddChmJdri+QEpy6C/kLnaFXhiqEDTcak1XbJpE8PQCYKQVsfHThlMDhcoZuVzpVJCuvbtg8uXY11MN3xfoiN/+qeyHUChABMT126bIEGC24uEiCRIcJ8jCGTiVY0QLQumpoSUXL4M774rlTGxIPVa5HJSZZJOw/Q0vPyyTMwg1tIfRARUTxfTkK6iQRCimxI9cds2emRKY8kd3jv0MgyW/CuNJyNaHZ
9yYXuzulIuzaHJARbW6lRbHULHR9c1yoUME0OFq1buR47A6dPx3/3746qZnY5bqwVnzghZe/55iSZNTd2azq4JEiS4MSREJEGC+xRRJFqP5pUWL5omq/tKRYjJu++KJqTdjrfphaYJ4XBdyOfl9Z/9LGR63NFvlAjk0halbFqiJzmJntS2jG3donvRaRlEgO8H1xCb3s9QzNo0Ox5+EGIaOrm0dc12hw8LuTh6FN5/X4jVysrOuhHloVKtwve+JyQEJCp04EAiZk2Q4E4gISIJEtyHWFsTV1SFqSmZNM+fl8dPnpQIx+bmzqkJEOJhmjIRl0oy8X7qU5La2Ak3QgR2ip50OjpBIHqOuCrHQLXvdR2dRsvdkdj0QtO0q+ma6+HQISFjjz4qx2J0VFJPKys7b+95cYnz6io8+6yQmUTMmiDB7UdCRBIkuI/QK0QdGYG+PknDtFqiBdncFL1IrbZ7KmZ4WNIVngcDA0JCPvEJISbXw40Qgd7oSbsdEoRIyiYErrazM4AIHTi2d3jXCMeHxYEDclwUGenvl9TTwsLO1UJhKMfE9+GHPxTy8thjQkz279+doCVIkOCjISEiCRLcB+h0xBFVoVSSibJaFS3E2pqkIcJQHtvNIVXXhYQ0m0JSRkaklPX55yUdcaugoieNtsfqOY0tUycwdAK/l2RoeJ5BKWdwi/jHNijB6rFjEuEwDEnVLC5KyqoXUSTkrF6XY7i5CU8+KZGSfF5EvAkSJLi1SIhIggT3MHxfJkGlsUilYO9euX/mTGxj3mhIimVra/dUTCYjlTSKhIyOip7imWe4LSRA0zS0MEUUgBbtrrdQkYheXcqtwp49oqU5fFiOpRLzrq+LnmYnqIjJxga8/joMDsITT8CpU4mYNUGCW42EiCRIcA8iiqTao9WS+5om6QHLkjLcalVW9YqkbG3JZL5bKmbwSkNbFQUYHxcx5+OP3x4SorCxIe95vfeIottLREAiGUtLcgwXFiQFpesS5ZifZ0cxrSIjtZp8vkZD0jwgr52ZScSsCRLcCiREJEGCewyrq9tX6qo3SrstAkzXjUtyOx2ZKD1v532pShoVKdH1WPtw9Ojt/y5ra5LiCMPdyYgiIrcbY2MxcVhbi1NRSti62zH0PPnsa2tii9/fL9GRM2fELG5g4PZ/9gQJHmQkRCRBgnsEtdp2N9CREZn0oiieKOfmZAXv+7JCb7V2s2mXCMPeveKREUUSTRkakoqQ/fvvyFe6+vnCcPdoTRSJJmNs7PZ/npGRmIykUkIwtrYk0nH6dByB6oXrCnFRkadOR1I9IAQlEbMmSPDhkRCRBAnuMnqFqH19ErUAmfiWl2Wbd9+VCdF1hYS027tP7v39MrEvLck2mYys3D/2sTvrHqqiDNfzEYminT0+bheGhmIyYllCRjY3JUJ07pz8fycEgXwPz5PfxHHkOD/2mKTIcjlJAd3OVFeCBA8iEiKSIMFdgu/LxKfIhG1LRYeuy3Ozs/LcpUuSrnEcISTN5u5VMSCTYSoVR1eKRfHCePFFqZi5U1DkQ9OuT0TgzhIREFKm6+IrMnKll2W1KhGkbFYI3E6fWaWR0mlJn6noyIED8vzp03L88/k79lUSJLjvkRCRBAnuMKJISkqVcLRbiAqxGLXVkpJcx5EoiCIhu6Vi0mnZT60WT+zlskQAXnhBVu93Eo2GfNYbKQu+00QE5Hjouhzv8XH5HZpNEfYWChLlcN2dX9vpiODV9+X1jiPH+uhRSZ1pmohZb2VJdIIEDyoSIpIgwR3EbkJUEOJx+XKsCdnaErLieTKpX68qpq9PJj5VARJFQkBGR0UTUize7m92LTY3ZSI3TZmQdysrBvl+dwOlkpCRhQVJWa2syGdJpUSQevLk7vb4SvuSzcbVQe22iIMHB+HsWfmrKpYSJEiwMxIikiDBHUC1KitnhdFRIQ8gk9mFC0I4mk1JybRacl8ZlO22MgfxtRgbkxV8KiWvGxqSx59+OiY6dxrr63
HUR9evn57ZbbK/EygUJJ0yPy+/y8aGRJWaTTF6O358d7+RMBTiYttCtJRR2sAAPPKIHIP19UTMmiDB9ZAQkQQJbiPabdF4KHQLUUGiBisrMqHNzsqkpiZl9f/dJnDTFG1CoSCvTadjErJvn6zobfu2fbUPRKsVR3F0/fqeG54nN8vafZvbiXxeolOXL0sEwzDktrwsEaV335Vo1m5RHceR5woFIS2OI9//8GGJRp0/L4RwaioRsyZI0IuEiCRIcBvQK0RNp2WiU5Ox58nzIKvvy5elhNXzZJtKZWcLcoViUchGFMkkl77SI25wUMjJ44/fvUldwXXjSpMPQhAIabmbnzmbFbHwpUuiH8lkhDTMzcnxPHdOfqfdolO+L7+hbcfakU5HoiOHDwsxScSsCRJci4SIJEhwC6GqXJRBl6YJMehuJre4KOQjDGVyq9dlkjKMuFndboJUkIjK9LRoSDY2JORvWTKRHjkifVXutkhSfX5NE3K1m7ZFQdm8Fwq3/7NdD5mMELwLF4Tcqa7Gly9LeiWbld/seroR35ffpFiU38d15fc9elRen4hZEyTYjoSIJEhwi7Cyst2DYnp6u225EqOCbLe8LPoPNXGtrso2u03amia6g74+WW232zJJZrOyCj96VG73gu14oxGbqKn/Xy8loYjIvQDbFtJx/rx85r175fHFRUl75fMS2ahUdv+tXFd+20JBft/lZfm9BgfFydUwRMw6MCD7TJDgYUZCRBIk+IjoFaKOjUk1hkIYyqSmqiwuXpRoRqslBCSKZJJz3d0ntlxOjLNMU16vvEb6+mTl/sgjcrtX9Aebm/JdbTv+Tt2frVe8GkXXT0XdaaRSEsk6d06O88xMbHaWzUqjwBMnhHzuphvxPDk3UikhJBsbsSD58cfl8Y0Nue3bd3f1PAkS3E0kRCRBgg+JXiFqf39sjqWgxKggE87aWtz3JZuV+7WaTMTXa1j36KOStjl7VkhJrSbvp9IxR47clq/4obG5uV18qmkfHBG5nknb3YBlCQGZnZVozaFD0l+m05FoyLPPSnnvTroRTZPfMwhiB9y+PvmOyim3XIYnn5TXXriQiFkTPLxIiEiCBDcJz5MIhyIOmYwIUbsnkG4xqu+LLqBSkShINivPLyzIhHQ9Mef0tOy71ZL3LJVEU1Iuyyr76NE71zfmZqAa8qmS1RuZXO+Wl8j1YJpw8KAQwGZTCN/srHy/+XmJUhUKcOrU9oiOSkWpc0T9zrYtqR2lHXn11VhYrMSsExN3XyuTIMGdREJEEiS4QYShrH6vJ0QFIRj1uvy/UpGox+amTErFomhBlCB1tyiIbUsUxLZlgp6bExLS6UgkRPlUTE7evu/7UaCsz9Wx0fXrG5qBpDHuRRhGTEbqdYmSXLokv8v583I/lZJoSbUaE8ve39Z14/Tc4KAQm5UVeO01+T2ff172v7CQiFkTPFxIiEiCBDeA5eXtNuR798YlswrdYlTPE93HxoY8XijIBLSwIBPY9aIgpZJYsrda8p7r60JglE/F6KiUg3b7kdxL6E5TqHSL0rQo9EZIokjI2b0Kw4hTM/W6RKmULuj0aXkulZJoyU5+I8pZNgyFgKhGhNmskFXfhz/901g7EkVCfMrlO9sfKEGCu4GEiCRIcB2o7rcK4+PX2qV3i1FBVsWKuESRTCYqKqLcUnfDyAh84hMyma2tSbjftuMusXv2yKQ3MHCrv+mtQ70epyZU993eSp6dUjWKoN0LVT87QdeFAJ4+LZ91ZEQiProujyn/lrNnJW3TTciCIO5NA0IyfV9+25ERIWFraxIdKZfhk58UElupyC0RsyZ4kJEQkQQJdkB3dAN2X5lWKkIaQCae5WWZQJpNESdqmkRBFCnp1Q4omKaE4o8ckXD93Jw8FkVxNGXfPiEh3RU59yJUiiWViv021HfZDapqxnG2lzzfa9C0mIy0WnJeqMjYxYsiNn3sMdGBnD27XTfi+0JaNE2IiUrVgPzGURRHR7
75TdlPuSykR/maTE8nYtYEDx7u0bVHggR3B54nk4wiIdmsTDy9JMTzRKCoSEi9LlGRpSWZYIaGZBI+f/6DSUguJ6mYyUnZ38WLsvpVHV3DUDQKjzxy75MQiKuAbDuO/hjGB5uaed694yVyPSgyAnHEamZGyITydzl4UPr8FIvbIzxhKLdUSvYThnLuqAaHo6NyHNbX4Uc/kptKwXU6cm7eyymsBAk+DJKISIIEyIRw6VKsadhNiArbxahBEFfENJuSMjEMeUwJVLtLc3sn48FBeOklmXjqdSEhpZLsa2hIPs+RIzKx3cuRgm6otIPSOsCNpVt8//4gIiDnx5EjQgxcV37zxx6TnjSbm/K7TUwISXnvPfl9lW4kiuQ1qkFhFMVNDkF+f9eNy7z/438U4fLkpKRvFheF8CRi1gQPCpKISIKHHktLIkJUJGTvXlnx9pKQZlOiIIqE1OsSfl9elkl0fFz2MTsr6Rm1+t0plK5psv1P/7RMUp2ORGH6+mRSGhqSienYMZnw7hcSAvJdXFfIh2HId7Ws7SRsp2NyL7mr3igOH45TLVtb4i2iollzc/I7PvOMpGx6u+8qAqO0H8oArVKR/Q0PyzaVCrzxhohZJybkuCoxq4rIJUhwPyOJiCR4aNFtNgY7C1Eh7gmjVrSGISkXFQUZGpKJdm4unkQgnnh7xanptKxmn39eCFCzKSvdYlEm4sFBec3Ro6IJ2Skqc69CddtVZaoqHfVBYtUoku13ayh3L+PwYSGfvi/n0wsviOhUiZj37RMRazotUbde3UgYCtF0nDhVo6IjfX2yvSoL/upX4/NCnW+Vys5VXAkS3C+4j4a4BAluDXqFqNfr99EtRgWZEFTTOtOUlW6rJRORipT0pmO60dcnbpqjo0JCNjeFiGQyMimVSnHYX/UkuZ+gKmYg9knZiYjsBEVG7kfMzIig1HEkLfeJT8Drr8tz586J6dyRI/L7njwp54wirGEoZCObjYlcpxM30Mvl5Pzc2pL9//jHQnC+8AV5rNGQlF4iZk1wvyJJzSR4aLCbEHUnEuK628Woti2r2fPnJe1SLsttcVFIiKoUUamYnUjI8LBMHum0rGaVsNU05ZZOy+3QIZm07jcSArGQUpXuqqoQlU5Q6J4su/9/L/WbuVns2yfnFAgx+NjHRMCaTst5Yxii83j+eTl3eiNdrZY8lsnIMfF9IcKbm3JcBgYkvVOryTn4B38gaUHlrJuIWRPcr0iISIIHHipEfu6cTIa6LuLPXlt2hYUF2R4k5VKtwltvxX4i+/bJ6y5ciJvVdZOP3lW9Zcl7/dzPCfmo1eLy3CCQUk8Qx9T9+2Pdwf0IZdNuWTKRBkFcsnojaLVu32e7E9izJ/49z50TzYhqTHj5spwbAwOSvhkb21k34rpCYFQUqdEQ4rG1JfsZHIzFrMePwx/+oaQVVWXX4qIQkm4DuQQJ7mUkRCTBAwvV1fbMmVh7oLw4doo29IpRMxkRBKoS3KEhWcmurMh2lUo8wSrBYm9KRnXN/ZmfgXfekSjB/LysnINA9tfpyKQ0PX1/kxCIIxqWJZNuEMQkbbe0S/fxUqW/9zMmJ2Ot0ewsPPWUkIdUSn77Ticu2Z6eFnLR3RTQ94V85PNyHHVdzt+NjdgUb2Ag7k9TqcAf/ZGcq+rcjiJ5724NVIIE9yoSjUiCBxK92o7rNRLrFaMWCrJ6PXlSJgTblkiF40gURLW4V+kGJcqE7ZPowIB4SczMwJtvygSyshKTkIEBmXj37hXNyPT0bTkUdwzKlCwMY6KnjpPv756OUf+PIiGBvh937b1fMT4ux2BzUwjCY49JlGJxUc6BclnIyvPPy/mgOvyq8ykM5dzIZOSmXGe3toSUlMtCVDIZ2c7zpEx4bg4+97lYPL25KbdEzJrgXsZti4hcvHiRX/qlX2Lfvn1kMhkOHDjA//Q//U+496MsPsF9AxXVUCRkcFD0FruRkI0NiZiolXs6LV4QFy9KSmZwUAhDpS
L73diIX6smjN4oiK4LsfjiF+Xvu+/KBL22JvtX4flaTbxKpqbufxICMQlRq3ulgbCs7db2O1XMqMeazfuvhHc3jIzEVvwqWqG8aba25FzyffEIefppEbIaxnZhr+pi3NcXP9dqyfldqch9FR1pNmW/X/uapGwOH47P+4sXhUTf79GmBA8mbltE5NSpU4RhyD/9p/+UmZkZTpw4wS//8i/TbDb5jd/4jdv1tgkeUrju9oE2l5MQ+W5pDteNdSAghOP0adGBqCqWAwdku4sXZdLwPJkM1GSr9A/dKYdMJvYHOXdOJot6XSYPZfBVLgvJOXxYtt2tYud+g6qYUcRCHXvVY+V6FTFqW9W1dzfieL9haEjOkbU1iXrMzAgZfe89OSdcV9JyY2Pynd9+W3xlVARJWcHXakJUlA2+60qkw3VFG1IoxJ2afV9I8+IifPaz8vy5c/K606flve4Hh94EDw+0KLpzHPnv//2/zz/5J/+E890zwHVQq9UolUpUq1WKOxk8JHjoEYZCQJTvgmFIGmW3ipMoijvgQhyhOH1ayEGnI1GMdFoIiYqMQEw6VBSkd2Lt6xNy8elPi4+E5wkRUduZpoTT63Wxa5+YEIHqg4KzZ2N32lRKvufSkqzYGw1ZxSsyl0ptr5BR0ZN8XvQ0e/bcne9wu9DtWXPggBybt96Kz6OpKXnO84SMqIoqRUZA/p/LybnZfV6l03Lu9fXF6S3Pk2OczUq07eMfl2iJ+gzXcw5OkOBW4Gbm7zt6GlarVcrl8q7PO46Do+wtkS+SIMFOUEJUJSyFD+5Q2mxK3lxhdBROnJAoSKslg/zUlEykly7JxOm6EvZWLdxVCqeXhAwPi4Pm44/D978vz6+txYQom40n36NHZaJ9UFb9CopYdAt3VYqmW0ejoiXd2hCF+8nm/WbQ3y9EdmlJohP79olY9fXX5TgsLsZVNC+9JD1m5uZih1oVaVIRj6EhidIpz5H1ddl2ZES0J64r53utFmtTPvMZSQ+pTtGzs0JeVC+bBAnuFu5Y1czs7Cz/+B//Y/7r//q/3nWbX//1X6dUKl29TallQoIEXahUZHBVJGRyUnQgu5GQMBQdiCIhalJ49VWpYmg2ZTAeGZHJ9PTpeEWqSEgUxakYtUIFmTjGx+Ev/AUpCVYkpFLZvso3Tdnf4cMyCT1oJATk2CnzMpVSUBbm3UQEthMRdR+u3e5BQqkk5wpIFM80xWtEiUjX1uLj9slPir1/oSDbKbt8EOJRqch5nM3GYuDNTRFZu66kCJWuRFnHf/Ob8L3vSSRERZy2tiSNcz/7tyS4/3HTRORXf/VX0TTturdTp05te83CwgJf/OIX+St/5a/wy7/8y7vu+ytf+QrVavXqba57+ZrgoUejsbMQVfk27AQlRlW6jrExIRrHj8sq0raFHFiWkBLVtEyt5JUXhoqAdHszFAqSBvr5n5ftf/xj2Va9XtfjMs4oEqJy4EBsevUgQa3MDUNuQRC3ve/tM9MtxuwmI70VSA8iikUhziBpP5DKmUIh7rrreUKyn35abt3mZ6qaSKX9dD3WoWiakOqFBblGsll5v0wmjqacOwf//t/LdkeOxOenMut7kI99gnsXN60RWVtbY6O7dGAH7N+/n9QVp57FxUU+/elP87GPfYx/8S/+BfqNeD1fQaIRSQDXCkvzedFXXM9vo/c1Y2NCNC5dissdR0ZkAvB9KdWtVuVx247tyX0/nli70zGKBH3yk0KO1tbkPZVgU+XnHUdWvHv2CAm538tSd0OjIbqYTEbSXJ1OvMLfv1/I38aG3LdtmVhVuSrExziVgp/6KakkeZDR3WZgakrOkePH40haf78cp2JRonMqhahErN2EOJ0WsqI6/KpzNpeT60RF41QEUdPk3BwbEz2T7wtBURgdlWhKggQfBbdVIzI0NMTQDcr8FxYW+MxnPsMzzzzDP//n//ymSEiCBEEgIWw16JqmpDWuZ30eRXG6BeIQ9VtvySrRcWRw37
dPwtHLy7K9Eq+mUjHpCMN4EFcwDFmBPvec9Iz5wQ9kQlWTbxTJIK/ErioHf/Dg/WnZfqNQBEx9R5USsCw5hjulZbrdaJWeRPVdedChRKSXLknKcHJSjM/eeUcI3MZGXNkyMCAOrcePy7nqedt1N+22kJByWf5fr8fN8y5dElKzZ49ETdptOb9Vf5p/9+/gU58SUq0EtcvL8jcRsya4U7htp9nCwgKf/vSnmZ6e5jd+4zdYW1u7+txooo5KcB30VraArKp77bB70WjIQK0wPS1pmffekyiI78ervSCIe8R0OrL6VKkBterUtO0kRNlrf/7zEk35znfkNd3eF4WCfM5qVVac5bIIBB90Dq5W2+p42XYc4TDN7aSj2422Gyqd1WrFRmgPMjIZMRq7eFHO2/FxeOKJOPqxubmdhKtGenNz2/U4USQEe31dooWjo/L6KJJj6fuy/dSU/B6dTvx71Wrw7W/L+fzpT8u1cf68kJ3ZWSFDY2N35/gkeHhw24jIt771LWZnZ5mdnWVSJUWv4A5WDCe4z7CxIStChcnJ62tAQCats2fjiW5gQAbuV1+Vwdlx4gG11ZL3OH8+Xjmm03FIW0VBlDZEob9fPsvLL8s2f/7ncd5dTb5KHFipxILU+92y/Uahit26m9wph9Ve3YciGDs1B9Q0+V0cRybqBx3ptJDs8+elsiUMJS2VyUg0UDmq9vcLmfjUp6S898yZ+DnbluOlRKmeJ6RDVX2pvjTKs2RkRI5zpyO/hTqHf/d3JdV44ECcOqpW5TY9/XD8HgnuDm7bmuMXf/EXiaJox1uCBL1QQlRFQoaGPliICkI0zpyJQ/v798vq8gc/kIHY92XwnZiQ7S9ehPffl4FZ+VkoEgJy33W3ezcMD8tK9ad/WlaQb7wRh8QVCRkYiEnIwYOS/jly5OEgISCTWvd3Vaktpf3odp/trpjpLuFVjzcaD2YJ725IpWTyhzgScuCAeM2oqMbWljy/sSFpwaeflvNSEb1UKj6WyrOlv19uKr3SbMq1ceaMPF4qCYlRkZVaTaJ83/ymPH7kSJweunQpbhqZIMGtRpIBTHBXofq3KBQKEqL+oAm8V4w6MSGT13e/K2TA84QMjI0JYajVZACu1eLyRiVIVd4gsF2fkEpJKub558UfRLlVhqHso92OhYWqWkbZye/bd+uO0b0OpetQVu4Q+18oIgJxBGSnlEu3bqTZlP09SGZvHwTLEtdV1aguDOWctm3RhijNU6kkZOXoUdGZnDgh52R3f54gEPISBHINTEzE563SiLz/vkT4Bgfj6Ihty/uo6MjHPy7bDA0JCfE8ER2PjDxcv02C24+EiCS4KwgCIRJqkrIsyZd/kKCzV4yq1P/vvCMDtEq3jIzIIBsEojdZWZHB2TBiN9UgiKs4XPfa0tz+fvjCF2T/P/yhvKfvy7adjuynVBIy1WjICravLy7PfFjQaMTEz/NiLwxFONRxVatppWtQxKS3hNfzHq6IiIJpSjTt7Nm44mh4WCIgb78t55+uS5RwaSnWfJimnOPqmKly6W4b+L175fpot+Py4AsX5Hw9elSuD8eRfZimEPbvflcifZ/9rKQYt7ZiIasSsz6oVWAJ7iwSIpLgjqJXiKppEj34ICEqXCtG3bdPoh9/+qexsK9UErGeqmR57z0ZdNttIQ7dZluaJiSk07m2NHfPHnGitG0JVysRa6cjA3ahEJfnttsSCRkYEAL0sEEJH5XgVxlsdUdEejUi3c3xIE4PQKx3eBhhGDEZUTbuo6Nx1Yw61tmspF9KJYlcvPWWpE+U0FeVUavqGd+X89p1heQokXUQiP/N9LSc0yo6ks3GvXB+7/fgxReF+JRKQmBcV6IkxWJs0pYgwYdFQkQS3DGsr8tNYWpKvA4+CKrCpVuMWirJwLy6Gg/OIyMSRvZ9WbEtLspgG4ayilTRjCCIS0pbrfh9TFMG60cflXSM48iqUFXTNBryWhXOVkLAQ4dk5ao6rT5saDTiFB
fIsfE8IRamud3uXRGVbr2IIiUQR6oeZhiGnFNnzkgUIgxlsn/mGTnnq1V5LJuVyIVliS18sSjXiSLlmUzc9Vh17C2VhGQvLsoxdxw53mfPCuF55JG4akYR9VpNHFn7++FznxMdVrsde/LUarLPB9GoL8GdQUJEEtx21OsSBVEYHpay1htBN3nRNMmjLy3Bn/2ZDLhhGGtBXFcG0OPHZXBst+PQtYqCqCqZ3lV3JiMD7YsvirX26qrk39XEWanI/0dHY0IThvJ5HvZupr3RC1W6qyYyJei9nsW7wsPgrnoj0HVJh5w+LedyGMZeI+++K6JVRUZAztfnnhNif+qUEHHVkkB54bTbccpm3z5Jsyhy3WrJNVqtSkRGlbB3OnJ9KW3V7/2ekPS9eyUKuLwsZOnyZXmf/fsf/LLrBLceCRFJcNvQK0RVhOFGKkl6XzsxISu/N9+UQbfRkAFPRUEcR8jCxYsSBfG8eJD2vHiFp0LWvaW55bKU5o6MbO9LYxhxKHvPnljMl0rJxHCjUZ0HGapiRhES244jIun09qgTyDHtrpKBaytuHnYiAnJMFBlpNCQCMT0de42obsaWJcd0bg4ee0yO+XvvSdRDRUS6Uy3qehgelnTM8nIcNQkCee3UlGhHTp+OOyan00JIXn1VnIhfflmI+eCgpGl8X66dRMya4GaREJEEtxxBIAOT0l1YlqzAbmSlFEUyoKrJK5uVCf/SpTjsDCKyGxmRwc/3ZfCsVoUoWJaQgzAUghKGsYCvu1uvYUg6Zd8+CW3ncmJTrlxCTVNIiK7LNpubsTPr0JCsClXDsocVyixLNbaDWBdimkL81OM7le92P94N171WyPowopuMtNtCzvftE8Lx/vtCNpaXhRAYhhDxmZnY0XZxMS4zz2bl/0o3EkVyHe3bFxNvRVzOnxdi/+STQnh0XQhnf79cZ64Lv//7ki46cEA+Y7Uq0cpEzJrgZpEQkQS3DL0VLTcjRIVrUzj79smE9qMfCSFoNmMdh4qCNBqS31beE9msDH6+L2RG04QsdDrbXVJTKSEhjz8uQkBNEz2IWtVHkbynbcvqsFKRQXxoSAbvG3F6fRigzLD6+uT3UcTB9+X4pNPbq5EUuiMhSqjaLRhW1R3JMY7JyJkzcs7PzgrZOHpUzvVLl7aTkUuXRFOSz0vk5PLluL9SNhvrpJR+ynHkWltbizUprZZcA6+9JoT7kUck5eO6cg3mckLMX39dCNHnPy/pyWJRyJDjyGKkUIg9fBIk2A0JEUlwS6D6YyjcTMqiV4w6OCgk4exZGVQrFRlgVd8WZZZ15ow8V6/LYF0oyOvVJGaaMpG1Wtt1DMWiDNIvvSR57nZbws0qCtJsyi2Xk/erVOT+xIS8bmYm6cGhUKvJ30wmNt1SpdCplNx2inh0+4YodEdAVO+ehIgIuiMjvi9/Dx8WPUc6LfeXl4Uop1ISCRkYgBdekHP23DnRWinthzqunY48plI1uZwsBpQ+JAxl39WqmKidOyfks9GQ96pUhHT8h/8gz8/MxH2cLl2Sa/PUqSSFmeD6SIbTBB8JvVGMm80PdxMYJUZVxGBjQwiAil4MDcng2OmIVqTVklsmEzcBU6Fny4rtwtVKW9dFCzI0JH01hodlcD5+XCbBfF5CyioE3d8vq756XVaFmczD0TfmZqAEkKoiRvUy0bQ4LN9dFQPbq2Z60euumjTd3o7Dh4W0d5ORqSmJ3J04IdfT0JAc+40NmfyfeUYiIadOxR18g0CeazTkfq0mj/X3S0plfj5O6SgvnlpNiM/4uCwSWi35vXM5uY66oyOZjJD8lRW5hubmZDFx4EBy/SS4FgkRSfCh0OlICFZB+XfcaE6/V4w6ORkr/ufmZPAyTSEOIyNxFOTCBclZK3OxfD7u9KoMnzKZuEpAIZWSQfbgQfjYx+S9zp+PP0OpJCFsZQmfTssqsNGIc+4PS9+Ym4FKu6jjYhhxhKo33aKw0+O9qZruRoIJtmNmRs5bx5
Hr5fBhIdVPPx2TkYEBOWebTSEMSsSquvu223J88/ntHXm7UzWVSmyspjpLKyHrCy9I9U6zKZGwsbG4t80f/IEIag8flmt3YEAiKUEgUcybqZpL8HAgISIJbgq+LxO4mkhSKYkW3Ogqp1eMmssJCanX4fvfj7UYth1HL5Rb51tvxQOrZclqWeWzlcW1SsV060FyOSEaTz8tA6RpiolTpSKTXj4vA3sUSVVCFMnqr9kU4mKaEglJSMi1UNEP9fsroaquy+PKRr8XO/WagXg/KvKVYGfs2yepj3Y7joz09Ym49L33JBpSLgspjyIhAocPy/E9eVIiFc1m3FzQceS3UCZonicLi0xGoiO+L9tEkVz/GxuirwK5dmo12TaTEfLy1ltCOj73OXmsW8y6uiq3RGeVQCEhIgluCL0EQjWYuxlVfG8aZ/9+meTffTdu9qUiF6pDaBjK+87Px7qBfF7eV63ioijuDKpCzQr9/bL9Jz4hZCKKpHOuGngzGRnQQQbLej12Tz14UD7PzMxHO3YPKrqt7tUxt+1YhwA7m5PtVLar/qpoiaqaSbA7pqflumg0YjKSzwvZVmSkVBJC4nmSTjlwQLbpjjyqUutMJj73Ffkvl+U6XViIy+JVl+o33pDP8NJLkiptt2V/e/YI4XAc+MM/FIPAo0djMeulS/I+588nYtYEgoSIJPhArK5K9EDhZl0UdxKjDg7KPk+ciMtilanY4GBsvPT227E4zjBkIDMMGfQcJ/aqcJxrXVLLZVnVvfRSrC/5/vdl37Yt283PC6k5dkz+HwQy2B44IJGUPXs+8uF7YKFIm2pRD7GfiIqSKCLSa2amegr1ClYVEfG8hIjcCCYnRZhaqwkZOXRIrofHH5fIx9qaEIeJCbk+zp2Tc/qZZ4QEqL426pgXCrGuSpX4ep5EPVWvGRUdAYl6VKtSeba6KoRlfV3Gh3RaIi/vvCPX/8svyzW+d2+c2k3ErAkAEtlQgl1Rq8kgoUjIyIgI0G6GhKytyWCnBI0HD0oI+fhxWVEtLcl2fX0yQPX3y7aLiyJ+29qSz2HbQkJ0PS4ZVX4h7fZ2EpLJCPE4elSa1g0NCdlRJKSvTwbSpSXZ9vHHRR8SBDIA798v2yQk5PpQnizdof3uKhnVc6YXuzmqdlfS7PS6BDtjfFzOVxBiEARybTz6qBDxZlOiEOq6vXxZjvHjj4t2ZGxMttd1uY6KRblvWXKtbW7K9VgoyDWRSsm2jiPX4eKi9GNqteCTn5RrMghk3Ni/X7bb3JToyIkT8hnSaRlLlLB9bk4++06aogQPPpKISIJr0CtEVWWzN4OdxKj5vBCT994TguF5Mmj19UkUBGQCeucdWWWp0tBCQQY/190++WlabD2tUCjI7bnnZJA1DBl4FRkaG5NBudGQ7Y4cic3XLEueVxGbBNeH0gzo+nZhsO9LREqlz3YSpu5ERtTjyuY9wY1DeYhsbMi5fvCg3D92TH6fxUWJShw4IL/V8rJcd8eOye+kRK6qWaRKt2laXA7veRJlnJmJq2GCICaNp08L6Xn+eVnArK/L+xQKsr/FRXmf8+dFO5LNxl2yZ2fjkvyhoYe3b9PDioSIJLgK35dJWa1obVtywDdTbhdFMvGriUmJUX1fBGxrazKAZTKxO6oqva1UpPzPdWVAs225GYYQB8eJ3TrVgNnt5tnfL+/30kuxruP48bhXzdSUDJCdjgx2e/bE1tSFggx+o6Px6jLB9aHSLkpwaJqxtsAw4p4z3Zbt3VqQbkLSncZR6Rn12qTc88YwNCTHSkUhld/N0aOxBfzsrJz3QSCLgXZbzMoMQ0jAyoqQe0Uucrm4oaFK1ahUT6EQpzOVsPjCBbmOH3lEdFmvvx4bD87MyGeoVuGP/kg0LU88Ie99+LC87+KifP61tUTM+jAhISIPAcIwZGFxjUazTT6XYWJ8CL1rdO8lDx9GiAo7i1GVuZIyRQoCGcBKJZn4VUOukydjzw7VOTSblf+r16VS8n
i9vn0FbttCQsbHpWnd4KBMYD/8YeyuOjIiqzHfF0LS1ydRH8+Tz1EsxoNrghuDSsco+/x0OjYzU7+f6hSrCGNvNGQnfxH1mOPEbrkJbgwDA3IcV1biCIgqPU+n5bHLlyXyp9Ir3ULX2dnYRFD58pRKcs0FQew34rpShrt3r5ARx5GbYch1/Pbbso9PfUpSsCq9UyzK9To/L4uOixfhs5+V9y4W5frrFrPm83JdJhVrDzYSIvKA4+zsHK9861Vmz8/hOC62nWJm/xRfevlFDs5MXQ2xKkxPxxUoN4peMaoKrTqOlMkq6+hsNnYr7Y6CnDgRr5qUINWyZBBUze2yWflbrcZCOVV6WyjIqu/pp2O9wve/L/u3bXnPkyfl86mBWTUEU26SSRvzm0OnI79PJhNHLNRvpozNbFv+3ys63W1SUVER9bzyukh+l5tDf78cy6UlifipBcH0tPwmp07JcyMjseX76dOSzsnl5DY7K9ctxGnMVkv2qzx8XFdSNQcOyHW8uhqLWzsdee9aTUqKDxyQ6GS7LaTmyBGJnngefO1r8t5PPSW//d69cWpXVQSp1G6CBxMJEXmAcXZ2jt/87a9S2awxPjZINmPTajucOHmOpaU2n//MX2BsbAj48CmJ7ooaXZfwq67H4jOl81Cle+WyDIZqoOr2M0inZcCLolhDoqIgjrM9ZKzrss9sVsyVVPi5WpUVWBTJ82Eog6quS7jYceJKgvFx+Sz79sVVNAluDN0VMyosn07LuaCs8lVlTHdEBHYnIkoforxIVLl2gptHqSTHc3FRIguqQePoqFxTp07JtacWDfW6pHP27RPdiOrgq3x9mk15TLU2cJzYIyYIhNTk8xLN8Dz57Wxb3r9el/1+7nPwgx/INXr5spw7qZT8//Rp+fuTPyljgG0LWVHjy/x8LHZPUnUPHhIi8oAiDENe+darVDZrHJqZQrsy+uezfezbM8PS8gZvHj/Ff3Z4gPHxm7+ydxOjttuxh8HWlqyu0mkZqCxLJqStLRGkuq4MSiqyoUL7tVoczehOxXR38y2XZcD6+MdlkAMZ9N5/XwbHqSkZxJaWZD9PPCEERKUKJiZkP0mH0A8HpRdIp2MBsWXF0SpVhdErVr1eakY55KrHEnfVjwZVZTY/LykQFe0sl0XIfepUXLo7Pi4T/oULci0/8oiQDlVer0ThKmKprlF1PalUzfS0CFSV54hpSlTjzBl57Lnn5P6JE/IXZBGhTNFeeUUiOM88I+fB8LAQJRVxPXPmWjH5B6WeE9z7SIjIA4qFxTVmz88xPjaIpmlEkUbgjROhoWlQ7re5vPA6QXgIGLnh/UZRnMOFOIcLsWV6rSYDYF+fkIVyWSIXrZZoSNTr1aopm42bpjWbMtjl83Eqplu7ks3KQDg9Ld4FSl1/8mRcCnzwoKywFBF66ikZiFVH0YkJeQ9VWZDg5qF8PlR7+O6uuxCvnHdKy/QamnULVru3V5NZgg+PfF5I+dycXHcqBVkoSHnvqVOyaAgCuabW14W4DA3FLQ0uXBByoZoaNhpyTasIZb0uv5NK1UxPxy6qqqpG12Mh68yMlNV/73uyjwsXQiyrRRC5rK3lCEOLhQWdT39aIjtKzFqthrx7osrcgksmneLZZ0rMzS9cN/Wc4P5AQkQeUDSabRzHJZuRnEMU2UJCiDCsJTKGh7Pm0mi2P2BPMZSqXUHlnptNIQKVigxWxaI8ripiFKE4cUJIRbUqE04uJwOi70v0QnVsVamYbqt2XZf9ZrOymnviidi++rXXZHDUNDF0eucd+UylkpAQ1b48DGW1p+tJ87qPCtXIzrJiUhqGcclnN8HrJSM7HXclUu2usOkuy07w4ZHLCTm4dEnSHyp6mclIGubUqfj6279fiInqR3P4cFwSf+mSEJVUSq7hbt2I6lcTBLI4GByU9zh/Xn5H1SBvc1OilvW6VNX86EcbvPHjBs1mmyAICKJFNrceYXCwzDe+kWXvXomizJ6LtW5RME
wqledb386wWV2n485dk3peXFrjl37hywkZuU+QEJEHFPlcBttO0Wo7FPJZNM3BtJbQNKmRbLc62CmLfO6DlameF/Kj16u02i5p26Kv5GOmWiyvZGi3hpib06nVYjfTfD62Vm80JBet2pc3GjJ5pdNCKprNOMyvoiPVqkxuqpzTNCW6YtvSsE6VG3qeiFIVgZmaEnFspyO58KNHhZSA7GvPnpiEJCr8D48oil1t83k5/op49Dqp9nqCqLJdhW7vkF6799361CS4eShH04sXJeIxPh4vGJTXyPKyXKczM7KgUCmVQ4fkusxm5fnNTfld6nXZr+p0rVKt6vwYGpK0qUqJqvNE6cMWF5tcnH+TytZpRgY+hWll8b1p1tbP03YW8LwnCMM0p041ODn7HSqbC1cIR0SzucXJ95dxXJ/Hjj5OLttB1zsU8lkOzUxxZnaOr//xqxzYP5Gkae4DJETkAcXE+BAz+6c4cfLcFY0IgMwSURSxuLzBY8cOMDE+dN39vP7GMn/252dZWtmgVq9zae4N0CJGhqcYHXyK/r5JRoammJrMYRhxFMS2ZfBRqx8VZrdtiVSYpqy82m0ZnLLZWGyqUii6Hm/f3y9GSXv3yudqNCQSokSpmYyQEN+X1d/evaLSV1B+KIcO3Y6j/XBBVbNkMrGTqm3H0Qxdj6Me3YREoTcd0/u8QhIRubVIpyXicf68RDbVtdNrfHbqVGx8FoZy/9AhWQDYtkQ2q1W5rpVuRFnDB0FMOlSqZmJCFhKXLsVNKtPpkKWlCIKjlPIl3jrxb8hmHqO/dBhdy1GpeFjG6wyUP86lyzUi/1n2ThwllTktxJUOleoJTH2ApZU+8vkpDMPBMNfRNI3x0QHOnptjYXGNqckbTz0nuDtIqOIDCl3X+dLLL1LuL3Jmdo56vYkfBNTrTc7MzlEuF/ni517cdbXQ6cB3vrPG73/1TS7PLxOES8wtvE6r3SGbfoSU8RhEwyyvbHHq7HFW1+eIWMH1NoiikJUVIQqVityCQFbPfX3y/6UlGehSKXk8CGSl1WzGk1mxKARkZkYU94qELC/HJGRyUgbCkydlHwcPympPWUlblmxj2wkJuVVQ4uFsNo6ApNOxMNUwhGh2l+52G5p1d+tVj6ltVcpHpWm6X5vgoyOVEpIBcg2q0n1NE6KhzAfPnJHHVcnsmTOxVuPxxyX1Ui7HhLLRkGtVmRA2GpLKXVkR4tHXJ69V1WntdoTrOQShTac9TF/hC8wvvsep2f8Dz28QYbC2nmdz6xK15nkydj++P0Sn8TxBkMP1PIIgxEzVmVt8nXanQxjG7meZbBrH9W4q9Zzg7iGJiDzAODgzxS/9wpdjMddqBTtl8dixA3zxczuLuZQYtdUKefPtU1Tr6+yd1nn9x3NEUYGjBz+HRoEgSDO/eAHT1FleOcuJk68yOpqj3D9Cue9x+orjEGWuko1CQQa1jQ2ZyFTn2+5UjBImmqaQkHRavEFUOSHIgDg3J/8/ckTCzJcvC+E4ckRee+aMfA+VIko6fN5aqKZoliXEEUSH0GjERCSVist2d2ps15ue6SYhhrGdiKgy7gS3BpYl5H52VohCGIquQ9PkGrJtiZrMzYnx2dCQXLfnzklk8fDhuERfRTaV/qNUiiunVBdfEHLT3y+LiUoFFhcDwtCEKEDXTEYGHyFj57kw92e88c4/54lHfhbTHGFuwYEoj2mfJwomCcMMXmeGlDGIaZ4iiiJa7TYr628zMjhA0cqgadpNpZ4T3H0kROQBx8GZKQ7sn7ih8rZuMWpls8rlxR8zNJSl3gjptHNMTz5PFEa0HYd2Z4NOe4uOu0m9tUrWHqDTSpMefop6LcJpuWSzJsWiRX+/rJYXF2VwsiwhIKYpuWhl1a7Ej916kCNH4mqKN96Iy31Vd9GVFSE0Tzwhn185u6qKnf5+SRcluHVQ2g1dj31islmZiMLw2j4z3aSiNyXTrRFRUKmdbnOshIjcWphmTEZUCe/QlSyt8t
WZnZWoiao0W16WRcrYmEQX83khoKa5XTeiKtuUS/LWlvztdCSKMjwcMj9/AT8oo2smmWwZz62Tz40xOf48aCbvnvo6+Vw/tv1TZNN9+E4JM1VBNx1Cf5SUCfunfpp3Tr5CtV7lxHvnmE3PkctmGB8bpNFs8/wzRz8w9Zzg3kBCRB4C6Lp+3TxprzPq8DBEWhXHaZHN9LO5mSGXyQIulxfeQ9OKuE4Nx+3QbG9Q7htnePAIg+WDeK5BLtuH53dwPZf+/gGaTZ2trdh3Ip2WgU9pRCDWieTzMiA+80zc/db34dVX414zjz4Kb74ppCSfF1W9aj+uaTLYZTLyPcrl23poH0ooYalhxB4iyi5cuapalvxuiogodKdleiMl3ekbZf2v9CjF4p35bg8TTFNSmWfPyrUYhjFpHx+X3/HsWSEgYSgEZX5eyEmnI9vkcnLdnjol54IyujMMufaqVSGTSj/ieZBOd2g5Var1JbLpA2T0IqaZwfOaWGYfY0PPEYU21fplvv6d/4VPPPsL9Pftob+vD8PwMFPzNJs2UWgxMfoJRodb1JqvsVWtMze/wsnTFxjoL7F/7wTnzi8klTP3ARIi8pCj2+LdMCR/rOvQbGVI23lajREsMyIIN1hbr+B0AkIqNJor+H7E8MBexkaexLaKZNJlNM0gwME067Q6DVZX+/A8/WoqJpORVXS7HYsRu1Mxhw9Lea5qD95qCQlRqZapKdGHNJuyzQsvyGC5uSn7UXlqVRWQ4NZCRSmUQHF5OSYUqoRTuaqqkunu4JuKduykEVEkpZusdDqJqdnthGFIdOPMmbib7vi4PDc4KIRydlbKe8NQxof5edm23ZZUy6FD8pu/914cHQG5zvv7hYSoEl/fh1ZbJ4ry9PXB0tIpbKtIX99eokinmB8nDBbZu+fjbGwMYZo5/vy1f8nw4AEePfxFhodGsO0JVlbeZXl1nqmJ5+g4FSzzM2xV/xzDcEgZOqVSjqWVdX7zt7+alPHeB0jEqg8pOh1ZxahBY2oqtk+OIgiDIabGn6KyVSOb1cnnMtQbDkEYsLZxgs3qZYYGZhgffRrTyJDPjxBGIa7XIAwqaBiY+gS1WkgUOdhph0wmvFoW6LpxKqa/XwjKc88JsVAkZG0tJiGjozIwvvGGvH5sTBrcqe+QSsnzti3fJSEhtwfNZixUzefjxneq2Z2ydzfN7a6qvd13e/92Q6V3okiIaDvRG95WdFeT1WpCNBRKJUmPDgxIxPHMmZiodDpSzmvb8QJicFC2VRqiSiXuMZXJXLHub1qYRhnPLaFpBrXmKpcuv4rjtghDn/6+CVJmhoH+/UxPPs++qY/T6TR49c1/xvziOTa3auj6KNOTj5NKL+B4HYh0ZvZ8ikdmvkwmnabddhgdHqCyWePrf/wqYXcNeYJ7DgkReUARhiFz8yu8f/oic/MrVy/EKBKHw4sXZbtCQQaaXE7ut9syuLTbOs88eZh8zmJhsYphGmzVlri08CPanSbjI09QLk2TSQ9QyA+jaVBvrFJvXkY3CgRBP36g0XY2qWwts7S0ztxci3o9uNomPpuVQSubFXOjxx+PVfXnzokHSBTJIBeGUo7rOFKC+NRT8nytJq9RnXz37o2/S4JbDxV6V/4REPcOUqkY5Sni+3ETu24iYhjXRkTUX0WEVVlwq3XnvtvDDF2X6wyE6F+6FD+Xy0lFzfCwEItTpyR9qn6j06dlu8OHZTEzPS3bqoVGrSb/l867IRE+OgV0rUC5f4pctg8/8llaO8FmfR7f97DMNJnMAPnsAPv2vMTU+Av0lY7wzqnfYX7pu6RS0N83jNMeo1Zd5/LSt7FSBul0nv1TP02t7rO+sbWtjDfBvYskNfMAYreOuz/x8ZdI2+NXt+vus9Jr3Z7NwtjYEE88epQ//s5rHD/5XTYqa2iawcTIUwwNHGZk8BFMK0PgO2xUL+D5bSZGj0KQo+20CUNHrNopEmERBDYQYFkRhYJ55T1EaDo1FX+OH/
84tpNW9uznzsUrt337RCPi+5LOKZflOeX0muD2QQkQLSv2e1Hdmnvt3ZWvyAdVzKhtFUnp1pWo90hw+6FpQiZkISILFtXHybaFjKiuvqdPy3PZrFyrZ8/K9XfokCxusllJ6TQasc4nDB3qjXXqTQ0NEztVwvMsSgUT13Vptbeo11eo15YYGjyE73XIpAdotNcZG3mctF3ED4dYXDnFwsr7HDv0sxh6keHBRzCtiHOX/4hy6RCZ9CiDfYepbuXYM2XjuBVq9SZz8ytJP5p7FAkRecCwY8fdlsfsrMby0lu8/BmLJ54Yoq8vbhRl6FkCfxBNkwvTtq/0hVla5Qev/4B66yyHD09QLAxSyO0hbe+nXNqLaaXpOFWazQ1Sdp7B8gyGYdNo13CdOsViCp1BwEbDwjAC/MAjjByy2T6OHtU5dizu+hsE8MMfxiZnTz8tlTFzc3H4V6Vn1OdUZYczM/EEmOD2oduCXUUrbDvuwgoxidgtNdOtBeneXj3eHUVX5CbBnYEiI2fOSPRxdlauLZDrS5GRhQUhH3v2iMB1ZUVKfqemZHGRzcoi4eRJed36ukOtVsMPUrTbS1hmgTAMSFl5dM2gvzRNyirQbK0RRAELy8dxnCYD/XvIpPvx/Bb5/DApa5RUqkCtscj3fvTPePLoy2TShxksP0oqlefC3DdwnfeYGH0C3zPZqvThuav83le/w9r6VtKP5h5FMnQ/QNip427g95G2c4yNwuLSCu+e+lPGJ57nX/3Oa8yen4NwHMvKMDYywCdePEh/3yiOAwuLq/xv//KfcPrsOVKpFOPDT9FfKjLQ9xhpu48gCKhsXaLVXmdo4Ai5zAAQ0Wxt4XktTNMk8Iv4mo1mmBiaDwQYRgPP85g5qPHss/1dBkfSIlz1oDl2TKIea2ty/4knZJB8+235m07HDe+SvjF3FoowqIoZVaoJQiBVaqbb3l0REUVAepveqZtpbicfSdO7O4/uyIjvy1+VttF1SeVqmpCRc+ektFc11pubi6vVbFuJWEPm5tcIozRB6JC2h2g0VkFrkM9CBOTzwxiGhWFYNFvr+EGHjrPFmfOzjI08RjpVIJcbwPN8ysVHSVv9tFshC6vvEPjH2TP2MoaRZ+/EX+Tc5W/Sds4SMcLCUgrPK3HhfEh/2WRibJB2x0360dxjSIjIA4TejruBXyQM80CEZa0zMFDlreMrnDl7ET9IMzr4KCk7heu6XLh8kcpmjZc/8zy5fMBv/dt/ylvvvEfG7qOYO4JlTFLuP4xlZnDcFtXaIp1Olcnxp7CsLH7g4Ptt2p0a2XQ/6UyJVKpAFIWEfgdMD0Pz0QyXWv01hkeexbZFlVqpwFtvyWQ1OCjujj/4geSWi0Wxdt/akhWX6sCrBK2qQ2iC248wjFu7FwqyCgZJzSjRs67H6T4lOO1Gb/fdbih9yE7+I0pXlODO4fBhiXoEwXYyoozPTFOIx8KC/G779kkadXU1Lu89eBDWN6r86M1FDL0fx7Hwg4BcdhDXq7NVmyNlFQgCn7RdxDBSBIFDFIVoRZNspo/ltZOUS3vYrJ5jbOQYZmiSz08yPmLiByM0WvP8+L1/xZ7xlygWJtg/9Xm2ahc4e+FVMkvnmBh5kk7HorJlcOHieQ4fGkn60dxjSIjIA4Tejrua7mNoNXRDXKfSGZtLcyvs3/M8B/bvubKybaLrOSbHp1haXuOd9/+MjUqF906dY7DvCAPlKQrZKTLZUSzTpt5YotZYIZXKMj72BKZh43lNXK+J53UoFcaxrCwpK0sQeBA5dJwahgmlUhM/nAXtEsXCJwnDkDd/vMW58xq2ZfHkk1kMQ79anjs4KFU0Fy9KXhqkUqNUijvtJiTkzqHRkAlGVcxcuiTHP5XaXgWlolyKiPSW73b/hWvLd5UjK8TpHseR901wZ3HwYNxB99Sp7cR/ZkZ+60uX4t41MzOyfa0mqbuZGSgPVGl1LtJsLkI4TNou4YYBaDrpdB/N1gau1yQIHNLpEv
nsCJpmYJlpOo6NZWVY2zhHrXYJP2gxPXmUtD1FJl0knR5hes8BLKvE+UuvU62fYe/kZyjk9vLooSneO/MHzC+/TjE/SSY9TBhm+ca3f8iLzz2a9KO5h5AQkQcIvR13dX17ycHaWkA+e5iBcj+6BpqxSRiUgQjDaFEub3L8xAqX5iqMjTxN1p4glxnBtgdx3Aar9TPUm2sMDRykmB8hiiIct0HHqaFpOqXCBOl0EQ0d1+sQhh1CWmh6yNLq66RzAStrl3ns2AFa7Q7/4B++zvp6hB/4BNEpzl18hOHBx7FTBaamxNTs9GlZYSnfinxeJjvVLyPBnYPqMVMuS7pMCZujKPaEUY0K4dquuuqx3lv3cyoq0v36KIoJUII7j/37hWyoirpuMjI1Jb/3hQuyWAhDIR+XLkla59QpGZfazlnml3wMfZDJ0WeIsCCCKJLQl+e3CVsRURRimTaF/DDt9haapqO5JkMDB0jbeVZWXydtp6g3Vhgbfoog9Gi3BiiXDmPPZLg0/y5vvvtvOLTvi2TSfTxy8Musb5yi7ZzHDzaxrAKNRotXXz/B5z7zXNKP5h5BEo96gKA67i4urRN1xcSjSMNzx6lupbBTFv39DmZqAQ2DKApptc9R2bqA5we0m3ky1gxjQ0fRdQ3LKtJqV2h3tmh1NhkeOEghP0wQ+Ph+m7X1M0RRRC47RCbTRxSGOG5dtm9vEQYBa5t/zuXF73H2/PuUy0WOHNzLb//LSyyv+GTSKQaH5unL72d1tcTZs0sUChs8+6wI3VZXZaDr6xMSks0mJORuodGIUzMqhaJKdx1H7ltW3Beom1SoCEevYRnEk5oiLYp8qAoax0m8RO42pqfjsvjTp7en3IaHJTrZ3y/pujNnJL2qiGOzMcTYyAgr62eobL3P4uprhGGLVmcdz2/TaK5immmCsE21Pk+zXcHz2qTTRbLZAfmb7qOvOEGptJfZi99nszZPoW8Zw3Bx3RqG0UfKmqFUOIJGihOnv8qZ89+WdO/AEYbKjxMR4npbFPJZWq02756YTfrR3CNIIiIPCMJQqmAOHdzDmdlLnJ69zMToIHZ6EKeVpbK1QbGYxY8u0HENrFSWzeo8s+fnqWzWCAONvtIRUtYY+ZwsaTtOg83NH2GaaQwzRT47hK6b+H4Hx6lTrS1QyA2Tyw6Qy/Tjew7tziaR5hMGAR1ni/nlN1hafYsoCjl25Cl+5qdf5o++VqXj+JT7bVLpWTxnL4YxRiHvsb55krMXPYaO/wzVqk4uJwNgKiUpmbGxu3ygH2J0Tz6qrNa2hSQ4TmxipfrMKI2Hae7cdRd2j44oP5IgiMs/E9xdTE1JCqZWEzLSLRLv75ff/fx5EZgHgURSMhnY2NB58rHP8N0fvMP6xhrtThOIyGemaLU3KebH2arOUcgN0+5U8fw2uUwZw89g2wUyqSJOp4ZtF0mnBpkce5ZW5zyut0CzU6Fe1ynm96PrBhOjT+D5Lc5e+C5btQV+/O6/4cD0TxCEHdLWAB1vHd3Q0XSd5ZUKn3jxiaQfzT2AhIg8AOj1DXFcD6cTsKoXMa0Qy2xzYL/NFz//ON/4Y58TJ88xUC7y9jtnaXecKzqQScKwTBQGuG7A4soZwjCFphuYVupKqsWHK8Zl7c4mudwQQwOHMCwb12vT6myiEQIRq+vvUamdoNa8SLvtMDhQIpsZ5PXXU2xuNinkfVL2Aq6zn9AbQtM7mKlF+vsqzM9PMT/WYmwsTz4vE9ngoNwS3H1oWtx1N52OoyHK+l1NTiqy0Vue20tIVOSk+9ZdQaOswRPcfYyPy++1tSWRj4MHYxFxPh+Tk5UVOSemp+U1UbSPT338L/C9177Nyuocsxd/yNSYVNA4ziZ2Kku9tUYU+oSBxlZ1kVxuiFZ7E9vKkk4X8P0246OPUW8uEdXg5KmLaHqHKJhis7LM6PCT6FaaqfHn8TyXC3PfJ4pCZi/+KWm7iGUZZNIRERFhEGJaJs8+fTQRqt4DSIjIfY
6dfUPSbGyAZZq88NwjvPixMpMTwvpXViucOHmOP3/1OFEIB/Z+jDDI4nkZNM1lbGSAs+dPQJTBtqTixvXaEEWkrCy12iKtTpWRwcOU+6bRDQvPbdFx64SBBxqcu/QdllbepuNsoGk6qZRFuXSAixdKLNrnqNZnmRnI47kHiUITTW9h2RfRjRam8Shh4GCYHYrFPLou9u7KayTB3YHvy822ZcJpNOTxbFYe97w4+qEEpt0dd7vLd3eDIiHqNco0rd1OTM3uJYyOyu9YqUhVTTcZUf2idF2iJ74vaZp9+3Q+88ln8P2A85ffp1K5RLtzjigMcJwm/X0TFPN5XDfE9dtARK2xRCE3SrWxTDZTRtMtdM0gZRYp9x1mvXIGTTNZq7yGrvXTdlvkM8Pk86MMlg+wsn6KVnsDDeg4NXxfx/dNDNOgVMxx9JF9HDuy724eygRXkBCR+xi9viFg4HtjpG2N8bGQS3PvsLrhMznxc5w7v3A1arK+sUmrCeW+g7SbJYIgIKJDvblMZWuJUq6PlJXBcV08r8NWbZ6B/v1s1ReAiFy2n4G+fQShR7O9fOXDREQEnDz9hyytvoMfOORzGbLZNFPjHyObHqfZbLFWOUnH9ZkY+TyplIlpbWKY8xBlcTsz+G6ApjXo7zfQdRnE8vm7eZQTQFwxk8nI76HKdXM5WR0HQVxiq0iJimLs5KzabWjWHUHphmXJY4k+5N7D8LD8buvrQka6DQUtKxa0LiyIkLXTgU9+cgh4njffLrGwPMGlubdZXD6HxgCmFZHN7Ac9Ip/pZ7O2jKmnqTeXSNsl2u1NDMMkiHxsu4DTqTIx9hTrG6cY6LO5MPc6tfoKum5iWVlGh48xNDBDrZFnq3oJDchkbIIgYniwn8nxYZ558kiSlrlHkBCR+xjX+oYUAA1N8zCtFUZHMpw9N8f3Xn2Hr33j+xI1GR0km95HGKxjaP1s1hbpdLbQDZOs3Y+h5XA8C9et0upU6Th1XLfFyTNfY2bvpxgePEI2U8bxGnScKrpuARHV+iJLq6+jm3Pk8xYD5UFAoy//NJZRxnU9js/+AROjhxgZfIzNap3xcQ8rtYLv9+E7e4iAzfoCkxMFBgdKTE/H9uEJ7i5Ux+SBASEinY5MNN1ERJELJWT1vGvLq3v1IN2pm+4ICsS9TBznjnzFBDeJwUH5zVdXxYG1u2WEYWw3PpubE2L6qU8NMTo6wHunLhD4Ftnsu5TLRVZXG9Sac6TMEdarc/T3TVNrLEIY4Th10AxanQqO26SQG6bcN4rjtRgeeoyNynmmxp9hdf00HUc0Jhcuf5+p8WewzMyVEnINXTekQ3dfkenpMb74uReTtMw9goSI3Mfo9Q3R9Ta60QQ8avUm7Y7D5maNb377h1Q2axw8sJ/AHyH0bdIpncXl91nbmKXtbHJg7yco5AcJo4hWu0GtsUgYBmxsnkeL4NEjX6ZUGMdOlWh3tnDcBvYVw7K5hTdYWHmLtrPAoQN7CIIQyzTpdAoEgY5pOFxc/DaTY4+TS4/juD715nucv1RlfPgYhjFJ4Hls1hbI5XWeffII+/frV8tAE9x9tNtSoqvrMsko8ajqqNrbyE6lcrp1H7CzMFURmN5UjprUut1ZE9+Yewuqz9Pysrisdvd7UsZnqj+NMj47eBC+9g1p8Xvs8KexUkvsmQip1TzW1l0azYit2mXyuRHanSqdTpVUKkfKyhAEHhtbF7BTJrZdpFZfI58fJpvpQ9cMNrbOU2+sQBRweeFHaJqOaRpkM+krlgYazz1zlP/0//Ry4qh6DyEhIvcxen1DNN1lo1K9WgnT6Tg4jsfcwipPP/EJAn8UMVT22dic5fLC6/iBy+jQUfqL03i+Q8dpUmusEEURC0tvMTryKGPDx8jYfThOg2arQi5bxjJtIOL9M6+wtPYurldjdKSMnU6RapusV6pokUcYvYvj1hgbfo5cZpR2p8nC/Pf42At7GRt+ifU1Cz9ooOkVpqb6eeaJw3zyk0
NJ35h7DN1pE1VSq6pjugmKSqcoF9Zu9PaWUfvtjo50P68mNKU58bykqeG9iL6+WBNy/rw4rHYvIg4dkvsLC7LN2nqVSwtvUy7vATRq1TKuf4FUSqfRvMzmlkYxP4LvVbHMFOvNVS6ffZ0jM18gky7RaK6yvPY++WwJL9CwgxKmmWJ89Ama7Q3qjRWunq5RyMjQIC889yhr61s8enQ/v/orv4CZDDD3FJJf4z6G8g05cfIch2amqGzWeOv4Gdodh3w+g+f59PfnMbUplhbbZNMN+vobuEGNtcrbBIGHruvUmyusrp3ENPM4bpOOU2N1/QxT48+Qyw5Kq+9z3yKdLjI0cAiATqfK7KXvsrB8nDB0mJwYYXx0kFw2Q7PRotFokcuG+H7E6NCz5DKjOE6V2Qt/wsR4CS06wP7pJzm4DzK5NrmMxcBAiUOH9MTK+x6GpsWajXRaIiOqWkYREdiuEemGipqoqIjyC1FRE0VoVERE02Ii0m4nROReRbEov938vGhCetOq09Py283Pw7kLQDhJx1nj7LlzeG6BytYsaA6+53N5cYE948+A5mIYJS4vvk4QuLx3+g+ZnnwRCAlDHTdYp90y6Gh1cvlhUlFIqTDK2saZq++rGzpDQ2U2t+rs3TPKz//sywkJuQeRJMjuY+i6zpdefpFyf5HTZy/z3vvnabY75HNpGo025b5JHnvkJ8jlstQaG5w5/33AI5UKSdspdF1D0zRa7U02aws4bouNrXOsbZwiZWXRNI1qbZ63T/wOjdYqlpVB02Bl/RSzl77L5YXXCQKHQj7H/r3j5PNZRobLpFIWmqahaTn6CofQ0Gi2VnnvzCtYKXjysS9BNAB4zMwUmJ4aZnCwnyNHEhJyL8LzhAyk0zK5NJtCFNLpOGUDQiCU6ZXvX2vTDrs7qkIcHVFGZr1maImXyL2NfF68RkCcVVV35jAMmZtfYat2kUx2nYEy6NoQFy+YrK2v0XbPUygYpNM2ruejaRqX5t9gY/MitfoCU2NPYadEsX5p/lUazVVSlolpFKjWl/BDl1ZzGQ2XbNbi6KFPkcvkr6T7IhaX1lhdq5BKWOw9i4Qa3uc4ODPFL/3Cl/nf//23OHHyHLqp03E8Jsb2s2/Po+RyaRaX32Nr9RKOm6JWb1Is5Bge6mdltYIfBGTSNmgNltbeQtNk9He9JovL7xAEHSwrz8ToExQLE1Sr81Sql9jYvICmaWQyNsVijrPnLlPIy8Xfarv0lSbpK04QhCGX5l+j3amTzxf41Md+FtsYxNerDJT1q2ZXSd+Yexf1elwxUyhAtSqPp1LXmpnl80JMuvvFqL+wc8O7bo2I8g8JAiEzytQsCBIicj8gl5Pox6VLcPkyuN4Cf/q971/1OLLtFHunZtjaaqBrA0yOPUnbmcP1KtgpnZHhMiurFZzQZWn1LKVilcH+A4wNP8raxlma7Q1q9cukUj75cA99xXHqzXUK+UHaziqmaaDrOiPDj7O+MUsQ1fnki0/S319gaXmD3/ztryYdd+9B3BEi4jgOL7zwAsePH+ett97iySefvBNv+9Dg4MwUP/uXPs2Zc5cZHRkgk7bJ5wYh0tCNRfbtHaCyucZ6ZYutrTq5XIa9e8Y4PTtH0O6g6dqVySEk8IWIRFGIYQQYpkV/YQ/53DAblXOsrL9Pu7MFgGWZGLpBp+0wNNTPs08dIZ220RhhdbVDFIY0O7Pk8gZ7pw9x7NDnSKcHWN+YY2Kij+HhIoYh4rUE9y5qNSEB/f1CNLobEDabEjFxXYmQ5PPiL6Ga18F2k7JenYhKxyhTM+Uf0ukIEVGpGcch0Q3dJ8hkYO9eePXVNb71nbepVFcYHSmKx1Hb4cfv/JiLFzcYHnqMlD2Jbe3B9es4nQabWw0GyiVq9Qbtjku9sUbHaTA69BjZbIkwamDbFo3mBvXGJpPjTzGSncQPAhrNVRy3TqvVYqD/IJNjR7Fsn2wuTbGQo5DPJh1371HckUv77/7dv8v4+DjHjx
+/E2/3UKJYyNHfVyCXFXU4tK/cYKBc5Mihad4/E9Fsdzh/cRE7ZfHFz36MN98+xXpli07HIQzFdTBlmfhBiGWZTE88h+sFVLYuML/0NkHgXvGB0EnbKdJ2ioMHppg5MEm5v0TgTXB4Bix9josLrzM5VmZifC99xccJXIu1jUvkcgbPPHmQbFZn7967edQS3AgcR4iBrgtJcBwhCNmsEBFlaJbLxRqR7iqYXhHqbuW76jnVy8b3Y0FsqxWnfRLc+0ilQt45+ec0mgHTk49hWpvoeotCPsvk+BDvn76I659H13OEYYFG3WRlbQ2ATNrGTlmYpnllsaMTRhc5dHCU0ZFpMhmbra0Gm1s1TLPB2PBTbG422arlWFh6myBs0nZnGRt9AdBImdL0RtO0pOPuPYrbTkReeeUVvvnNb/K7v/u7vPLKK7f77R5a9ApXta7RPYoi2h2Hv/jFT/CXfuonaLXFbGxifIizs3P8zu/+MT968z0Wlteo11oYhrihDg8+ShCIZXtl612m9wwxOlzGdTzGJ4ZoNFoMD5YZGx0ANHxvAtDQ9ZDxiRa1VoHxsf0E/j62tnzQNpic6OOZJw9z+NAQExN362gluBn0Go2pFEk2G3uG+H7cUwa2Ry92Kt3dTSMCsUZEiWA9T94zcVe9f7CwuMa5ixcol8to2hCBXwYzQtfbpK40mqvXqzz1WJtWM0e9OYyumRjWBlEYYVkmYRjiuB7PPHmE4aEyxYLo1qIoYnOzzksvPkEUwXvvn+bwoUdpNoeYHB/h3fdfpVTUWd04zsTYfgqFIiCsOJNN46xWko679xhuKxFZWVnhl3/5l/kP/+E/kL2BHt6O4+B0uRfVarXb+fEeKCjh6uLSGmdm5xgfHSCTTdNudVhc3qBcLvKllz/O9J7tXeMOH5rm53/uc6yuV8jns+RzGRqNiFarRK3aYLO2TD7X4rOffo7BcomllQrlcpFPf/IZfv+r32FkuB+AWq2N7zUxLQfPX+Ls+TkqFZOh/v2kLJNctsPjjx3h6JG9DA7qDA/fjaOU4KNAVbmoNIkq0VTC1O50jIpqwPbOu2o/vWmabpdVFU1R3XxbrUQfcr/hqsdR1kDXl/G9UaLQBF2it0NDfVy4uITjdZhbegOiCUaGJomiIS4tvMXoyAAH9k7wp9/7MWdmLzMy1E8QhteMZwBLy+ucu3iC8ZFJisUB+ksHaLXXyWQ8pvfk0fVYpNRudZKOu/cgbhsRiaKIX/zFX+Rv/s2/ybPPPsvFixc/8DW//uu/zq/92q/dro/0wEMJV682wFutYKcsHjt2gC9+7sUdBVphGPKNP34N1/V59qkjhGE/gZ+j3e6wXjnDuyebpOwUnh+wWW1c3Vc6neJr3/g+C0vrLC2vSwff8F08r02z1WGwfIg940/R31/E0LaoVCv8+O0O+/elGR4evwtHJ8GHQacjpCCTEeLR6QhBUGmS7iiFrsfeISqq0btNbzSk10dEPa/KfxOb9/sT2z2ODExr8epzmqYxMTrE8kqFcxcWWF/fIp9vkQmO0WkH9PdNMbO/n4FykeefOcr7py+ysLSOYRo7jmfdY17HWcIySxRywxyaeYb+/q2r7xtFEYvLGzx27EBi7X6P4aaJyK/+6q/y9/7e37vuNu+//z7f/OY3qdfrfOUrX7nhfX/lK1/hV37lV67er9VqTE0l6uabwcGZKQ7sn2BhcY1Gs301BbObMKvXJj4KUmgaFEublPoGGBp8hoWldf7qz32emf2TV/cVhiF9pQLf+pPXsG2LfD6HZerMLWxRzB+hlD9MKpUim6mi6QZjmUEuzZ/g+681efKJn0uEYvcJ6nWJgKiKmVZLiIFty+OeJ2RB/ZzdpbxKiNpr3b6TRqSbnHRX21jW9lJe5eKa4N7Gtani+DlJFbt89lPPEgQh35x/DTRwnLcZHznM5OQEA2WZmsbHh2g7Ln/15z7P6MjAjuNZ75i3urbJ1785R73RxjIHSOc26LSbLCyvY6
dSHJzZw8Li2nXHxQR3FjdNRP7O3/k7/OIv/uJ1t9m/fz9/8id/wquvvord49P97LPP8tf+2l/jt37rt655nW3b12yf4Oah6/oNC7F6beINqwL4VweObC6DYRqMjgzsuE/fD4iiiFTKIgwMBvqeoFiYRgO2au+C9hQAlrXB6LCdWFG2WwAAMaVJREFUCMXuM9TrEo1QFTMrK/K48hDpNjPTtJiIqDSO+tur79hJIwLXRlGU9UO3l0giWr33cSOp4v/8579EKmWyvLJBJmvTXype1YEotFsd0naKmf2T1x0zuse8Rw7vZXx0kD965T3mFzpUqjat1irNdhvP9vn9r36Hr33j+8zsn+JLL+8cKU5wZ3HTRGRoaIihoQ8Oa/2jf/SP+J//5//56v3FxUW+8IUv8Du/8zu88MILN/u2CW4TrrGJ17bbYe6WU/3eq+/ww9dPYJomzVabaq3FnomPUchNYhkR9eZJ2u4q7c40xWKApnmJUOw+hBKKappEQZRJlXJVdV3Rg5imkAblpqqiGKostxs7eYl0O61C/FetS5QpWkJE7h/cSKo4DEMef/QgJ06eozi5nYR8lFTKwZkp/q9/a4LZc+u8+WaF13+sk0oVGR4Or5YRnzh5jsWltcRX5B7AbdOI7NmzZ9v9/JVe7gcOHGBycvJ2vW2Cm8QHVdvsNBCcnZ3jX//O19moVBkdHmB0pIzjWvQX9tFobNFyzmLbLn4rwPWW0bQ+IBGK3Y/ojWa47vbSXc+LCUe3caUiDt3kYqfUDGxvlqfeU70uk5H/K4KTiFbvL3xQqvhGIicftkuuruvMHBjklW99Fz+ImJo4gK6Bri9QyGc5NDOV+IrcI0gsgh5y3OxAEIYhr3zrVZqtNv2lPJquYaf6KRXGIaqxsPIDXK/B1OQIpmmQSskplgjF7j8oAqEqWUB0ISBEZGtL7qvS3VwuJi5BENu1dxOQncb67v13vy8IuVGmZlGUEJH7ER+UKv4wIvsbxcLiGucuXKJcLqLrA0SRQRSl0bRO4ityD+GOEZG9e/cSJUYA9yRuZiBQ4tYD+yZwXY+19SqDfVIF02yfpa/PYnHJZ35xlX1To2Szaer15kde3SS482i346iEMipzHCEO3R4iKj3TLWZVEQwlWlW4ER+R7sdMU95PmaYlp86DiZsV2d8o4jJiG8NYIgzTaJp79fkkXXxvIImIJABufCBQF3Yum2Zm/yT1eouNzUXQNkUnYFtYlsVAf4m+/iIXLi3dstVNgjsLVTGTzQrJcN24gkZVriiCoOzdWy15TBGR7mjHbqkZta8oivvNKAKj9CdBkHTffdBxMyL7G0WvBk7Xt4fUknTxvYGEiCS4ihsZCLov7IFyiaeeOMTs+Xkqm62rFTQjw/38D//932D/volburpJcGehmt2VSnFfGRDSoaAiIoYRt30Pglg7oozNel1Ve4lIt1hV17eTE+VJ0uV1mCDBDeHDaOAS3HkkRCTBTaH3wh4olyj3F6nVm7iux/ziGs8/c5RPfuLJhHjc5/B9iUL09Qn5qFSEGKTT28tslauqqnDR9biMt9thdScSAtt9QZR3iHpdt817YmqW4GZxO8WwCW4dkqOf4KagLuxyf5Ezs3PU602CMETXNCqbdSYnhvnSyx9PLuwHBN0kQkVETFOiE0Gw3ba927hMVc1I+W5IFAX4vkur1SKKwm1kpLtHDcS+JGEoERnTjHvPdH+mBAluBEoD9+jRA1S26py/uEhlq85jxw7wS/9FUrp7LyCJiCS4adxOlXuCewM7Vcwo3Uc2G5uZKTGppsVEAeT/mgZb1RpbWxAENrV6g82Tiywst3n6if3AIBA3yOtueqfITqsVExH1/q4bR18SJLgR3C4xbIJbg4SIJPhQSC7sBxvNphCDbDZOnShjs1xOCIIiIur5blfVIIBms8XcwjkI92BaGQwMTCwWli6zVa0w2PeTGEZqW4kvhHQ6HRwnwnVTuK6BZck5pYhKp5MQkQQ3j9
shhk1wa5AQkQQfGsmF/eCiXheioXrMQOzhkc2KXsR1JRqi60JI3LgqkjAMWV3bouO4FLLW1ccN02BkuJ/llWXSVotiwcQw9CvkxWNxaZNWZ40o0kmZQzTbbcZH+/C8DJXNNuvrPqVSiVIpIbwJEjwoSIhIggQJroHqMaMqZnxfUiXpdGzl7jgh1WqLZiui2YpwnDygo+vQaHRoOw75bPbKHiMgBA00oNyXx/U8fD/AMHR836XddvFDh3TKQNNsoshga6vD/MJxBvofpbK1yMkzZxgfK/LlnzqYpAATJHhAkBCRBAkSXAMlFC2VIJ0OOTu7wdqaSX+/ThQVWFzc4LXX19iqtogii1NnV3j/jMVTjz/K+NgQjhcShWBYJqEHEIEWyl8irJSJ0wlwPY9arUWzFRJFJraVQrdaEOp4vkYQRNRqG/SXQrKZApGW5dLlDX7zt99PeoQkSPCAICEiCRIk2BFBAItLa3zzO9/hwkUHnX2gbfLnry1R3RrAMibIZwvopkkYhly8tMbGxo94+TPPYxp5dF0n8EI0ICAg8n20yKXjtLg8P8dw/wwNrc03/uR1UsajZNMlJF4iHg++FxBGYKcj/CAAN8S0IsZGB7lweTbpEZIgwQOChIgkSJBgG3w/ZH29yoWLNd479Sb11iwTo09h6EX8oMGfv/o2+ewjPH70IIaZRtM87FzERGaIxaUN3jx+mqmxF0nbGdY2lshmPHQNmq0m9eYKa+tncL0WY0MmtmXhBTpex8O3AtBDdDz8EIIoj4ZOrbFFqeDiuB6rG8tkMjqFgp30CEmQ4AFBQkQSJEhwFWdn5/iPX3+bufmApeU2tdoymVyDkcEQy9aJ8NA1A13XqNYaDA+UQOugaR0gQ7mvwNLyOoN9LoZhUKlUSQ17GIZBEPi0mm0azTaGFWJZOgEhYRigaSFRFOF0PFI2hEEDgjK+79NoNghDB02LCMOIjcoW80sXGRroS3qEJEjwACAhIgkSJACEhPzmb3+Vej1HPjtMEKyTSnmsrW+SMivsnx7BDxtomk0uk8ZxXLygiW266LqLH4BlW3jVOksrGywtN/CDEPFNjAgCHy/wMHQNQ9NotztYps7l+SVGB/cThuB6Aa4boRsGYRgQBCHNxhau5+L7bba2Wjhuk+X1ZWlmlknqeBMkuN+RJFcTJEhAGIa88q1XqWzWmJqYxjSz+L6HaTmU+4s4Tov1SgXTbGPbBTTdIggDwtDDMGuAD2h4jovn+szNLeN6LinLwrhqvxoRRSHo0Op06Dgtwiggl7PRtEh0IX7A5madtfUaYRgSRhEtp0kUBZhGCl3XCcOQTidgda3ChUtLd/OwJUiQ4BYgISIJEiRgYXGN2fNzjI8NAhqGLv4e7U4VTTOwbZOVtROguQz0D+E4EVHkXxWKSsO6iI3NGltbddodhzAKCSMIo4goCojCkCgM8FwfPwjw/Q5h6LK5WcNTtqyazkalTq3Wwg+Vp3tIGPr4vksQBOiaRiZdwPcD/vg7PyJU3vIJEiS4L5EQkQQJEtBotrtSHRqZrE/KblBvNDGNDKZp0O7U8LyAybEposjA93xcr0MQBNTrTZaW1/E8j1qjQxhFEIXomoHrtmg0N3C9jhCKUEgJEQR+SMdxIQrQiDB0HU0zCMOQIBCHNF3XqdYWCSIfx22j6RqlQj+6oXPu4jwLi2t39+AlSJDgIyHRiCRIkIB8LoNtp2i1PNI2GIbDnqksGxWbRgM0XMKwwfqGSXVLxzQGQYOFxWU6zgL5whrHDj/CwtIKaCamIboQx23jBQ7hlehGEIUQRfiBjx/6eH4HonYc/bgSjQkCB8dtYhgWdipL26kQhi7zS28xPfE4lpUllbKIIhLBaoIE9zmSiEiCBAmYGB9iZv8UK6sOURShaW0GykWeeuIwQ4OjNJotGq0675+5BMD0nikO7p9kcnyIUjFHLltg7/QYjuuQz2TRdY0gEHIRhqIfiUBSNEQQRYRhiOc7LK+dIoxUek
UjQpcKGrdOu1MFdHyvjePVCEMXz/exrDz5bIZcNk0+l7kbhyxBggS3CElEJEGCBOi6zpdefpHlpR+xtLxOuX+LTC5FyjIoFEr09w3ghz6bW3WOHTpCGOQBKBRSlPomuXh5k3dOnEMjolgsEoXgOA3aTpUyEF1paieEI0LTNED+b5qW6EUiH03TMQyLMAzxfQffd2i1K2QyfaRSeSwri67ppG0btDRPPHqQifGhu3TUEiRIcCuQREQSJEgASEfll3/yJ9gzOUqlWuXchQXmFlfpLxZ5/NgBAEaGytSbLRzXQXrH+GgE5PMFFhZXaTsd+kr9mKZBvblMq70JmobvO4SBTxCIp4iha0ShWMk3mh3qjRW8wENDR9cMgtAn8B2iKMC2i3heBzSdXLaIZmhoms7hg3v40ssfT5xVEyS4z5FERBIkSHAVY6ND/NQXB1ir5PjWn0h0ZKvW4Ls/eJsfv/MWxUKRoQELXV8mm0lTyGWpNxfZqlbx/SyG4eF2tugrFvD8zpXIh4bntzl/6btEWoSpa3SiELQr9u+hRnBFvKrpGmiaCFU1DcuwyWVKOE6bKAoxjTSZtMUzTx7hv/jPjiW9ZhIkeACQEJEECRJsw/LKBt/4kx9Q2awxOT6M62S4dPkSjUYbjQz5bIcodKluNbjsdWi7KwwPjpBKmQS+T7vlo9HE8xwMwwIgikLWty7Q7mwyNf4UYeATRREA+hVNiO93iIwUGjqe10HTDTQrh2Fk0M02KdNiZv8+Xv7JT/DCs49ycMa4m4cpQYIEtwgJEUmQIAEAnifGZm+98zaVzRqHZqaIwhSX5yp4foNiMUfgp2i3XXK5HIZh03EaNFtVqtU+DD0CLWCobKNpGmHoYZlpQPYb+A5RGBIEAcEVszI00HWDKArxAxddN9E0nXprlTwDZOx+UlaWVnsdPwzQI5MTJ09RqdSxUtMcOphERBIkuN+RJFcTJEgAQL0OlUqVy/PnGR8bRNM06o2IRqNFKhWg6xqWlSEIAjzPRdPE9KzVqtLqtPGDgJRlYxiyvgmvlOf6fodaY4lme6Pr3aRyBkDTdBy3gee1AXFf9f0O1fqi6EmMFIYJhqZjmgaFgsHl+WX+f//yFc7Ozt35A5UgQYJbioSIJEiQAIBaDdqOS7uzebWHi+vXhXj4dXw/IAg28Lw6vu+jaykA/MBlfeM0a5XTtDsOjit6jiAMqNaXaHe2cJyavIkm5TNhGBFGAaCh6wYXLv+AjlvDcZq4XuuKtgQ0zUDTIAgc7LSF6/hEkcvY6CDVqsfX//jVxFk1QYL7HAkRSZAgAQCOAxk7hW1btNoOACkrpNo4heN4hGGErge0nTkyWRfT1DBNE13TiSKfRnMTPwhot7eoN1YIQx/TsGi01lhdPwOAhiIYIa7ToOM2cNwGrfYGjeYqQXjFTfWKI2unUyVCA3yymTRoBq7fQNM0RoZGOHtuLnFWTZDgPkdCRBIkSABIpmRgoMTM/ikWl9aJoohiIUu5P0+746Bp4LgemYyNbTfwgwq+Lykb6ScTsbp+imp9kYiQMAoIQ/9KdESITRgFRFGE67XQDQ3PbV59/06niuvJfV03WVp9F8drogHlcpq2U6HVXsdxWkRRhG0XcFwvcVZNkOA+R0JEEiRIcBWGIcZm5f4iZ2bnaDRa7Jsex7YtHEeazpVKeWlSp4FpGqBdlXvQcWq02htE0ZVKmc3zVGsLhGEAQBC4LC4fx3UrOO4abWcLTZPql1pj+YqTqghYFTRNZ3FpkcXFy2xtrfPue7OcPTfHxkYbO2UlzqoJEtznSIhIggQJcCRgQaEgxma/9Atf5tGjB6hs1dmqNZieGuPIoX2UinnWN6rU6i0GykUOzUyRSdvb9rW+eY73z77C4vJxIML1mleJCIAfOBimgeu1AK7qQdqdLVZWT9FqV9D1WPAahAErqyt4gY+dygGwVa1z5txlyn3FxFk1QYL7HEn5boIECahd0ZIWCvL34MwUB/ZPsLC4Rq
PZJp/LMDY6wA9ee/eq0Vk2ncOyDL7w2Rf49p+9Qb3eAg0cx6N1pULG1gtoQBj5AOi6hqaBBrTbbVJWTETWNs7SaK5RKAyjX4mSBKErlu9RgKHpBKHO+tom2bQIXSOiO3iUEiRIcDuQEJEECRJQr8vfXC5+TNd1piZHtm33E594ipdefIL5hTVOn4FSweLpp3+aX/v13+SrX/surVYHXdfEJVXTMPQAXdcJwzYpy7jimhrieR6mqaNpGtrVwGxEGHq4bv1KukbDdVukrByGkSKIQjzHEb0KETP7p9jcOs/C4to1nzNBggT3DxIikiBBAjxplKuqa68LXdcZGx2h1YRiEUwTfuGv/gXW1jd57Y33iADLMvF9n47TYG7pDdJpiwOTk2xUqlTrTTLpNFEUoGs6lmlefe8o8mk7WzSbFSASUpIfJp3K03FXaTXXyOfSmJZGJpumUUnEqgkS3O9INCIJEiQgim6MhCgoTYl9RR5ycGaKv/N/+Wv89Jc+SSGXwXU8dF2nUMhxYP84n//MC3zs+ccwLZPDM9O89OLjDJbzZDIpTMu68iHA8xxarU02a3NoGtSbKzidGn7o4Djr2BmTIAhpt+u0Wx3sVDoRqyZIcJ8jiYgkSPAQIwxD5hfWmFuAfC5FGJZuqJttpyN/0+n4sYMzU/w//x//JT/5qWf4zd/6Q+rNFvunxxgeLtNpO5w6exnLNNm7Z5SLl5cJgi3GRwYZHx1kY/MyQRACEWuV0wDYdop2Z42NrYtEgYtp5jB0HScI8YIWjWabo4/sS8SqCRLc50iISIIEDynOzs7xyrde5dz5NaJwBMNs8vpbGb708osf2NW2NyKioOs6n/7kM0yMDfPK/7+9ew+Lqs4fOP4+M8AwXANEBWRRxEsWBkKyqbuZ8tOsx1Yt7WKal3BLW7WLqWlarUqGtpm2ZplWa+stV/MWRualNdNMIfECooI4ICIoINdhZn5/sEyOoEAyDYOf1/PM8zhnzuUz34NzPud7Od+E/aSdyeRsRjYaJ0fu7hKMRuNI1oVLlJaV49vCC5RSPDzcaeHjSX5+IfpKAw5qdVXTjsFARUUx2ReTcHRwqJqp12BAr6/EYCjFxcWZPwQGocvKJcDft14JlBCi6ZFERIjb0Km0TD75fDP5lwvxb90BJ0cfKvQlJB8/TVZ2LmNHPnLTZKQ6EXG4wS/IjUbdxC74nC1ff0/Lll5UtwQpikLrlj4UF5ehUlXNymswGP/36HYFVxctLi7OlJVXUFpWjsbJEXc3NWXlFezcncSu73cSEhxYrwRKCNH0SCIixG3GaDTydcJ+8wy7hkpvTCYVbm5qOoYEkpqWSfy3+2kfHHDDWobqzq03U9uom+4RXdiesI+iohIUVDg4XOFqcQ4lpeX4+/middZwPusiiqJgMhnROFU9cr56kK7XHR608vXG29uDVi280TirKNdX1DuBEkI0PZKICHGb0WXlknYm0zzDLlTPggug4N/axzyHS23DYo1GI5fyCigprcDVjQY1i3S5sx2dO7Ql/3IBxSVlFBWfxEGtpqWvFyHBgTg6qPDz8+GBP0VyNkPHxdzLXCm4ikqlIqRdAPpKAxdy8ugYEkilXoOCCXc3l3onUEKIpkcSESFuM1eLSykvrzDPsKt2uITJ5GT+XOviTPnF/FqHxVb3K8nIcESvr2RL/LkGNYsE+PsSfk8njh5Po3VLH/SVlTg5OuLh7gJAalomYaEdeWpYPwCLph2j0cjCxf+2TKCUqie2KkrdCZQQommSRESI24ybqxaNxomS0nLc3VxQlEoUpdL8edWw2JpzuFj0K2kZgUbjQHnllQY1i6hUVXPZZGXnknMxH//WPlXPA7laQtaFPLy9PXgw+j5zjca1CcWJlPTrEqg8FOXXNqKbJVBCiKZL6i+FuM0E+PtazLB7LZPJRNaFPDq0D7QYFnt9vxJnrabqOSH/axbJv1xI/Lf7/9fB9Oaun8vmTHoW+VeKCL2rPWNH3DiZuT
aBAlCpylCUX+ewuVECJYRo2qRGRIjbzLW1EqlpmeZaidKSslprJaD2fiWK6tdJ6xraLFLbqJq6+ppUJ1DJx0/TMSTQPEcN/JpAhd7VXp4rIoSdkUREiNtQda1E9bM+yi/mo3FyJPSu9jwYXbO/x/X9ShwcLwK/1n78lmaR2kbV1LV+QxMoIUTTJ4mIELephtRK1OxXYjl+9/dqFmloAiWEaPokERHiNlbfWomm1CzyW5p1hBBNlyQiQog6NbVmkYY26wghmi5JRIQQ9SLNIkIIa5BERAhRb9IsIoRobJKICCEaRJpFhBCNSW5jhBBCCGEzVk1Etm3bRlRUFFqtFi8vLwYNGmTNwwkhhBDCzlitaWbDhg3ExMQwb948+vTpQ2VlJcnJydY6nBBCCCHskFUSkcrKSiZNmkRcXBxjx441L+/SpYs1DieEEEIIO2WVppnDhw+j0+lQqVSEh4fj5+fHgAED6qwRKS8vp7Cw0OIlhBBCiObLKonImTNnAHjjjTeYOXMmW7duxcvLi969e5Ofn3/D7WJjY/H09DS/AgPluQRCCCFEc9agRGTatGkoinLT18mTJ81Tgc+YMYNHH32UiIgIVq5ciaIorF+//ob7nz59OgUFBeZXZmbmrX07IYQQQjRpDeoj8vLLLzNq1KibrhMcHEx2djZg2SdEo9EQHBzMuXPnbritRqNBo9E0JCQhhBBC2LEGJSK+vr74+tY9qVVERAQajYaUlBR69eoFgF6vJz09naCgoHofz2QyAUhfESGEEMKOVF+3q6/jN2OVUTMeHh4899xzzJ49m8DAQIKCgoiLiwNg6NCh9d5PUVERgPQVEUIIIexQUVERnp6eN13Has8RiYuLw8HBgREjRlBaWkpUVBTfffcdXl5e9d6Hv78/x48fp0uXLmRmZuLh4WGtcJu9wsJCAgMDpRxvgZRh45BybBxSjo1DyvHW1VaGJpOJoqIi/P3969xeMdWn3sSGCgsL8fT0pKCgQP5IboGU462TMmwcUo6NQ8qxcUg53rpbLUOZa0YIIYQQNiOJiBBCCCFspsknIhqNhtmzZ8uw3lsk5XjrpAwbh5Rj45BybBxSjrfuVsuwyfcREUIIIUTz1eRrRIQQQgjRfEkiIoQQQgibkURECCGEEDYjiYgQQgghbMbuEpFt27YRFRWFVqvFy8uLQYMG2Toku1VeXk5YWBiKopCYmGjrcOxKeno6Y8eOpV27dmi1Wtq3b8/s2bOpqKiwdWhN3gcffEDbtm1xdnYmKiqKgwcP2jokuxIbG8u9996Lu7s7LVu2ZNCgQaSkpNg6LLv29ttvoygKkydPtnUodken0/H000/j4+ODVqslNDSUQ4cONWgfdpWIbNiwgREjRjB69GiSkpLYt28fTz31lK3DsluvvvpqvR6/K2o6efIkRqORZcuWcezYMf7xj3/w4Ycf8tprr9k6tCZt7dq1vPTSS8yePZvDhw9zzz330L9/fy5evGjr0OzGnj17mDBhAj/++CMJCQno9Xr69etHcXGxrUOzSz/99BPLli2ja9eutg7F7ly+fJmePXvi6OjI119/zfHjx1m4cGGDpnIBwGQn9Hq9KSAgwLR8+XJbh9IsbN++3dS5c2fTsWPHTIDpyJEjtg7J7r3zzjumdu3a2TqMJq179+6mCRMmmN8bDAaTv7+/KTY21oZR2beLFy+aANOePXtsHYrdKSoqMnXo0MGUkJBguv/++02TJk2ydUh2ZerUqaZevXrd8n7spkbk8OHD6HQ6VCoV4eHh+Pn5MWDAAJKTk20dmt3JyckhJiaGf/3rX7i4uNg6nGajoKAAb29vW4fRZFVUVPDzzz8THR1tXqZSqYiOjmb//v02jMy+FRQUAMjf3m8wYcIEHn74YYu/SVF/mzdvJjIykqFDh9KyZUvCw8P5+OOPG7wfu0lEzpw5A8Abb7zBzJkz2bp1K15eXvTu3Zv8/HwbR2c/TCYTo0aN4rnnniMyMtLW4TQbaWlpLF
68mL/+9a+2DqXJunTpEgaDgVatWlksb9WqFRcuXLBRVPbNaDQyefJkevbsyd13323rcOzKmjVrOHz4MLGxsbYOxW6dOXOGpUuX0qFDB3bs2MHzzz/PxIkT+eyzzxq0H5snItOmTUNRlJu+qtvjAWbMmMGjjz5KREQEK1euRFEU1q9fb+NvYXv1LcfFixdTVFTE9OnTbR1yk1TfcryWTqfjwQcfZOjQocTExNgocnE7mjBhAsnJyaxZs8bWodiVzMxMJk2axBdffIGzs7Otw7FbRqORbt26MW/ePMLDwxk3bhwxMTF8+OGHDdqPg5Xiq7eXX36ZUaNG3XSd4OBgsrOzAejSpYt5uUajITg4mHPnzlkzRLtQ33L87rvv2L9/f405ASIjIxk+fHiDM9nmpr7lWC0rK4sHHniAHj168NFHH1k5OvvWokUL1Go1OTk5FstzcnJo3bq1jaKyXy+88AJbt25l7969tGnTxtbh2JWff/6Zixcv0q1bN/Myg8HA3r17WbJkCeXl5ajVahtGaB/8/PwsrskAd955Jxs2bGjQfmyeiPj6+uLr61vnehEREWg0GlJSUujVqxcAer2e9PR0goKCrB1mk1ffcnz//feZM2eO+X1WVhb9+/dn7dq1REVFWTNEu1DfcoSqmpAHHnjAXDunUtm8grFJc3JyIiIigp07d5qH3RuNRnbu3MkLL7xg2+DsiMlk4m9/+xsbN25k9+7dtGvXztYh2Z2+ffty9OhRi2WjR4+mc+fOTJ06VZKQeurZs2eNoeOpqakNvibbPBGpLw8PD5577jlmz55NYGAgQUFBxMXFATB06FAbR2c//vCHP1i8d3NzA6B9+/ZyV9UAOp2O3r17ExQUxIIFC8jNzTV/Jnf3N/bSSy/xzDPPEBkZSffu3XnvvfcoLi5m9OjRtg7NbkyYMIF///vffPXVV7i7u5v713h6eqLVam0cnX1wd3ev0afG1dUVHx8f6WvTAC+++CI9evRg3rx5DBs2jIMHD/LRRx81uHbYbhIRgLi4OBwcHBgxYgSlpaVERUXx3XffNXzMshC3KCEhgbS0NNLS0mokcCaZ0PqGHn/8cXJzc5k1axYXLlwgLCyM+Pj4Gh1YxY0tXboUgN69e1ssX7lyZZ3NikI0pnvvvZeNGzcyffp03nrrLdq1a8d7773H8OHDG7QfxSS/mkIIIYSwEWnUFkIIIYTNSCIihBBCCJuRREQIIYQQNmNXnVVF/RUVFZGdnW1+EJwQQoiGU6lU+Pn54e7ubutQmi1JRJoZo9FIbGwsGzdutHUoQgjRbAwePJjp06fL84KsQBKRZiY2NpZNmzYxceJEwsPDcXR0tHVIQghht/R6PUeOHGHx4sVA1TQjonHJ8N1mpLCwkD59+jBx4kRGjhxp63CEEKLZ+Pzzz3n//ffZtWuXNNM0Mqljakaqn7AYHh5u40iEEKJ5qf5drZ73TDQeSUSakeqOqdIcI4QQjav6d1UGADQ+SUSEEEIIYTOSiAghhBDCZiQREUIIIYTNSCIihBC3obKysjpfDekPkZeXR8uWLUlPT7de0FbwxBNPsHDhQluHcVuTREQI0Sh69+7N5MmTbR2GWVOLp6np1q0bWq32hi8XFxfOnTtX7/3NnTuXv/zlL7Rt29a87MUXX2TIkCFWiP63uz6mmTNnMnfuXAoKCmwY1e1NEhFRK5PJxNXSCq5cLeNqaQXN6XEzBoOh2fd8NxqNZJ7P4URKOpnnc+zm+1ZUVNg6BJsxGY0YL+ZiyMjEeDEXk5XP2bPPPouHhwepqamcPXvW4tW3b1+io6MtkoqbKSkp4ZNPPmHs2LEWyw8ePEhkZOQtx1pZWXnL+6h2fUx333037du3Z9WqVY12DNEwkoiIGgqKyziRcYmjZ3M4ln6Ro2dzOJFxiYLiMqsd88svvyQ0NBStVouPjw/R0dEUFxdjNBp56623aNOmDRqNhrCwMOLj483b7d69G0VRuHLlinlZYmIiiqKYq4
g//fRT7rjjDjZv3kyXLl3QaDScO3eO8vJypk6dSmBgIBqNhpCQED755BPzfpKTkxkwYABubm60atWKESNGcOnSJauVQWM5lZbJkmVfErdoFe99sJq4RatYsuxLTqVlWu2Yo0aNYs+ePSxatAhFUVAUhdOnTzN27FjatWuHVqulU6dOLFq0qMZ2gwYNYu7cufj7+9OpUycAfvjhB8LCwnB2diYyMpJNmzahKAqJiYnmbW92fmqLpyk3GRjO66jYtIXytV9S/uVGytd+ScWmLRjO66x2zJEjR1JWVsbRo0dp27at+eXm5sbevXt59tlnzeseOHCAXr16odVqCQsLY+/evSiKQnJyMgDbt29Ho9Hwxz/+EahKKB0dHfnhhx+YMWMGiqKYP5s9ezahoaG4urrSqlUrnn/+efR6vflY6enpKIrCunXr+NOf/oRGo2Hz5s31igPg3LlzPPXUU3h5eeHt7c3w4cO5fPnyTWMaOHAga9assVpZi5uTRERYKCguI/V8HvlFpWgc1bi5OKFxVJNfVErq+TyrJCPZ2dk8+eSTjBkzhhMnTrB7926GDBmCyWRi0aJFLFy4kAULFvDLL7/Qv39/HnnkEU6dOtWgY5SUlDB//nyWL1/OsWPHaNmyJSNHjmT16tW8//77nDhxgmXLluHm5gbAlStX6NOnD+Hh4Rw6dIj4+HhycnIYNmxYo3//xnQqLZNPPt9M8vHTeHt50L5dAN5eHiQfP80nn2+2WjKyaNEi7rvvPmJiYsjOziY7O5s2bdrQpk0b1q9fz/Hjx5k1axavvfYa69ats9h2586dpKSkkJCQwNatWyksLGTgwIGEhoZy+PBh/v73vzN16lSLbeo6P7XFExgYaJXvfqsM53Xot+/AcDYDxd0dVYAfirs7hrMZVcutlIy0aNGCQYMGsWLFCovlq1atwtPTk0GDBgFVCV/fvn3p3bs3R44c4fXXX2fo0KFoNBo6d+4MwPfff09ERIR5Hw4ODuzbtw+oujHIzs4mPj4ek8mEyWRi2bJlHD9+nE8//ZQNGzawfPly87ZJSUkAxMXFMWvWLI4dO0bfvn3rFUdaWhoRERGEhITw448/kpCQQFpaGlOmTLlhTADdu3fn4MGDlJeXW6GkRV1krhlhZjKZ0OUWUV5hwMPVCUVRAFA5qPFwVVFYXIEutwgPF435s8aQnZ1NZWUlQ4YMISgoCIDQ0FAAFixYwNSpU3niiScAmD9/Prt27eK9997jgw8+qPcx9Ho9//znP7nnnnsASE1NZd26dSQkJBAdHQ1AcHCwef0lS5YQHh7OvHnzzMtWrFhBYGAgqampdOzY8da+tBUYjUa+TthP/uVCOoYEms+Ru5sLHUMCSU3LJP7b/bQPDmj0ibs8PT1xcnLCxcWF1q1bm5e/+eab5n+3a9eO/fv3s27dOouEztXVleXLl+Pk5ATAhx9+iKIofPzxxzg7O9OlSxd0Oh0xMTHmbepzfmqLp6kxGY1UHjyEsegqqjb+v/6/ctGi0vpjPJ9F5cGfUfn7oVhhsrWYmBgefPBBsrKy8Pf3B2DlypWMHDnSfD4mTpzII488wpw5cwDo3Lkzn332GefPn8fBoeoSkpGRYd4eqmaszcrKwsfHx/x/rtpbb71l/ndQUBDR0dGkpKSYlyUmJuLq6sr69estmoYGDx5cZxzjx49n/PjxFn93r776KlOmTLlpTP7+/lRUVHDhwgXzb5D4/UgiIsyKy/QUlJTh4uxQI9FQFAUXZwcKSsooLtPjpnVqtOPec8899O3bl9DQUPr370+/fv147LHHUKvVZGVl0bNnT4v1e/bsab5rqi8nJye6du1qfp+YmIhareb++++vdf2kpCR27dplriG51unTp5tkIqLLyiXtTCb+fi1qPX/+rX04dToTXVYugW1a/S4xffDBB6xYsYJz585RWlpKRUUFYWFhFuuEhoaaL3oAKSkpdO3aFWdnZ/Oy7t27W2xjj+enNqZLeRh1Wah8vG
s9ZyofL4w6HaZLeSgtfRv9+H379iUoKIjPPvuM6dOn8/PPP/PLL7+YmykyMjLYtWuXRdMHgEajsbiYl5aWWpwvgCNHjtS44GdkZPDOO++wZ88edDoder2esrIy3n77bfM6SUlJPPLIIxZJSH3iyMjIICEhgf/+978Wo2AMBoO5Nqy2mAC0Wi1QVXMqfn+SiAizSoMRo9GEWl37nZdarcJYXkmloXE70anVahISEvjhhx/45ptvWLx4MTNmzCAhIaHObavv7K/tTHtte3M1rVZr8UNf/cNzI1evXmXgwIHMnz+/xmd+fn51xmULV4tLKS+vwEWrqfVzrYsz5RfzuVpc+rvEs2bNGl555RUWLlzIfffdh7u7O3FxcRw4cMBiPVdX1wbv2x7PT21MpWWYKvQozrWfMzQaTPmXMZVap3+WoiiMGTOGlStXMn36dFasWEGPHj248847gaqE3cnJibvuustiuxMnTlj0IWnRogWXL1+2WCcxMdHiop+bm8u9995Lnz59ePfddwkICMBgMBAZGWmxXmJiItOmTauxr7riSEpKwtvbu8bfF/z6//36mKrl5+cD4Ovb+MmeqJskIsLMQa1CpVIwGIyoHNQ1PjcYjKhUCg43SFRuhaIo9OzZk549ezJr1iyCgoLYuXMn/v7+7Nu3z6LmYt++feY75OofjuzsbLy8vAAsOjTeSGhoKEajkT179pibZq7VrVs3NmzYQNu2bc3Vvk2dm6sWjcaJktJy3N1canxeWlKGxskRN9ebJ2G/lZOTEwaDwfx+37599OjRg/Hjx5uXnT59us79dOrUiVWrVlFeXo5GU3WB/umnnyzWqc/5uT6epkjROqM4OUJZObjUcl7Ky1EcHVG0zjU/aySjR49m9uzZfPvtt6xevZp3333X/JlaraayspKysjJzjcfOnTs5duyYxQU9PDy8xqiTo0eP8uijj5rfb9myBYPBwOrVq803BUuWLEGv15tryQoLC0lPT68xcWd94nB0dKSoqAh/f39cXGr+/dcWU7Xk5GTatGlDixYt6lVmonFJZ1Vh5ursiKeLMyVllTWG65pMJkrKKvF0ccbVuXEn1Ttw4ADz5s3j0KFDnDt3jv/85z/k5uZy5513MmXKFObPn8/atWtJSUlh2rRpJCYmMmnSJABCQkIIDAzkjTfe4NSpU2zbtq1eDydq27YtzzzzDGPGjGHTpk2cPXuW3bt3mztSTpgwgfz8fJ588kl++uknTp8+zY4dOxg9enSTvbgF+PsSEhxIVvalWs9f1oU8OrQPJMDfOnd9bdu25cCBA6Snp3Pp0iU6dOjAoUOH2LFjB6mpqbz++us1EoraPPXUUxiNRsaNG8eJEyfYsWMHCxYsADBfwOpzfq6PpykOYVZa+KAK8MeYl1/rOTPmXUYVEIDSwsdqMfj7+/PQQw8xZswYDAaDRf+diIgIHB0dmTJlCmfOnGHLli2MGzcOwCIR6d+/P8eOHbOoFTEajaSkpJCVlUVBQQE+Pj4UFhayefNmTp06xbvvvsubb75JQECA+YYiKSkJtVpt7iPWkDiioqLw8PBg5MiRJCUlkZaWRnx8vMWzZK6Pqdr3339Pv379GqlERUNJIiLMFEUhwNcdjZOawuIK9JUGjCYT+koDhcUVaJzUBPi6N2pHVQAPDw/27t3LQw89RMeOHZk5cyYLFy5kwIABTJw4kZdeeomXX36Z0NBQ4uPj2bx5Mx06dACq7oJWr17NyZMn6dq1K/Pnzzd3ZqvL0qVLeeyxxxg/fjydO3cmJiaG4uJiAHNNjMFgoF+/foSGhjJ58mTuuOOORu/o2VhUKhUD/u8+vL08SE3LpKiomEqDgaKiYlLTMvH29uDB6PusFv8rr7yCWq2mS5cu+Pr60r9/f4YMGcLjjz9OVFQUeXl5FrUjN+Lh4cGWLVtITEwkLCyMGTNmMGvWLADz3XB9zs/18TTk4Vy/F0WlwqF7JCp3N4znszCVlGAyGDCVlGA8n4XK3Q
2H7hFW6ah6rXHjxpGVlcXw4cMtahP8/PxYsWIFX331FV27dmXlypU888wzhISE4O3tbV4vNDSUbt26WYyImjNnDp9++ikBAQHMmTOHgQMHMnbsWEaMGEGvXr3Q6XQMGzbMos9QUlISnTp1qtHfpD5xeHt7s337dvLy8vjzn/9Mt27dmDFjhkUn9OtjgqonzG7atMmiM7T4fSmm5vSkqtvcyZMnefrpp1m1apV5ONtvUVBchi63iIKSMoxGEyqVgqeLMwG+7ni6Wq+KWDSOU2mZfJ2wn7QzmZRX6NE4OdKhfSAPRt9Hh5CmOYS1Ll988QWjR4+moKCgzv499shwXlc1ekaXhUmvR3F0RBUQgEP3CNRtAmwdnpnRaKR379706tXLYsQSwLZt25gyZQrJyclWT9ZvFkdDLV26lI0bN/LNN9/cdL3G+n0VNdlH47f4XXm6OuPhoqG4TE+lwYiDWoWrs2Oj14QI6+gQEkj74AB0WblcLS7FzVVLgL9vk63Jqc3nn39OcHAwAQEBJCUlMXXqVIYNG9YskxAAdZsAVP5+mC7lYSotq+o70sLH6jUhddm7dy+5ubmEh4dz6dIl4uLiyMjIYNOmTTXWffjhhzl16hQ6na7Rn9nSkDgaytHRkcWLF996kOI3k0RE1EpRlEYdoit+XyqV6ncbomsNFy5cYNasWVy4cAE/Pz+GDh3K3LlzbR2WVSkqlVWG6N6KnJwcpk2bhk6no1WrVkRHR3Pw4EGLZplrWWtun4bG0RDXjv4RtiFNM82IVB0KIYR1yO+r9dhPXa0QQgghmh1JRIQQQghhM5KICCGEEMJmJBFpRqpHRdT2iHMhhBC/XfXvqj2NPrMXUqLNSPUso0eOHLFxJEII0bxU/67a01xG9kKG7zYjHh4eDB482DwmPjw8HEfHxn0cuxBC3E70ej1Hjhxh8eLFDB48GHd3d1uH1OzI8N1mxmg0Ehsby8aNG20dihBCNBuDBw9m+vTp0jRjBZKINFNFRUVkZ2c3yYm+hBDCXqhUKvz8/KQmxIokERFCCCGEzUgdkxBCCCFsRhIRIYQQQtiMJCJCCCGEsBlJRIQQQghhM5KICCGEEMJmJBERQgghhM1IIiKEEEIIm/l/7rPlzkCzj3IAAAAASUVORK5CYII=", "text/plain": [ "
" ] @@ -414,7 +414,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.10" + "version": "3.10.6" }, "vscode": { "interpreter": { From 982d20b6c36d81f4bb6a42fd81577034a49e6b41 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Tue, 2 Apr 2024 14:48:59 +0200 Subject: [PATCH 178/186] Fix links to neural in the docs --- src/ott/neural/datasets.py | 4 ++-- src/ott/neural/methods/flows/genot.py | 2 +- src/ott/neural/methods/flows/otfm.py | 2 +- src/ott/neural/methods/monge_gap.py | 12 ++++++------ src/ott/neural/methods/neuraldual.py | 9 +++++---- src/ott/neural/networks/potentials.py | 2 +- 6 files changed, 16 insertions(+), 15 deletions(-) diff --git a/src/ott/neural/datasets.py b/src/ott/neural/datasets.py index c5661aecc..89453b2ce 100644 --- a/src/ott/neural/datasets.py +++ b/src/ott/neural/datasets.py @@ -27,8 +27,8 @@ class OTData: """Distribution data for (conditional) optimal transport problems. Args: - lin: Linear (living in the shared space) part of the samples. - quad: Quadratic (living in the incomparable subspace) part of the samples. + lin: Linear term of the samples. + quad: Quadratic term of the samples. condition: Condition corresponding to the data distribution. """ lin: Optional[np.ndarray] = None diff --git a/src/ott/neural/methods/flows/genot.py b/src/ott/neural/methods/flows/genot.py index a3bad5902..ce200d376 100644 --- a/src/ott/neural/methods/flows/genot.py +++ b/src/ott/neural/methods/flows/genot.py @@ -66,7 +66,7 @@ class GENOT: n_samples_per_src: Number of samples drawn from the conditional distribution per one source sample. kwargs: Keyword arguments for - :meth:`ott.neural.flow_models.models.VelocityField.create_train_state`. + :meth:`~ott.neural.networks.velocity_field.VelocityField.create_train_state`. 
""" # noqa: E501 def __init__( diff --git a/src/ott/neural/methods/flows/otfm.py b/src/ott/neural/methods/flows/otfm.py index ebeb138b5..65d6a149d 100644 --- a/src/ott/neural/methods/flows/otfm.py +++ b/src/ott/neural/methods/flows/otfm.py @@ -41,7 +41,7 @@ class OTFlowMatching: distributions. It has a ``(src, tgt) -> matching`` signature. time_sampler: Time sampler with a ``(rng, n_samples) -> time`` signature. kwargs: Keyword arguments for - :meth:`~ott.neural.flow_models.models.VelocityField.create_train_state`. + :meth:`~ott.neural.networks.velocity_field.VelocityField.create_train_state`. """ def __init__( diff --git a/src/ott/neural/methods/monge_gap.py b/src/ott/neural/methods/monge_gap.py index c108a3509..140fad4a1 100644 --- a/src/ott/neural/methods/monge_gap.py +++ b/src/ott/neural/methods/monge_gap.py @@ -64,14 +64,14 @@ def monge_gap( W_{c, \varepsilon}(\hat{\rho}_n, T \sharp \hat{\rho}_n) See :cite:`uscidda:23` Eq. (8). This function is a thin wrapper that calls - :func:`~ott.neural.losses.monge_gap_from_samples`. + :func:`~ott.neural.methods.monge_gap.monge_gap_from_samples`. Args: map_fn: Callable corresponding to map :math:`T` in definition above. The - callable should be vectorized (e.g. using :func:`jax.vmap`), i.e, + callable should be vectorized (e.g. using :func:`~jax.vmap`), i.e, able to process a *batch* of vectors of size `d`, namely ``map_fn`` applied to an array returns an array of the same shape. - reference_points: Array of `[n,d]` points, :math:`\hat\rho_n` in paper + reference_points: Array of `[n,d]` points, :math:`\hat\rho_n`. cost_fn: An object of class :class:`~ott.geometry.costs.CostFn`. epsilon: Regularization parameter. 
See :class:`~ott.geometry.pointcloud.PointCloud` @@ -184,7 +184,7 @@ class MongeGapEstimator: For instance, :math:`\Delta` can be the :func:`~ott.tools.sinkhorn_divergence.sinkhorn_divergence` - and :math:`R` the :func:`~ott.neural.losses.monge_gap_from_samples` + and :math:`R` the :func:`~ott.neural.methods.monge_gap.monge_gap_from_samples` :cite:`uscidda:23` for a given cost function :math:`c`. In that case, it estimates a :math:`c`-OT map, i.e. a map :math:`T` optimal for the Monge problem induced by :math:`c`. @@ -259,11 +259,11 @@ def setup( def regularizer(self) -> Callable[[jnp.ndarray, jnp.ndarray], float]: """Regularizer added to the fitting loss. - Can be, e.g. the :func:`~ott.neural.losses.monge_gap_from_samples`. + Can be, e.g. the :func:`~ott.neural.methods.monge_gap.monge_gap_from_samples`. If no regularizer is passed for solver instantiation, or regularization weight :attr:`regularizer_strength` is 0, return 0 by default along with an empty set of log values. - """ + """ # noqa: E501 if self._regularizer is not None: return self._regularizer return lambda *_, **__: (0.0, None) diff --git a/src/ott/neural/methods/neuraldual.py b/src/ott/neural/methods/neuraldual.py index 6845224f4..30fd08d4e 100644 --- a/src/ott/neural/methods/neuraldual.py +++ b/src/ott/neural/methods/neuraldual.py @@ -48,7 +48,8 @@ class W2NeuralDual: denoted source and target, respectively. This is achieved by parameterizing a Kantorovich potential :math:`f_\theta: \mathbb{R}^n\rightarrow\mathbb{R}` associated with the :math:`\alpha` measure with an - :class:`~ott.neural.models.ICNN` or :class:`~ott.neural.models.MLP`, where + :class:`~ott.neural.networks.icnn.ICNN` or a + :class:`~ott.neural.networks.potentials.PotentialMLP`, where :math:`\nabla f` transports source to target cells. This potential is learned by optimizing the dual form associated with the negative inner product cost @@ -64,10 +65,10 @@ class W2NeuralDual: transport map from :math:`\beta` to :math:`\alpha`. 
This solver estimates the conjugate :math:`f^\star` with a neural approximation :math:`g` that is fine-tuned - with :class:`~ott.neural.duality.conjugate.FenchelConjugateSolver`, + with :class:`~ott.neural.networks.layers.conjugate.FenchelConjugateSolver`, which is a combination further described in :cite:`amos:23`. - The :class:`~ott.neural.duality.neuraldual.BaseW2NeuralDual` potentials for + The :class:`~ott.neural.networks.potentials.BasePotential` potentials for ``neural_f`` and ``neural_g`` can 1. both provide the values of the potentials :math:`f` and :math:`g`, or @@ -76,7 +77,7 @@ class W2NeuralDual: via the Fenchel conjugate as discussed in :cite:`amos:23`. The potential's value or gradient mapping is specified via - :attr:`~ott.neural.duality.neuraldual.BaseW2NeuralDual.is_potential`. + :attr:`~ott.neural.networks.potentials.BasePotential.is_potential`. Args: dim_data: input dimensionality of data required for network init diff --git a/src/ott/neural/networks/potentials.py b/src/ott/neural/networks/potentials.py index 6a08e0048..563f4537c 100644 --- a/src/ott/neural/networks/potentials.py +++ b/src/ott/neural/networks/potentials.py @@ -34,7 +34,7 @@ class PotentialTrainState(train_state.TrainState): This extends :class:`~flax.training.train_state.TrainState` to include the potential methods from the - :class:`~ott.neural.duality.neuraldual.BaseW2NeuralDual` used during training. + :class:`~ott.neural.networks.potentials.BasePotential` used during training. 
Args: potential_value_fn: the potential's value function From 7b61e054ba8d0b242ac430ab24a17220c915f633 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Tue, 2 Apr 2024 17:00:16 +0200 Subject: [PATCH 179/186] Check for condition dim in VF --- src/ott/neural/networks/velocity_field.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/ott/neural/networks/velocity_field.py b/src/ott/neural/networks/velocity_field.py index 55bbfabfc..589f3d33d 100644 --- a/src/ott/neural/networks/velocity_field.py +++ b/src/ott/neural/networks/velocity_field.py @@ -111,7 +111,11 @@ def create_train_state( The training state. """ t, x = jnp.ones((1, 1)), jnp.ones((1, input_dim)) - cond = None if self.condition_dims is None else jnp.ones((1, condition_dim)) + if self.condition_dims is None: + cond = None + else: + assert condition_dim > 0, "Condition dimension must be positive." + cond = jnp.ones((1, condition_dim)) params = self.init(rng, t, x, cond)["params"] return train_state.TrainState.create( From 8819d5e87e5bb356dc6bd97a366cb50602de4d5a Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Tue, 2 Apr 2024 19:28:20 +0200 Subject: [PATCH 180/186] Don't use activation fn in the last layer of VF --- src/ott/neural/networks/velocity_field.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/ott/neural/networks/velocity_field.py b/src/ott/neural/networks/velocity_field.py index 589f3d33d..39c7d98da 100644 --- a/src/ott/neural/networks/velocity_field.py +++ b/src/ott/neural/networks/velocity_field.py @@ -87,10 +87,11 @@ def __call__( else: feats = jnp.concatenate([t, x], axis=-1) - for output_dim in self.output_dims: + for output_dim in self.output_dims[:-1]: feats = self.act_fn(nn.Dense(output_dim)(feats)) - return feats + # no activation function for the final layer + return nn.Dense(self.output_dims[-1])(feats) def create_train_state( self, From 
6f9cbcc848fc4a89f93d59f35ec2aec8403ba9dd Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Tue, 2 Apr 2024 19:35:00 +0200 Subject: [PATCH 181/186] Update assertions --- tests/neural/methods/genot_test.py | 8 +++++--- tests/neural/methods/otfm_test.py | 10 +++++++--- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/tests/neural/methods/genot_test.py b/tests/neural/methods/genot_test.py index 086ea7a80..2c746596c 100644 --- a/tests/neural/methods/genot_test.py +++ b/tests/neural/methods/genot_test.py @@ -19,6 +19,7 @@ import jax import jax.numpy as jnp import jax.tree_util as jtu +import numpy as np import optax @@ -76,7 +77,7 @@ def test_genot(self, rng: jax.Array, dl: str, request): optimizer=optax.adam(learning_rate=1e-4), ) - _logs = model(dl.loader, n_iters=3, rng=rng_call) + _logs = model(dl.loader, n_iters=2, rng=rng_call) batch = next(iter(dl.loader)) batch = jtu.tree_map(jnp.asarray, batch) @@ -86,5 +87,6 @@ def test_genot(self, rng: jax.Array, dl: str, request): res = model.transport(src, condition=src_cond) - assert jnp.sum(jnp.isnan(res)) == 0 - assert res.shape[-1] == tgt_dim + assert len(_logs["loss"]) == 2 + np.testing.assert_array_equal(jnp.isfinite(res), True) + assert res.shape == (batch_size, tgt_dim) diff --git a/tests/neural/methods/otfm_test.py b/tests/neural/methods/otfm_test.py index 0eb311fa6..a7c14758c 100644 --- a/tests/neural/methods/otfm_test.py +++ b/tests/neural/methods/otfm_test.py @@ -16,6 +16,7 @@ import jax import jax.numpy as jnp import jax.tree_util as jtu +import numpy as np import optax @@ -54,6 +55,9 @@ def test_otfm(self, rng: jax.Array, dl: str, request): res_fwd = fm.transport(batch["src_lin"], condition=src_cond) res_bwd = fm.transport(batch["tgt_lin"], t0=1.0, t1=0.0, condition=src_cond) - # TODO(michalk8): better assertions - assert jnp.sum(jnp.isnan(res_fwd)) == 0 - assert jnp.sum(jnp.isnan(res_bwd)) == 0 + assert len(_logs["loss"]) == 3 + + assert res_fwd.shape 
== batch["src_lin"].shape + assert res_bwd.shape == batch["tgt_lin"].shape + np.testing.assert_array_equal(jnp.isfinite(res_fwd), True) + np.testing.assert_array_equal(jnp.isfinite(res_bwd), True) From 9e1499b93e7980f2e00cf55fae1d401522619217 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Tue, 2 Apr 2024 21:06:31 +0200 Subject: [PATCH 182/186] Try skipping OTFM/GENOT tests temporarily --- tests/neural/methods/genot_test.py | 2 +- tests/neural/methods/otfm_test.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/neural/methods/genot_test.py b/tests/neural/methods/genot_test.py index 2c746596c..2bc4ba9fb 100644 --- a/tests/neural/methods/genot_test.py +++ b/tests/neural/methods/genot_test.py @@ -52,7 +52,7 @@ class TestGENOT: "fused_cond_dl" ] ) - def test_genot(self, rng: jax.Array, dl: str, request): + def skip_test_genot(self, rng: jax.Array, dl: str, request): rng_init, rng_call, rng_data = jax.random.split(rng, 3) problem_type = dl.split("_")[0] dl = request.getfixturevalue(dl) diff --git a/tests/neural/methods/otfm_test.py b/tests/neural/methods/otfm_test.py index a7c14758c..76759d4e8 100644 --- a/tests/neural/methods/otfm_test.py +++ b/tests/neural/methods/otfm_test.py @@ -28,7 +28,7 @@ class TestOTFlowMatching: @pytest.mark.parametrize("dl", ["lin_dl", "lin_cond_dl"]) - def test_otfm(self, rng: jax.Array, dl: str, request): + def skip_test_otfm(self, rng: jax.Array, dl: str, request): dl = request.getfixturevalue(dl) dim, cond_dim = dl.lin_dim, dl.cond_dim From b37da2a0c627b8a251889989f372d88bb4358718 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Tue, 2 Apr 2024 22:26:43 +0200 Subject: [PATCH 183/186] Be extra verbose when intalling packages --- .github/workflows/tests.yml | 2 +- tests/neural/methods/genot_test.py | 2 +- tests/neural/methods/otfm_test.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git 
a/.github/workflows/tests.yml b/.github/workflows/tests.yml index a06447d9f..d8ead3517 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -97,7 +97,7 @@ jobs: - name: Setup environment run: | - tox -e py${{ matrix.python-version }} --notest -v + tox -e py${{ matrix.python-version }} --notest -vv - name: Run tests run: | diff --git a/tests/neural/methods/genot_test.py b/tests/neural/methods/genot_test.py index 2bc4ba9fb..2c746596c 100644 --- a/tests/neural/methods/genot_test.py +++ b/tests/neural/methods/genot_test.py @@ -52,7 +52,7 @@ class TestGENOT: "fused_cond_dl" ] ) - def skip_test_genot(self, rng: jax.Array, dl: str, request): + def test_genot(self, rng: jax.Array, dl: str, request): rng_init, rng_call, rng_data = jax.random.split(rng, 3) problem_type = dl.split("_")[0] dl = request.getfixturevalue(dl) diff --git a/tests/neural/methods/otfm_test.py b/tests/neural/methods/otfm_test.py index 76759d4e8..a7c14758c 100644 --- a/tests/neural/methods/otfm_test.py +++ b/tests/neural/methods/otfm_test.py @@ -28,7 +28,7 @@ class TestOTFlowMatching: @pytest.mark.parametrize("dl", ["lin_dl", "lin_cond_dl"]) - def skip_test_otfm(self, rng: jax.Array, dl: str, request): + def test_otfm(self, rng: jax.Array, dl: str, request): dl = request.getfixturevalue(dl) dim, cond_dim = dl.lin_dim, dl.cond_dim From 9c561a5e01d513f0f5e2c46732a775c6c8aa1a57 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Tue, 2 Apr 2024 22:54:23 +0200 Subject: [PATCH 184/186] Remove `torch` dependency --- .github/workflows/tests.yml | 2 +- tests/neural/conftest.py | 57 +++++++++++++++++++++++-------- tests/neural/methods/otfm_test.py | 2 +- 3 files changed, 44 insertions(+), 17 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index d8ead3517..a06447d9f 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -97,7 +97,7 @@ jobs: - name: Setup environment run: | - tox -e py${{ 
matrix.python-version }} --notest -vv + tox -e py${{ matrix.python-version }} --notest -v - name: Run tests run: | diff --git a/tests/neural/conftest.py b/tests/neural/conftest.py index f4c25c514..41b5ea71a 100644 --- a/tests/neural/conftest.py +++ b/tests/neural/conftest.py @@ -11,18 +11,45 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import NamedTuple, Optional, Union +from collections import defaultdict +from typing import Dict, NamedTuple, Optional, Union import pytest +import jax.numpy as jnp import numpy as np -from torch.utils.data import DataLoader from ott.neural import datasets +class SimpleDataLoader: + + def __init__( + self, + dataset: datasets.OTDataset, + batch_size: int, + seed: Optional[int] = None + ): + self.dataset = dataset + self.batch_size = batch_size + self.seed = seed + + def __iter__(self): + self._rng = np.random.default_rng(self.seed) + return self + + def __next__(self) -> Dict[str, jnp.ndarray]: + data = defaultdict(list) + for _ in range(self.batch_size): + ix = self._rng.integers(0, len(self.dataset)) + for k, v in self.dataset[ix].items(): + data[k].append(v) + + return {k: jnp.vstack(v) for k, v in data.items()} + + class OTLoader(NamedTuple): - loader: DataLoader + loader: SimpleDataLoader lin_dim: int = 0 quad_src_dim: int = 0 quad_tgt_dim: int = 0 @@ -58,7 +85,7 @@ def _ot_data( @pytest.fixture() -def lin_dl() -> DataLoader: +def lin_dl() -> OTLoader: n, d = 128, 2 rng = np.random.default_rng(0) @@ -67,13 +94,13 @@ def lin_dl() -> DataLoader: ds = datasets.OTDataset(src, tgt) return OTLoader( - DataLoader(ds, batch_size=16, shuffle=True), + SimpleDataLoader(ds, batch_size=13), lin_dim=d, ) @pytest.fixture() -def lin_cond_dl() -> DataLoader: +def lin_cond_dl() -> OTLoader: n, d, cond_dim = 128, 2, 3 rng = np.random.default_rng(13) @@ -84,14 +111,14 @@ def lin_cond_dl() -> 
DataLoader: ds = datasets.OTDataset(src, tgt) return OTLoader( - DataLoader(ds, batch_size=16, shuffle=True), + SimpleDataLoader(ds, batch_size=14), lin_dim=d, cond_dim=cond_dim, ) @pytest.fixture() -def quad_dl(): +def quad_dl() -> OTLoader: n, quad_src_dim, quad_tgt_dim = 128, 2, 4 rng = np.random.default_rng(11) @@ -100,14 +127,14 @@ def quad_dl(): ds = datasets.OTDataset(src, tgt) return OTLoader( - DataLoader(ds, batch_size=16, shuffle=True), + SimpleDataLoader(ds, batch_size=15), quad_src_dim=quad_src_dim, quad_tgt_dim=quad_tgt_dim, ) @pytest.fixture() -def quad_cond_dl(): +def quad_cond_dl() -> OTLoader: n, quad_src_dim, quad_tgt_dim, cond_dim = 128, 2, 4, 5 rng = np.random.default_rng(414) @@ -118,7 +145,7 @@ def quad_cond_dl(): ds = datasets.OTDataset(src, tgt) return OTLoader( - DataLoader(ds, batch_size=16, shuffle=True), + SimpleDataLoader(ds, batch_size=16), quad_src_dim=quad_src_dim, quad_tgt_dim=quad_tgt_dim, cond_dim=cond_dim, @@ -126,7 +153,7 @@ def quad_cond_dl(): @pytest.fixture() -def fused_dl(): +def fused_dl() -> OTLoader: n, lin_dim, quad_src_dim, quad_tgt_dim = 128, 6, 2, 4 rng = np.random.default_rng(11) @@ -135,7 +162,7 @@ def fused_dl(): ds = datasets.OTDataset(src, tgt) return OTLoader( - DataLoader(ds, batch_size=16, shuffle=True), + SimpleDataLoader(ds, batch_size=17), lin_dim=lin_dim, quad_src_dim=quad_src_dim, quad_tgt_dim=quad_tgt_dim, @@ -143,7 +170,7 @@ def fused_dl(): @pytest.fixture() -def fused_cond_dl(): +def fused_cond_dl() -> OTLoader: n, lin_dim, quad_src_dim, quad_tgt_dim, cond_dim = 128, 6, 2, 4, 7 rng = np.random.default_rng(11) @@ -163,7 +190,7 @@ def fused_cond_dl(): ds = datasets.OTDataset(src, tgt) return OTLoader( - DataLoader(ds, batch_size=16, shuffle=True), + SimpleDataLoader(ds, batch_size=18), lin_dim=lin_dim, quad_src_dim=quad_src_dim, quad_tgt_dim=quad_tgt_dim, diff --git a/tests/neural/methods/otfm_test.py b/tests/neural/methods/otfm_test.py index a7c14758c..f1ccae767 100644 --- 
a/tests/neural/methods/otfm_test.py +++ b/tests/neural/methods/otfm_test.py @@ -25,7 +25,7 @@ from ott.solvers import utils as solver_utils -class TestOTFlowMatching: +class TestOTFM: @pytest.mark.parametrize("dl", ["lin_dl", "lin_cond_dl"]) def test_otfm(self, rng: jax.Array, dl: str, request): From f227d54ba30bbc920fbdb095a0a19c36edd21fc5 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Tue, 2 Apr 2024 22:54:40 +0200 Subject: [PATCH 185/186] Remove `torch` from tests in `pyproject.toml` --- pyproject.toml | 147 ++++++++++++++++++++++++------------------------- 1 file changed, 73 insertions(+), 74 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 1c71241f1..3bb8351be 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -79,7 +79,6 @@ test = [ "tslearn>=0.5; python_version < '3.12'", "lineax; python_version >= '3.9'", "matplotlib", - "torch" ] docs = [ "sphinx>=4.0", @@ -110,7 +109,7 @@ multi_line_output = 3 sections = ["FUTURE", "STDLIB", "THIRDPARTY", "TEST", "NUMERIC", "NEURAL", "PLOTTING", "FIRSTPARTY", "LOCALFOLDER"] # also contains what we import in notebooks/tests known_neural = ["flax", "optax", "diffrax", "orbax"] -known_numeric = ["numpy", "scipy", "jax", "flax", "optax", "jaxopt", "torch", "ot", "torchvision", "pandas", "sklearn", "tslearn"] +known_numeric = ["numpy", "scipy", "jax", "flax", "optax", "jaxopt", "ot", "torch", "torchvision", "pandas", "sklearn", "tslearn"] known_test = ["_pytest", "pytest"] known_plotting = ["IPython", "matplotlib", "mpl_toolkits", "seaborn"] @@ -187,85 +186,85 @@ ignore_path = ["docs/**/_autosummary", "docs/contributing.rst"] [tool.tox] legacy_tox_ini = """ - [tox] - min_version = 4.0 - env_list = lint-code,py{3.8,3.9,3.10,3.11,3.12},py3.9-jax-default - skip_missing_interpreters = true +[tox] +min_version = 4.0 +env_list = lint-code,py{3.8,3.9,3.10,3.11,3.12},py3.9-jax-default +skip_missing_interpreters = true - [testenv] - extras = - test - # 
https://github.com/google/flax/issues/3329 - py{3.9,3.10,3.11,3.12},py3.9-jax-default: neural - pass_env = CUDA_*,PYTEST_*,CI - commands_pre = - gpu: python -I -m pip install "jax[cuda]" -f https://storage.googleapis.com/jax-releases/jax_cuda_releases.html - jax-latest: python -I -m pip install 'git+https://github.com/google/jax@main' - commands = - python -m pytest {tty:--color=yes} {posargs: \ - --cov={env_site_packages_dir}{/}ott --cov-config={tox_root}{/}pyproject.toml \ - --no-cov-on-fail --cov-report=xml --cov-report=term-missing:skip-covered} +[testenv] +extras = + test + # https://github.com/google/flax/issues/3329 + py{3.9,3.10,3.11,3.12},py3.9-jax-default: neural +pass_env = CUDA_*,PYTEST_*,CI +commands_pre = + gpu: python -I -m pip install "jax[cuda]" -f https://storage.googleapis.com/jax-releases/jax_cuda_releases.html + jax-latest: python -I -m pip install 'git+https://github.com/google/jax@main' +commands = + python -m pytest {tty:--color=yes} {posargs: \ + --cov={env_site_packages_dir}{/}ott --cov-config={tox_root}{/}pyproject.toml \ + --no-cov-on-fail --cov-report=xml --cov-report=term-missing:skip-covered} - [testenv:lint-code] - description = Lint the code. - deps = pre-commit>=2.16.0 - skip_install = true - commands = - pre-commit run --all-files --show-diff-on-failure +[testenv:lint-code] +description = Lint the code. +deps = pre-commit>=2.16.0 +skip_install = true +commands = + pre-commit run --all-files --show-diff-on-failure - [testenv:lint-docs] - description = Lint the documentation. - deps = - extras = docs,neural - ignore_errors = true - allowlist_externals = make - pass_env = PYENCHANT_LIBRARY_PATH - set_env = SPHINXOPTS = -W -q --keep-going - changedir = {tox_root}{/}docs - commands = - make linkcheck {posargs} - make spelling {posargs} +[testenv:lint-docs] +description = Lint the documentation. 
+deps = +extras = docs,neural +ignore_errors = true +allowlist_externals = make +pass_env = PYENCHANT_LIBRARY_PATH +set_env = SPHINXOPTS = -W -q --keep-going +changedir = {tox_root}{/}docs +commands = + make linkcheck {posargs} + make spelling {posargs} - [testenv:build-docs] - description = Build the documentation. - use_develop = true - deps = - extras = docs,neural - allowlist_externals = make - changedir = {tox_root}{/}docs - commands = - make html {posargs} - commands_post = - python -c 'import pathlib; print("Documentation is under:", pathlib.Path("{tox_root}") / "docs" / "_build" / "html" / "index.html")' +[testenv:build-docs] +description = Build the documentation. +use_develop = true +deps = +extras = docs,neural +allowlist_externals = make +changedir = {tox_root}{/}docs +commands = + make html {posargs} +commands_post = + python -c 'import pathlib; print("Documentation is under:", pathlib.Path("{tox_root}") / "docs" / "_build" / "html" / "index.html")' - [testenv:clean-docs] - description = Remove the documentation. - deps = - skip_install = true - changedir = {tox_root}{/}docs - allowlist_externals = make - commands = - make clean +[testenv:clean-docs] +description = Remove the documentation. +deps = +skip_install = true +changedir = {tox_root}{/}docs +allowlist_externals = make +commands = + make clean - [testenv:build-package] - description = Build the package. - deps = - build - twine - commands = - python -m build --sdist --wheel --outdir {tox_root}{/}dist{/} {posargs:} - twine check {tox_root}{/}dist{/}* - commands_post = - python -c 'import pathlib; print(f"Package is under:", pathlib.Path("{tox_root}") / "dist")' +[testenv:build-package] +description = Build the package. 
+deps = + build + twine +commands = + python -m build --sdist --wheel --outdir {tox_root}{/}dist{/} {posargs:} + twine check {tox_root}{/}dist{/}* +commands_post = + python -c 'import pathlib; print(f"Package is under:", pathlib.Path("{tox_root}") / "dist")' - [testenv:format-references] - description = Format references.bib. - skip_install = true - allowlist_externals = biber - commands = biber --tool --output_file={tox_root}{/}docs{/}references.bib --nolog \ - --output_align --output_indent=2 --output_fieldcase=lower \ - --output_legacy_dates --output-field-replace=journaltitle:journal,thesis:phdthesis,institution:school \ - {tox_root}{/}docs{/}references.bib +[testenv:format-references] +description = Format references.bib. +skip_install = true +allowlist_externals = biber +commands = biber --tool --output_file={tox_root}{/}docs{/}references.bib --nolog \ + --output_align --output_indent=2 --output_fieldcase=lower \ + --output_legacy_dates --output-field-replace=journaltitle:journal,thesis:phdthesis,institution:school \ + {tox_root}{/}docs{/}references.bib """ [tool.ruff] From 6f9a77c52eb7bd78704b7b84ae75b32e338fc774 Mon Sep 17 00:00:00 2001 From: Michal Klein <46717574+michalk8@users.noreply.github.com> Date: Wed, 3 Apr 2024 14:19:14 +0200 Subject: [PATCH 186/186] [ci skip] Update docstrings --- src/ott/solvers/utils.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/ott/solvers/utils.py b/src/ott/solvers/utils.py index 6c48a2577..f7bdae63a 100644 --- a/src/ott/solvers/utils.py +++ b/src/ott/solvers/utils.py @@ -32,7 +32,7 @@ def match_linear( x: jnp.ndarray, - y: jnp.ndarray, + y: Optional[jnp.ndarray], cost_fn: Optional[costs.CostFn] = None, epsilon: Optional[float] = None, scale_cost: ScaleCost_t = 1.0, @@ -41,8 +41,8 @@ def match_linear( """Compute solution to a linear OT problem. Args: - x: Linear term of the source point cloud. - y: Linear term of the target point cloud. + x: Source point cloud of shape ``[n, d]``. 
+ y: Target point cloud of shape ``[m, d]``. cost_fn: Cost function. epsilon: Regularization parameter. scale_cost: Scaling of the cost matrix. @@ -70,8 +70,8 @@ def match_quadratic( """Compute solution to a quadratic OT problem. Args: - xx: Quadratic (incomparable) term of the source point cloud. - yy: Quadratic (incomparable) term of the target point cloud. + xx: Source point cloud of shape ``[n, d1]``. + yy: Target point cloud of shape ``[m, d2]``. x: Linear (fused) term of the source point cloud. y: Linear (fused) term of the target point cloud. scale_cost: Scaling of the cost matrix.