Skip to content

Commit

Permalink
Merge pull request #197 from moorepants/test-args-kwargs
Browse files Browse the repository at this point in the history
args and kwargs now work fully for minimize_ipopt
  • Loading branch information
moorepants authored Apr 6, 2023
2 parents 3a69165 + a55e0bb commit 94b6200
Show file tree
Hide file tree
Showing 2 changed files with 63 additions and 16 deletions.
45 changes: 29 additions & 16 deletions cyipopt/scipy_interface.py
Original file line number Diff line number Diff line change
def objective(self, x):
    """Evaluate the objective function at ``x``.

    Increments the function-evaluation counter ``nfev`` and forwards the
    user-supplied ``args``/``kwargs`` to the wrapped callable.
    """
    self.nfev += 1
    return self.fun(x, *self.args, **self.kwargs)

# TODO : **kwargs is ignored, not sure why it is here.
def gradient(self, x, **kwargs):
    """Evaluate the objective gradient at ``x``, counting the call."""
    self.njev += 1
    grad = self.jac(x, *self.args, **self.kwargs)  # .T
    return grad

def constraints(self, x):
    """Evaluate all constraint functions at ``x`` and stack their values.

    Each constraint is called with its own positional ``args`` and keyword
    ``kwargs`` (the diff's stale args-only loop is removed); the per-
    constraint results are concatenated into a single 1-D array.
    """
    con_values = []
    for fun, args, kwargs in zip(self._constraint_funs,
                                 self._constraint_args,
                                 self._constraint_kwargs):
        con_values.append(fun(x, *args, **kwargs))
    return np.hstack(con_values)

def jacobianstructure(self):
def jacobian(self, x):
    """Evaluate and stack the constraint Jacobians at ``x``.

    The structure (= row and column indices) is already known at this
    point, so we only need to stack the evaluated jacobians.  Sparse
    jacobians contribute their ``.data`` vector; dense ones are raveled.
    Each constraint receives its own ``args`` and ``kwargs``.
    """
    jac_values = []
    for i, (jac, args, kwargs) in enumerate(zip(self._constraint_jacs,
                                                self._constraint_args,
                                                self._constraint_kwargs)):
        if self._constraint_jac_is_sparse[i]:
            # sparse path: values already align with the known structure
            jac_val = jac(x, *args, **kwargs)
            jac_values.append(jac_val.data)
        else:
            dense_jac_val = np.atleast_2d(jac(x, *args, **kwargs))
            jac_values.append(dense_jac_val.ravel())
    return np.hstack(jac_values)

def hessian(self, x, lagrange, obj_factor):
    """Evaluate the Hessian of the Lagrangian at ``x``.

    Combines ``obj_factor`` times the objective Hessian (called with the
    user ``args``/``kwargs``; stale args-less diff line removed) with each
    constraint Hessian weighted by its Lagrange multipliers, returning
    only the lower-triangular entries as IPOPT expects.
    """
    H = obj_factor * self.obj_hess(x, *self.args, **self.kwargs)  # type: ignore
    # split the lagrangian multipliers for each constraint hessian
    lagrs = np.split(lagrange, np.cumsum(self._constraint_dims[:-1]))
    for hessian, args, kwargs, lagr in zip(self._constraint_hessians,
                                           self._constraint_args,
                                           self._constraint_kwargs, lagrs):
        H += hessian(x, lagr, *args, **kwargs)
    return H[np.tril_indices(x.size)]

def intermediate(self, alg_mod, iter_count, obj_value, inf_pr, inf_du, mu,
Expand Down Expand Up @@ -235,9 +241,11 @@ def _get_sparse_jacobian_structure(constraints, x0):
con_jac = con.get('jac', False)
if con_jac:
if isinstance(con_jac, bool):
_, jac_val = con['fun'](x0, *con.get('args', []))
_, jac_val = con['fun'](x0, *con.get('args', []),
**con.get('kwargs', {}))
else:
jac_val = con_jac(x0, *con.get('args', []))
jac_val = con_jac(x0, *con.get('args', []),
**con.get('kwargs', {}))
# check if dense or sparse
if isinstance(jac_val, coo_array):
jacobians.append(jac_val)
Expand All @@ -250,7 +258,8 @@ def _get_sparse_jacobian_structure(constraints, x0):
con_jac_is_sparse.append(False)
else:
# we approximate this jacobian later (=dense)
con_val = np.atleast_1d(con['fun'](x0, *con.get('args', [])))
con_val = np.atleast_1d(con['fun'](x0, *con.get('args', []),
**con.get('kwargs', {})))
jacobians.append(coo_array(np.ones((con_val.size, x0.size))))
con_jac_is_sparse.append(False)
J = scipy.sparse.vstack(jacobians)
Expand All @@ -263,9 +272,11 @@ def get_constraint_dimensions(constraints, x0):
constraints = (constraints, )
for con in constraints:
if con.get('jac', False) is True:
m = len(np.atleast_1d(con['fun'](x0, *con.get('args', []))[0]))
m = len(np.atleast_1d(con['fun'](x0, *con.get('args', []),
**con.get('kwargs', {}))[0]))
else:
m = len(np.atleast_1d(con['fun'](x0, *con.get('args', []))))
m = len(np.atleast_1d(con['fun'](x0, *con.get('args', []),
**con.get('kwargs', {}))))
con_dims.append(m)
return np.array(con_dims)

Expand All @@ -277,9 +288,11 @@ def get_constraint_bounds(constraints, x0, INF=1e19):
constraints = (constraints, )
for con in constraints:
if con.get('jac', False) is True:
m = len(np.atleast_1d(con['fun'](x0, *con.get('args', []))[0]))
m = len(np.atleast_1d(con['fun'](x0, *con.get('args', []),
**con.get('kwargs', {}))[0]))
else:
m = len(np.atleast_1d(con['fun'](x0, *con.get('args', []))))
m = len(np.atleast_1d(con['fun'](x0, *con.get('args', []),
**con.get('kwargs', {}))))
cl.extend(np.zeros(m))
if con['type'] == 'eq':
cu.extend(np.zeros(m))
Expand Down
34 changes: 34 additions & 0 deletions cyipopt/tests/unit/test_scipy_optional.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,40 @@ def test_minimize_ipopt_jac_and_hessians_constraints_if_scipy(
np.testing.assert_allclose(res.get("x"), expected_res, rtol=1e-5)


@pytest.mark.skipif("scipy" not in sys.modules,
                    reason="Test only valid if Scipy available.")
def test_minimize_ipopt_jac_hessians_constraints_with_arg_kwargs():
    """Makes sure that args and kwargs can be passed to all user defined
    functions in minimize_ipopt."""
    from scipy.optimize import rosen, rosen_der, rosen_hess

    # Plain ``def``s instead of lambdas assigned to names (PEP 8 / E731).
    # With a = 1.0 and b = 1.0 the scaling is a no-op, so the known
    # Rosenbrock optimum at (1, 1) is unchanged.
    def rosen2(x, a, b=None):
        return rosen(x) * a * b

    def rosen_der2(x, a, b=None):
        return rosen_der(x) * a * b

    def rosen_hess2(x, a, b=None):
        return rosen_hess(x) * a * b

    x0 = [0.0, 0.0]
    constr = {
        "type": "ineq",
        "fun": lambda x, a, b=None: -x[0]**2 - x[1]**2 + 2*a*b,
        "jac": lambda x, a, b=None: np.array([-2 * x[0], -2 * x[1]])*a*b,
        "hess": lambda x, v, a, b=None: -2 * np.eye(2) * v[0]*a*b,
        "args": (1.0, ),
        "kwargs": {'b': 1.0},
    }
    res = cyipopt.minimize_ipopt(rosen2, x0,
                                 jac=rosen_der2,
                                 hess=rosen_hess2,
                                 args=constr['args'],
                                 kwargs=constr['kwargs'],
                                 constraints=constr)
    assert isinstance(res, dict)
    assert np.isclose(res.get("fun"), 0.0)
    assert res.get("status") == 0
    assert res.get("success") is True
    expected_res = np.array([1.0, 1.0])
    np.testing.assert_allclose(res.get("x"), expected_res, rtol=1e-5)


@pytest.mark.skipif("scipy" not in sys.modules,
reason="Test only valid if Scipy available.")
def test_minimize_ipopt_sparse_jac_if_scipy():
Expand Down

0 comments on commit 94b6200

Please sign in to comment.