import sys
sys.path.insert(0, "/usr/local/lib/python2.7/site-packages")

import numpy as np
import nlopt
from math import sqrt, exp


def cobyla_bug_repro():
    # Mutable closure counter for the number of objective evaluations.
    numFuncEval = [0]

    lb = [-10., -10.]
    ub = [+10., +10.]

    def myfunc(x, grad):
        # Penalize NaN or out-of-bounds trial points instead of evaluating them.
        if np.any(np.isnan(x)) or np.any(x < lb) or np.any(x > ub):
            return 1.e10

        numFuncEval[0] += 1

        # All algorithms below are derivative-free, so no gradient should be requested.
        if grad.size > 0:
            raise ValueError('Cannot supply gradient values')

        x1 = x[0]
        x2 = x[1]

        # Powell's Badly Scaled Function
        # http://al-roomi.org/benchmarks/unconstrained/2-dimensions/65-powell-s-badly-scaled-function
        # Range of initial points: -10 < xj < 10, j = 1, 2
        # Global minimum: (x1, x2) = (1.098...e-5, 9.106...), f(x1, x2) = 0
        f1 = (10000. * x1 * x2 - 1.)**2
        f2 = (exp(-x1) + exp(-x2) - 1.0001)**2
        retval = f1 + f2
        print "myfunc: x:", x, ", val:", retval
        return retval

    algolist = [
        #nlopt.LN_PRAXIS,
        #nlopt.LN_NELDERMEAD,
        nlopt.LN_COBYLA,
        nlopt.LN_NEWUOA,
        nlopt.LN_NEWUOA_BOUND,
        nlopt.LN_BOBYQA,
    ]

    for algo in algolist:
        opt = nlopt.opt(algo, 2)

        print
        print '-' * 40
        print "Algo:", opt.get_algorithm_name()

        numFuncEval[0] = 0
        opt.set_min_objective(myfunc)
        opt.set_lower_bounds(lb)
        opt.set_upper_bounds(ub)
        opt.set_xtol_rel(1e-4)

        # Start each run from the midpoint of the bounds.
        x0 = 0.5 * (np.array(lb) + np.array(ub))
        print "x0:", x0

        x = opt.optimize(x0)
        minf = opt.last_optimum_value()

        print "optimum at ", x[0], x[1]
        print "minimum value = ", minf
        print "result code = ", opt.last_optimize_result()
        print "num function evaluations:", numFuncEval[0]


cobyla_bug_repro()