Merge pull request #1 from schoonhovenrichard/development
Merge development of caching features, and small bug fixes
schoonhovenrichard authored Feb 25, 2022
2 parents 19ca577 + de0c3a2 commit d3f7fee
Showing 18 changed files with 546 additions and 257 deletions.
37 changes: 24 additions & 13 deletions bloopy/algorithms/basin_hopping.py
@@ -50,25 +50,29 @@ def __call__(self, xk, f, context):
return False

class basin_hopping(continuous_base):
def __init__(self, fitness_function, minmax_problem, searchspace, T=1.0, method='L-BFGS-B'):
def __init__(self, fitness_function, minmax_problem, searchspace, T=1.0, method='L-BFGS-B', caching=True):
r"""
Base Differential Evolutions algorithm.
Basin hopping algorithm.
Args:
fitness_function (bitarray() -> float): Function that
scores fitness of bitstrings.
bitstring_size (int): Length of the bitstring instances.
min_max_problem (int): 1 if maximization problem, -1 for
minimization problem. Default is 1.
searchspace (dict(params, vals)): Dict that contains the
tunable variables and their possible values.
hillclimb (bool): (optional) Should solution be hillclimbed
afterwards (calls polish=True for scipy.optimize).
Default is False.
T (float): Temperature that determines likelihood of basin swap.
method (str): internal optimization routine.
supported_methods = ["Nelder-Mead", "Powell", "CG",
"L-BFGS-B", "COBYLA", "SLSQP", "BFGS"]
caching (bool): If True, caches fitness for every point in the search
    space visited (repeated visits do not count towards function
    evaluations). Should not be used for stochastic optimization.
"""
super().__init__(fitness_function,
minmax_problem,
searchspace)
searchspace,
caching=caching)
self.temp = T
self.method = method

@@ -90,15 +94,22 @@ def cost_func(self, y):
Intermediate function to supply to scipy.optimize function.
"""
float_indiv = continuous_individual(y, self.sspace, scaling=self.eps)
bsstr = float_indiv.bitstring.to01()
if bsstr in self.visited_cache:
fit = self.visited_cache[bsstr]
if self.caching:
bsstr = float_indiv.bitstring.to01()
if bsstr in self.visited_cache:
fit = self.visited_cache[bsstr]
else:
# These optimizers only perform minimization; for maximization
# problems, we flip the sign of the fitness.
fit = -1 * self.minmax * self.ffunc(float_indiv.bitstring)
self.nfeval += 1
self.visited_cache[bsstr] = fit
else:
# These optimizers only perform minimization; for maximization
# problems, we flip the sign of the fitness.
fit = -1 * self.minmax * self.ffunc(float_indiv.bitstring)
self.nfeval += 1
self.visited_cache[bsstr] = fit

if self.solution_fit is None or fit < self.solution_fit:
self.solution = y
self.solution_fit = fit
@@ -138,13 +149,13 @@ def solve(self,
bnds = self.get_scaling()
minimizer_dict = dict()
minimizer_dict['method'] = self.method

if self.method in ["L-BFGS-B", "SLSQP"]:
lb = np.array(bnds)[:,0]
ub = np.array(bnds)[:,1]
bounds = scipy.optimize.Bounds(lb, ub)
minimizer_dict['bounds'] = bounds

if self.method in ["CG", "L-BFGS-B", "SLSQP", "BFGS"]:
options['eps'] = self.eps
elif self.method == "COBYLA":
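For context, a hypothetical usage sketch of the new flag on basin_hopping (the module path, fitness function, and searchspace contents below are illustrative assumptions, not taken from this diff): the optimizer is constructed with caching disabled, as the docstring advises for stochastic fitness functions.

from bitarray import bitarray
from bloopy.algorithms.basin_hopping import basin_hopping

def fitness(bs: bitarray) -> float:
    # Toy fitness for illustration only: the number of set bits.
    return float(bs.count())

# dict(params, vals), per the docstring above; the values here are made up.
searchspace = {"x": [0, 1, 2, 3], "y": [0.1, 0.2, 0.4]}

opt = basin_hopping(fitness, -1, searchspace, T=1.0,
                    method="L-BFGS-B", caching=False)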
25 changes: 18 additions & 7 deletions bloopy/algorithms/continuous_base.py
@@ -10,7 +10,7 @@


class continuous_base:
def __init__(self, fitness_function, minmax_problem, searchspace):
def __init__(self, fitness_function, minmax_problem, searchspace, caching=True):
r"""
Base class for all optimizers that work on real-valued vector
solutions.
@@ -22,12 +22,19 @@ def __init__(self, fitness_function, minmax_problem, searchspace):
minimization problem. Default is 1.
searchspace (dict(params, vals)): Dict that contains the
tunable variables and their possible values.
caching (bool): If True, caches fitness for every point in the search
    space visited (repeated visits do not count towards function
    evaluations). Should not be used for stochastic optimization.
"""
self.ffunc = fitness_function
self.minmax = minmax_problem
self.sspace = searchspace
self.boundary_list = utils.generate_boundary_list(self.sspace)
self.visited_cache = dict()
self.caching = caching
if self.caching:
self.visited_cache = dict()
else:
self.visited_cache = None
self.nfeval = 0

def get_bounds(self):
@@ -54,11 +61,15 @@ def cost_func(self, y):
Cost function to optimize.
"""
float_indiv = continuous_individual(y, self.sspace, scaling=self.eps)
bsstr = float_indiv.bitstring.to01()
if bsstr in self.visited_cache:
return self.visited_cache[bsstr]
if self.caching:
bsstr = float_indiv.bitstring.to01()
if bsstr in self.visited_cache:
fit = self.visited_cache[bsstr]
else:
fit = self.ffunc(float_indiv.bitstring)
self.nfeval += 1
self.visited_cache[bsstr] = fit
else:
fit = self.ffunc(float_indiv.bitstring)
self.nfeval += 1
self.visited_cache[bsstr] = fit
return fit
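The caching logic above, restated as a standalone sketch to show its semantics (CachedCost and the toy fitness are illustrative names, not part of bloopy): fitness values are memoized by the bitstring's string form, and only cache misses count as function evaluations.

from bitarray import bitarray

class CachedCost:
    # Memoize fitness by the bitstring's '01' string; count only cache misses.
    def __init__(self, ffunc, caching=True):
        self.ffunc = ffunc
        self.caching = caching
        self.visited_cache = dict() if caching else None
        self.nfeval = 0

    def __call__(self, bs):
        if self.caching:
            key = bs.to01()
            if key in self.visited_cache:
                return self.visited_cache[key]
            fit = self.ffunc(bs)
            self.nfeval += 1
            self.visited_cache[key] = fit
            return fit
        fit = self.ffunc(bs)
        self.nfeval += 1
        return fit

cost = CachedCost(lambda bs: float(bs.count()))
point = bitarray("10110")
assert cost(point) == cost(point)  # second call is a cache hit
assert cost.nfeval == 1            # repeated visits are not counted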
30 changes: 23 additions & 7 deletions bloopy/algorithms/differential_evolution.py
@@ -45,9 +45,9 @@ def __call__(self, xk=None, convergence=None):


class differential_evolution(continuous_base):
def __init__(self, fitness_function, minmax_problem, searchspace, method=None, hillclimb=False, pop_size=15, mutation=(0.5,1), recombination=0.7):
def __init__(self, fitness_function, minmax_problem, searchspace, method=None, hillclimb=False, pop_size=15, mutation=(0.5,1), recombination=0.7, caching=True):
r"""
Base Differential Evolutions algorithm.
Differential Evolution algorithm.
Args:
fitness_function (bitarray() -> float): Function that
@@ -61,10 +61,19 @@ def __init__(self, fitness_function, minmax_problem, searchspace, method=None, h
afterwards (calls polish=True for scipy.optimize).
Default is False.
pop_size (int): (optional) Population size, default is 15.
mutation (tuple(float, float)): Mutation constant; higher
    values slow down convergence but increase the search radius.
recombination (float): Recombination constant; higher values
    allow more children to progress into the next generation,
    at the risk of population instability.
caching (bool): If True, caches fitness for every point in the search
    space visited (repeated visits do not count towards function
    evaluations). Should not be used for stochastic optimization.
"""
super().__init__(fitness_function,
minmax_problem,
searchspace)
searchspace,
caching=caching)
self.method = method
self.hillclimb = hillclimb
self.pop_size = pop_size
@@ -84,15 +93,22 @@ def cost_func(self, y):
NOTE: The "-1 *" is because scipy's differential evolution only does minimization
"""
float_indiv = continuous_individual(y, self.sspace, scaling=self.eps)
bsstr = float_indiv.bitstring.to01()
if bsstr in self.visited_cache:
fit = self.visited_cache[bsstr]
if self.caching:
bsstr = float_indiv.bitstring.to01()
if bsstr in self.visited_cache:
fit = self.visited_cache[bsstr]
else:
# These optimizers only perform minimization; for maximization
# problems, we flip the sign of the fitness.
fit = -1 * self.minmax * self.ffunc(float_indiv.bitstring)
self.nfeval += 1
self.visited_cache[bsstr] = fit
else:
# These optimizers only perform minimization; for maximization
# problems, we flip the sign of the fitness.
fit = -1 * self.minmax * self.ffunc(float_indiv.bitstring)
self.nfeval += 1
self.visited_cache[bsstr] = fit

if self.solution_fit is None or fit < self.solution_fit:
self.solution = y
self.solution_fit = fit
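The "-1 * self.minmax" factor in cost_func encodes a convention worth spelling out: scipy's routines minimize, so for a maximization problem (minmax = 1) the fitness is negated before being handed to the optimizer. A minimal sketch of that convention (the function name is illustrative):

def to_minimization(fitness_value, minmax):
    # minmax = 1 (maximize): negate, so larger fitness -> smaller cost.
    # minmax = -1 (minimize): leave the sign alone.
    return -1 * minmax * fitness_value

assert to_minimization(10.0, 1) == -10.0
assert to_minimization(10.0, -1) == 10.0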
41 changes: 27 additions & 14 deletions bloopy/algorithms/discrete_diffevo.py
@@ -21,7 +21,8 @@ def __init__(self,
searchspace=None,
input_pop=None,
mutrange=(0.2,0.7),
recomb=0.9):
recomb=0.9,
caching=True):
r"""
Discrete Differential Evolution algorithm. Most functionalities can
be adapted by changing input component functions.
@@ -33,8 +34,8 @@
bitstring_size (int): Length of the bitstring instances.
min_max_problem (int): 1 if maximization problem, -1 for
minimization problem. Default is 1.
boundary_list (list(tuple(int))): (optional) None if
    regular bitstrings. Otherwise, list of tuples
(start, end) of each segment of the bitstring in
which we can have only one 1 that points to the
element of the list that is active.
@@ -43,6 +44,9 @@
the GA will generate its own.
maxdepth (int): Maximum tree depth for search masks
for mutual information.
caching (bool): If True, caches fitness for every point in the search
    space visited (repeated visits do not count towards function
    evaluations). Should not be used for stochastic optimization.
"""
super().__init__(fitness_function,
rep.mask_crossover_pair, # The reproductor is fixed now
@@ -52,7 +56,8 @@
min_max_problem,
searchspace,
input_pop,
None)#Mutations are not used
mutation=None,
caching=caching)#Mutations are not used
self.mutrange = mutrange
self.recomb = recomb

@@ -73,7 +78,7 @@ def create_offspring2(self, parents):
raise Exception("Not implemented for uneven number of parents!")
best = self.current_best()
random.shuffle(parents)

mut = (self.mutrange[1]-self.mutrange[0])*random.random() + self.mutrange[0]
for k in range(len(parents)):
a = random.randint(0, len(parents)-1)
@@ -129,12 +134,16 @@ def create_offspring2(self, parents):
if random.random() > self.recomb:
bprime.bitstring[b] = parents[k].bitstring[b]

bsstr = bprime.bitstring.to01()
if bsstr in self.visited_cache:
bprime.fitness = self.visited_cache[bsstr]
if self.caching:
bsstr = bprime.bitstring.to01()
if bsstr in self.visited_cache:
bprime.fitness = self.visited_cache[bsstr]
else:
bprime.fitness = self.ffunc(bprime.bitstring)
self.visited_cache[bsstr] = bprime.fitness
self.func_evals += 1
else:
bprime.fitness = self.ffunc(bprime.bitstring)
self.visited_cache[bsstr] = bprime.fitness
self.func_evals += 1

# If better, replace candidate
@@ -157,7 +166,7 @@ def create_offspring(self, parents):
raise Exception("Not implemented for uneven number of parents!")
best = self.current_best()
random.shuffle(parents)

mut = (self.mutrange[1]-self.mutrange[0])*random.random() + self.mutrange[0]
for k in range(len(parents)):
a = random.randint(0, len(parents)-1)
@@ -220,12 +229,16 @@ def create_offspring(self, parents):
if random.random() > self.recomb:
bprime.bitstring[b] = parents[k].bitstring[b]

bsstr = bprime.bitstring.to01()
if bsstr in self.visited_cache:
bprime.fitness = self.visited_cache[bsstr]
if self.caching:
bsstr = bprime.bitstring.to01()
if bsstr in self.visited_cache:
bprime.fitness = self.visited_cache[bsstr]
else:
bprime.fitness = self.ffunc(bprime.bitstring)
self.visited_cache[bsstr] = bprime.fitness
self.func_evals += 1
else:
bprime.fitness = self.ffunc(bprime.bitstring)
self.visited_cache[bsstr] = bprime.fitness
self.func_evals += 1

# If better, replace candidate
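Two stochastic ingredients are visible in create_offspring above, restated here as a standalone sketch (function names are illustrative; the defaults mirror mutrange=(0.2,0.7) and recomb=0.9 from __init__): the per-call mutation factor is drawn uniformly from mutrange, and recombination overwrites a child's bit with the parent's bit with probability 1 - recomb.

import random
from bitarray import bitarray

def draw_mutation_factor(mutrange=(0.2, 0.7)):
    # Uniform draw from [mutrange[0], mutrange[1]], as in create_offspring.
    lo, hi = mutrange
    return (hi - lo) * random.random() + lo

def binomial_recombine(child, parent, recomb=0.9):
    # Each bit of the child is replaced by the parent's bit
    # with probability 1 - recomb.
    out = bitarray(child)
    for b in range(len(out)):
        if random.random() > recomb:
            out[b] = parent[b]
    return out

mut = draw_mutation_factor()
assert 0.2 <= mut <= 0.7
offspring = binomial_recombine(bitarray("10101010"), bitarray("11110000"))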
27 changes: 19 additions & 8 deletions bloopy/algorithms/dual_annealing.py
@@ -46,22 +46,26 @@ def __call__(self, xk, f, context):


class dual_annealing(continuous_base):
def __init__(self, fitness_function, minmax_problem, searchspace, method = 'BFGS'):
def __init__(self, fitness_function, minmax_problem, searchspace, method = 'BFGS', caching=True):
r"""
Base Dual Annealing algorithm.
Dual Annealing algorithm.
Args:
fitness_function (bitarray() -> float): Function that
scores fitness of bitstrings.
bitstring_size (int): Length of the bitstring instances.
min_max_problem (int): 1 if maximization problem, -1 for
minimization problem. Default is 1.
searchspace (dict(params, vals)): Dict that contains the
tunable variables and their possible values.
method (str): Internal minimization routine.
caching (bool): If True, caches fitness for every point in the search
    space visited (repeated visits do not count towards function
    evaluations). Should not be used for stochastic optimization.
"""
super().__init__(fitness_function,
minmax_problem,
searchspace)
searchspace,
caching=caching)
self.method = method
supported_methods = ['COBYLA','L-BFGS-B','SLSQP','CG','Powell','Nelder-Mead', 'BFGS', 'trust-constr']
if self.method not in supported_methods:
@@ -72,15 +76,22 @@ def cost_func(self, y):
Intermediate function to supply to scipy.optimize function.
"""
float_indiv = continuous_individual(y, self.sspace, scaling=self.eps)
bsstr = float_indiv.bitstring.to01()
if bsstr in self.visited_cache:
fit = self.visited_cache[bsstr]
if self.caching:
bsstr = float_indiv.bitstring.to01()
if bsstr in self.visited_cache:
fit = self.visited_cache[bsstr]
else:
# These optimizers only perform minimization; for maximization
# problems, we flip the sign of the fitness.
fit = -1 * self.minmax * self.ffunc(float_indiv.bitstring)
self.nfeval += 1
self.visited_cache[bsstr] = fit
else:
# These optimizers only perform minimization; for maximization
# problems, we flip the sign of the fitness.
fit = -1 * self.minmax * self.ffunc(float_indiv.bitstring)
self.nfeval += 1
self.visited_cache[bsstr] = fit

if self.solution_fit is None or fit < self.solution_fit:
self.solution = y
self.solution_fit = fit
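The dual_annealing constructor above validates the internal minimizer against a whitelist before use; a minimal sketch of that check (the method list is copied from the diff, the error message is an assumption):

SUPPORTED_METHODS = ['COBYLA', 'L-BFGS-B', 'SLSQP', 'CG', 'Powell',
                     'Nelder-Mead', 'BFGS', 'trust-constr']

def check_method(method):
    # Fail fast on unsupported local minimizers.
    if method not in SUPPORTED_METHODS:
        raise ValueError("Unsupported local minimizer: " + method)
    return method

check_method('BFGS')          # passes
# check_method('dogleg')      # would raise ValueError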