add examples outside of PyPI package
schoonhovenrichard committed Aug 6, 2021
1 parent 7a1cc77 commit dd831ae
Showing 18 changed files with 1,217 additions and 0 deletions.
101 changes: 101 additions & 0 deletions examples/dynamic_programming.py
@@ -0,0 +1,101 @@
import numpy as np
import fitness_functions as ff

def bruteforce_MK_solve(mk_func):
    r"""
    Brute-force computation of the optimum of an MK function.
    Runs in exponential time, so only use it for small bitstrings.
    """
    best_fit = 0.0
    best_bitset = ff.create_bitstring(0, mk_func.m)
    for i in range(2**mk_func.m):
        bs = ff.create_bitstring(i, mk_func.m)
        fitness = mk_func.get_fitness(bs)
        if fitness > best_fit:
            best_fit = fitness
            best_bitset = bs
    return best_fit, best_bitset

def dp_solve_MK(mk_func):
    r"""
    Find the optimal solution of an adjacent MK function using
    dynamic programming.
    """
    m = mk_func.m
    k = mk_func.k
    if m % (k-1) != 0:
        raise ValueError("DP solver requires m to be a multiple of k-1; leftover bits are not supported")
    tilde_functions = []
    q = m // (k-1)
    for i in range(q):
        tilde_function = dict()
        start = (k-1) * i
        for a0 in range(2**(2*k-2)):
            tilde_val = 0
            temp_bs = ff.create_bitstring(a0, 2*k-2)
            for j in range(k-1):
                current_mk_func = mk_func.fitness_map[start + j]
                temp_bs1 = ff.create_bitstring(0, k)
                for p in range(k):
                    temp_bs1[p] = temp_bs[j+p]
                tilde_val += current_mk_func[tuple(temp_bs1)]
            tilde_function[tuple(temp_bs)] = tilde_val
        tilde_functions.append(tilde_function)
    return dp_solve_adjacentMK_tilde(tilde_functions, q-1, k)

def dp_solve_adjacentMK_tilde(tilde_funcs, pos, k):
    r"""
    Recursive helper for the dynamic-programming solver of adjacent MK functions.
    """
    if pos == 1:
        best_val = 0
        for a1 in range(2**(k-1)):
            for a2 in range(2**(k-1)):
                bsa1 = ff.create_bitstring(a1, k-1)
                bsa2 = ff.create_bitstring(a2, k-1)
                tilde0_bs = ff.create_bitstring(0, 2*k-2)
                tilde1_bs = ff.create_bitstring(0, 2*k-2)
                for i in range(k-1):
                    tilde0_bs[i] = bsa1[i]
                    tilde1_bs[k-1+i] = bsa1[i]
                for i in range(k-1):
                    tilde0_bs[k-1+i] = bsa2[i]
                    tilde1_bs[i] = bsa2[i]
                val = tilde_funcs[0][tuple(tilde0_bs)] + tilde_funcs[1][tuple(tilde1_bs)]
                if val > best_val:
                    best_val = val
        return best_val

    new_tilde_funcs = []
    for i in range(pos-1):
        new_tilde_funcs.append(tilde_funcs[i])
    tilde1 = tilde_funcs[pos-1]
    tilde2 = tilde_funcs[pos]
    new_tilde = dict()
    for a1 in range(2**(k-1)):
        for a2 in range(2**(k-1)):
            bsa1 = ff.create_bitstring(a1, k-1)
            bsa2 = ff.create_bitstring(a2, k-1)
            new_tilde_inp = ff.create_bitstring(0, 2*k-2)
            for i in range(k-1):
                new_tilde_inp[i] = bsa1[i]
            for i in range(k-1):
                new_tilde_inp[k-1+i] = bsa2[i]
            bs_tilde1 = ff.create_bitstring(0, 2*k-2)
            bs_tilde2 = ff.create_bitstring(0, 2*k-2)
            for i in range(k-1):
                bs_tilde1[i] = bsa1[i]
            for i in range(k-1):
                bs_tilde2[k-1+i] = bsa2[i]
            best_bval = 0
            for b in range(2**(k-1)):
                bsb = ff.create_bitstring(b, k-1)
                for j in range(k-1):
                    bs_tilde1[k-1+j] = bsb[j]
                for j in range(k-1):
                    bs_tilde2[j] = bsb[j]
                bval = tilde1[tuple(bs_tilde1)] + tilde2[tuple(bs_tilde2)]
                if bval > best_bval:
                    best_bval = bval
            new_tilde[tuple(new_tilde_inp)] = best_bval
    new_tilde_funcs.append(new_tilde)
    return dp_solve_adjacentMK_tilde(new_tilde_funcs, pos-1, k)
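A quick way to sanity-check this module (not part of the committed files) is to compare the two solvers on a small instance, where brute force is still feasible. The sketch below assumes only the behaviour visible in the examples of this commit: ff.adjacent_MK_function(m, k) with .generate(), dp_solve_MK returning the optimal fitness value, and bruteforce_MK_solve returning a (fitness, bitstring) pair.

import fitness_functions as ff
import dynamic_programming as dp

k = 3
m = 8 * (k - 1)  # m must be a multiple of k-1; 16 bits keeps brute force cheap
small_mk = ff.adjacent_MK_function(m, k)
small_mk.generate()

dp_fit = dp.dp_solve_MK(small_mk)
bf_fit, bf_solution = dp.bruteforce_MK_solve(small_mk)
print("DP optimum:", dp_fit, " brute-force optimum:", bf_fit)
assert abs(dp_fit - bf_fit) < 1e-9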
87 changes: 87 additions & 0 deletions examples/example_basin_hopping.py
@@ -0,0 +1,87 @@
import numpy as np
import sys
from timeit import default_timer as timer
import random

import fitness_functions as ff
import dynamic_programming as dp
import bloopy.algorithms.basin_hopping as bashop
import bloopy.algorithms.local_minimize as minim
import bloopy.utils as utils

## Generate a (randomized) MK fitness function
k = 4
m = 33*(k-1)  # m must be a multiple of k-1 for the DP solver
randomMK = True
if randomMK:
    mk_func = ff.random_MK_function(m, k)
    mk_func.generate()
else:
    mk_func = ff.adjacent_MK_function(m, k)
    mk_func.generate()

## Find optimal solution using dynamic programming for comparison
best_dp_fit = dp.dp_solve_MK(mk_func)
print("Max fitness DP:", best_dp_fit)

# We also have a brute-force solver, but it is exponentially slow;
# only use it to check bitstrings of fewer than 20 bits.
#best_fit, sol = dp.bruteforce_MK_solve(mk_func)
#print("Max fitness bruteforce:", best_fit)

ffunc = mk_func.get_fitness
bitstring_size = m

# Continuous algorithms require a search space to operate on.
# NOTE: Continuous algorithms are suited to low-dimensional discrete
# problems with many values per parameter; bitstring-based problems,
# with only 2 values per dimension, are a poor fit.
searchspace = utils.create_bitstring_searchspace(m)
converter = utils.bitstring_as_discrete(searchspace, mk_func.get_fitness)
fitness_func = converter.get_fitness

count = 1
for vals in searchspace.values():
    count *= len(vals)
print("Points in searchspace:", count)

BASH = True
MINIM = False

if BASH:
    ## Run basin hopping
    # supported_methods = ["Nelder-Mead", "Powell", "CG", "L-BFGS-B", "COBYLA", "SLSQP", "BFGS"]
    method = "SLSQP"
    temperature = 1.0
    iterations = 10000
    test_bash = bashop.basin_hopping(fitness_func,
            1,
            searchspace,
            T=temperature,
            method=method)

    x = test_bash.solve(max_iter=iterations,
            max_time=20,  # seconds
            stopping_fitness=0.98*best_dp_fit,
            max_funcevals=10000)

    print("Best fitness:", x[0], ", fraction of optimal {0:.4f}".format(x[0]/float(best_dp_fit)))
    print("Function evaluations:", x[2])

if MINIM:
    ## Run local minimization
    # supported_methods = ["Nelder-Mead", "Powell", "CG", "L-BFGS-B", "COBYLA", "SLSQP", "BFGS"]
    method = "CG"
    iterations = 10000
    test_minim = minim.local_minimizer(fitness_func,
            1,
            searchspace,
            method=method)

    x = test_minim.solve(max_iter=iterations,
            max_time=10,  # seconds
            stopping_fitness=0.98*best_dp_fit,
            max_funcevals=10000)

    print("Best fitness:", x[0], ", fraction of optimal {0:.4f}".format(x[0]/float(best_dp_fit)))
    print("Function evaluations:", x[2])
75 changes: 75 additions & 0 deletions examples/example_differential_evolution.py
@@ -0,0 +1,75 @@
import numpy as np
import sys
from timeit import default_timer as timer
import random

import fitness_functions as ff
import dynamic_programming as dp
import bloopy.algorithms.differential_evolution as de
import bloopy.utils as utils

## Generate a (randomized) MK fitness function
k = 4
m = 33*(k-1)  # m must be a multiple of k-1 for the DP solver
randomMK = True
if randomMK:
    mk_func = ff.random_MK_function(m, k)
    mk_func.generate()
else:
    mk_func = ff.adjacent_MK_function(m, k)
    mk_func.generate()

## Find optimal solution using dynamic programming for comparison
best_dp_fit = dp.dp_solve_MK(mk_func)
print("Max fitness DP:", best_dp_fit)

# We also have a brute-force solver, but it is exponentially slow;
# only use it to check bitstrings of fewer than 20 bits.
#best_fit, sol = dp.bruteforce_MK_solve(mk_func)
#print("Max fitness bruteforce:", best_fit)

ffunc = mk_func.get_fitness
bitstring_size = m

# Continuous algorithms require a search space to operate on.
# NOTE: Continuous algorithms are suited to low-dimensional discrete
# problems with many values per parameter; bitstring-based problems,
# with only 2 values per dimension, are a poor fit.
searchspace = utils.create_bitstring_searchspace(m)
converter = utils.bitstring_as_discrete(searchspace, mk_func.get_fitness)
fitness_func = converter.get_fitness

# Count the number of points in the search space
count = 1
for vals in searchspace.values():
    count *= len(vals)
print("Points in searchspace:", count)

## Run differential evolution
method = "best1bin"
popsize = 150
recomb = 0.7
mutate = (0.2, 0.7)

minvar = 0.1
maxf = 10000
iterations = int(maxf/(popsize * m) - 1)

test_diffevo = de.differential_evolution(fitness_func,
        1,
        searchspace,
        method=method,
        mutation=mutate,
        recombination=recomb,
        hillclimb=False,  # False for accurate feval measurements
        pop_size=popsize)

x = test_diffevo.solve(min_variance=minvar,
        max_iter=iterations,
        max_time=30,  # seconds
        stopping_fitness=0.98*best_dp_fit,
        max_funcevals=maxf)

print("Best fitness:", x[0], ", fraction of optimal {0:.4f}".format(x[0]/float(best_dp_fit)))
print("Function evaluations:", x[2])
62 changes: 62 additions & 0 deletions examples/example_dual_annealing.py
@@ -0,0 +1,62 @@
import numpy as np
import sys
from timeit import default_timer as timer
import random

import fitness_functions as ff
import dynamic_programming as dp
import bloopy.algorithms.dual_annealing as dsa
import bloopy.utils as utils

## Generate a (randomized) MK fitness function
k = 4
m = 33*(k-1)  # m must be a multiple of k-1 for the DP solver
randomMK = True
if randomMK:
    mk_func = ff.random_MK_function(m, k)
    mk_func.generate()
else:
    mk_func = ff.adjacent_MK_function(m, k)
    mk_func.generate()

## Find optimal solution using dynamic programming for comparison
best_dp_fit = dp.dp_solve_MK(mk_func)
print("Max fitness DP:", best_dp_fit)

# We also have a brute-force solver, but it is exponentially slow;
# only use it to check bitstrings of fewer than 20 bits.
#best_fit, sol = dp.bruteforce_MK_solve(mk_func)
#print("Max fitness bruteforce:", best_fit)

ffunc = mk_func.get_fitness
bitstring_size = m

# Continuous algorithms require a search space to operate on.
# NOTE: Continuous algorithms are suited to low-dimensional discrete
# problems with many values per parameter; bitstring-based problems,
# with only 2 values per dimension, are a poor fit.
searchspace = utils.create_bitstring_searchspace(m)
converter = utils.bitstring_as_discrete(searchspace, mk_func.get_fitness)
fitness_func = converter.get_fitness

count = 1
for vals in searchspace.values():
    count *= len(vals)
print("Points in searchspace:", count)

## Run dual annealing
# supported_methods = ['COBYLA','L-BFGS-B','SLSQP','CG','Powell','Nelder-Mead', 'BFGS', 'trust-constr']
method = "trust-constr"
iterations = 10000
test_dsa = dsa.dual_annealing(fitness_func,
        1,
        searchspace,
        method=method)

x = test_dsa.solve(max_iter=iterations,
        max_time=10,  # seconds
        stopping_fitness=0.98*best_dp_fit,
        max_funcevals=10000)

print("Best fitness:", x[0], ", fraction of optimal {0:.4f}".format(x[0]/float(best_dp_fit)))
print("Function evaluations:", x[2])
55 changes: 55 additions & 0 deletions examples/example_ga.py
@@ -0,0 +1,55 @@
import numpy as np
import sys
from timeit import default_timer as timer
import random

import fitness_functions as ff
import dynamic_programming as dp
import bloopy.mutation_functions as mut
import bloopy.reproductive_functions as rep
import bloopy.selection_functions as sel
import bloopy.algorithms.genetic_algorithm as ga

## Generate a (randomized) MK fitness function
k = 4
m = 33*(k-1)  # m must be a multiple of k-1 for the DP solver
randomMK = True
if randomMK:
    mk_func = ff.random_MK_function(m, k)
    mk_func.generate()
else:
    mk_func = ff.adjacent_MK_function(m, k)
    mk_func.generate()

## Find optimal solution using dynamic programming for comparison
best_dp_fit = dp.dp_solve_MK(mk_func)
print("Max fitness DP:", best_dp_fit)

# We also have a brute-force solver, but it is exponentially slow;
# only use it to check bitstrings of fewer than 20 bits.

#best_fit, sol = dp.bruteforce_MK_solve(mk_func)
#print("Max fitness bruteforce:", best_fit)

fitness_func = mk_func.get_fitness
population_size = 1000
reproductor = rep.twopoint_crossover
selector = sel.tournament2_selection
bitstring_size = m

## We can optionally provide an input population
test_ga = ga.genetic_algorithm(fitness_func,
        reproductor,
        selector,
        population_size,
        bitstring_size,
        min_max_problem=1,
        input_pop=None)

x = test_ga.solve(min_variance=0.1,
        max_iter=1000,
        no_improve=300,
        max_time=15,  # seconds
        stopping_fitness=0.98*best_dp_fit,
        max_funcevals=200000)
print("Best fitness:", x[0], ", fraction of optimal {0:.4f}".format(x[0]/float(best_dp_fit)))