diff --git a/examples/basic_optimization_with_arguments.ipynb b/examples/basic_optimization_with_arguments.ipynb new file mode 100644 index 00000000..0398a008 --- /dev/null +++ b/examples/basic_optimization_with_arguments.ipynb @@ -0,0 +1,236 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Basic Optimization with Arguments" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Here, we will run a basic optimization using an objective function that needs parameterization. We will use ``single.GlobalBestPSO`` and a version of the Rosenbrock function to demonstrate this." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Running Python 3.5.2 |Anaconda custom (64-bit)| (default, Jul 2 2016, 17:53:06) \n", + "[GCC 4.4.7 20120313 (Red Hat 4.4.7-1)]\n" + ] + } + ], + "source": [ + "import sys\n", + "# change directory to access pyswarms\n", + "sys.path.append('../')\n", + "\n", + "print(\"Running Python {}\".format(sys.version))" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "collapsed": false, + "scrolled": true + }, + "outputs": [], + "source": [ + "# import modules\n", + "import numpy as np\n", + "\n", + "# create a parameterized version of the classic Rosenbrock unconstrained optimization function\n", + "def rosenbrock_with_args(x, a, b, c=0):\n", + "\n", + " f = (a - x[:, 0]) ** 2 + b * (x[:, 1] - x[:, 0] ** 2) ** 2 + c\n", + " return f" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Using Arguments" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Arguments can be passed to the objective function either directly as keyword arguments or as a dictionary unpacked with the ``**`` operator. First, let's optimize the Rosenbrock function using keyword arguments.
Note that in the definition of the Rosenbrock function above, two arguments, ``a`` and ``b``, must be passed in addition to the design variables, and a third, ``c``, is an optional keyword argument." + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:pyswarms.single.global_best:Arguments Passed to Objective Function: {'c': 0, 'b': 100, 'a': 1}\n", + "INFO:pyswarms.single.global_best:Iteration 1/1000, cost: 1022.9667801907804\n", + "INFO:pyswarms.single.global_best:Iteration 101/1000, cost: 0.0011172801146408992\n", + "INFO:pyswarms.single.global_best:Iteration 201/1000, cost: 7.845605970774126e-07\n", + "INFO:pyswarms.single.global_best:Iteration 301/1000, cost: 1.313503109901238e-09\n", + "INFO:pyswarms.single.global_best:Iteration 401/1000, cost: 5.187079604907219e-10\n", + "INFO:pyswarms.single.global_best:Iteration 501/1000, cost: 1.0115283486088853e-10\n", + "INFO:pyswarms.single.global_best:Iteration 601/1000, cost: 2.329870757208421e-13\n", + "INFO:pyswarms.single.global_best:Iteration 701/1000, cost: 4.826176894160183e-15\n", + "INFO:pyswarms.single.global_best:Iteration 801/1000, cost: 3.125715456651088e-17\n", + "INFO:pyswarms.single.global_best:Iteration 901/1000, cost: 1.4236768129666014e-19\n", + "INFO:pyswarms.single.global_best:================================\n", + "Optimization finished!\n", + "Final cost: 0.0000\n", + "Best value: [0.99999999996210465, 0.9999999999218413]\n", + "\n" + ] + } + ], + "source": [ + "from pyswarms.single.global_best import GlobalBestPSO\n", + "\n", + "# instantiate the optimizer\n", + "x_max = 10 * np.ones(2)\n", + "x_min = -1 * x_max\n", + "bounds = (x_min, x_max)\n", + "options = {'c1': 0.5, 'c2': 0.3, 'w': 0.9}\n", + "optimizer = GlobalBestPSO(n_particles=10, dimensions=2, options=options, bounds=bounds)\n", + "\n", + "# now run the optimization, passing a=1, b=100, and c=0 as keyword arguments\n", + "\n", + "cost, pos = optimizer.optimize(rosenbrock_with_args, 1000, print_step=100, verbose=3, a=1, b=100, c=0)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "It is also possible to collect the keyword arguments in a dictionary and unpack it with the ``**`` operator when calling ``optimize``." + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:pyswarms.single.global_best:Arguments Passed to Objective Function: {'c': 0, 'b': 100.0, 'a': 1.0}\n", + "INFO:pyswarms.single.global_best:Iteration 1/1000, cost: 1.996797703363527e-21\n", + "INFO:pyswarms.single.global_best:Iteration 101/1000, cost: 1.0061676299213387e-24\n", + "INFO:pyswarms.single.global_best:Iteration 201/1000, cost: 4.8140236741112245e-28\n", + "INFO:pyswarms.single.global_best:Iteration 301/1000, cost: 2.879342304056693e-29\n", + "INFO:pyswarms.single.global_best:Iteration 401/1000, cost: 0.0\n", + "INFO:pyswarms.single.global_best:Iteration 501/1000, cost: 0.0\n", + "INFO:pyswarms.single.global_best:Iteration 601/1000, cost: 0.0\n", + "INFO:pyswarms.single.global_best:Iteration 701/1000, cost: 0.0\n", + "INFO:pyswarms.single.global_best:Iteration 801/1000, cost: 0.0\n", + "INFO:pyswarms.single.global_best:Iteration 901/1000, cost: 0.0\n", + "INFO:pyswarms.single.global_best:================================\n", + "Optimization finished!\n", + "Final cost: 0.0000\n", + "Best value: [1.0, 1.0]\n", +
"\n" + ] + } + ], + "source": [ + "kwargs = {\"a\": 1.0, \"b\": 100.0, \"c\": 0}\n", + "cost, pos = optimizer.optimize(rosenbrock_with_args, 1000, print_step=100, verbose=3, **kwargs)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": true + }, + "source": [ + "Any keyword argument of the objective function can be left out; it will then take the default value defined in the function signature. Note that here, ``c`` is not passed into the function, so it defaults to ``0``." + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:pyswarms.single.global_best:Arguments Passed to Objective Function: {'b': 100, 'a': 1}\n", + "INFO:pyswarms.single.global_best:Iteration 1/1000, cost: 0.0\n", + "INFO:pyswarms.single.global_best:Iteration 101/1000, cost: 0.0\n", + "INFO:pyswarms.single.global_best:Iteration 201/1000, cost: 0.0\n", + "INFO:pyswarms.single.global_best:Iteration 301/1000, cost: 0.0\n", + "INFO:pyswarms.single.global_best:Iteration 401/1000, cost: 0.0\n", + "INFO:pyswarms.single.global_best:Iteration 501/1000, cost: 0.0\n", + "INFO:pyswarms.single.global_best:Iteration 601/1000, cost: 0.0\n", + "INFO:pyswarms.single.global_best:Iteration 701/1000, cost: 0.0\n", + "INFO:pyswarms.single.global_best:Iteration 801/1000, cost: 0.0\n", + "INFO:pyswarms.single.global_best:Iteration 901/1000, cost: 0.0\n", + "INFO:pyswarms.single.global_best:================================\n", + "Optimization finished!\n", + "Final cost: 0.0000\n", + "Best value: [1.0, 1.0]\n", + "\n" + ] + } + ], + "source": [ + "cost, pos = optimizer.optimize(rosenbrock_with_args, 1000, print_step=100, verbose=3, a=1, b=100)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + } + ], + "metadata": { + "anaconda-cloud": {}, + "kernelspec": { + "display_name": "Python [conda env:anaconda3]", + "language": "python", + "name": "conda-env-anaconda3-py" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.5.2" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/pyswarms/base/base_discrete.py b/pyswarms/base/base_discrete.py index 52781888..a52aced9 100644 --- a/pyswarms/base/base_discrete.py +++ b/pyswarms/base/base_discrete.py @@ -193,7 +193,7 @@ def _populate_history(self, hist): self.pos_history.append(hist.position) self.velocity_history.append(hist.velocity) - def optimize(self, objective_func, iters, print_step=1, verbose=1): + def optimize(self, objective_func, iters, print_step=1, verbose=1, **kwargs): """Optimizes the swarm for a number of iterations. Performs the optimization to evaluate the objective @@ -210,6 +210,8 @@ def optimize(self, objective_func, iters, print_step=1, verbose=1): amount of steps for printing into console. verbose : int (the default is 1) verbosity setting.
+ kwargs : dict + arguments for objective function Raises ------ diff --git a/pyswarms/base/base_single.py b/pyswarms/base/base_single.py index 339559e2..ef633387 100644 --- a/pyswarms/base/base_single.py +++ b/pyswarms/base/base_single.py @@ -224,7 +224,7 @@ def _populate_history(self, hist): self.pos_history.append(hist.position) self.velocity_history.append(hist.velocity) - def optimize(self, objective_func, iters, print_step=1, verbose=1): + def optimize(self, objective_func, iters, print_step=1, verbose=1, **kwargs): """Optimizes the swarm for a number of iterations. Performs the optimization to evaluate the objective @@ -241,6 +241,8 @@ def optimize(self, objective_func, iters, print_step=1, verbose=1): amount of steps for printing into console. verbose : int (the default is 1) verbosity setting. + kwargs : dict + arguments for objective function Raises ------ diff --git a/pyswarms/discrete/binary.py b/pyswarms/discrete/binary.py index a9e41d2c..1497e4fb 100644 --- a/pyswarms/discrete/binary.py +++ b/pyswarms/discrete/binary.py @@ -149,7 +149,7 @@ def __init__( # Initialize the topology self.top = Ring() - def optimize(self, objective_func, iters, print_step=1, verbose=1): + def optimize(self, objective_func, iters, print_step=1, verbose=1,**kwargs): """Optimizes the swarm for a number of iterations. Performs the optimization to evaluate the objective @@ -165,6 +165,8 @@ def optimize(self, objective_func, iters, print_step=1, verbose=1): amount of steps for printing into console. verbose : int (the default is 1) verbosity setting. + kwargs : dict + arguments for objective function Returns ------- @@ -172,10 +174,13 @@ def optimize(self, objective_func, iters, print_step=1, verbose=1): the local best cost and the local best position among the swarm. """ + cli_print("Arguments Passed to Objective Function: {}".format(kwargs), + verbose, 2, logger=self.logger) + for i in range(iters): # Compute cost for current position and personal best - self.swarm.current_cost = objective_func(self.swarm.position) - self.swarm.pbest_cost = objective_func(self.swarm.pbest_pos) + self.swarm.current_cost = objective_func(self.swarm.position, **kwargs) + self.swarm.pbest_cost = objective_func(self.swarm.pbest_pos, **kwargs) self.swarm.pbest_pos, self.swarm.pbest_cost = compute_pbest( self.swarm ) @@ -187,8 +192,7 @@ def optimize(self, objective_func, iters, print_step=1, verbose=1): # Print to console if i % print_step == 0: cli_print( - "Iteration %s/%s, cost: %s" - % (i + 1, iters, np.min(self.swarm.best_cost)), + "Iteration {}/{}, cost: {}".format(i + 1, iters, np.min(self.swarm.best_cost)), verbose, 2, logger=self.logger, diff --git a/pyswarms/single/global_best.py b/pyswarms/single/global_best.py index eb05356f..4ff47fb9 100644 --- a/pyswarms/single/global_best.py +++ b/pyswarms/single/global_best.py @@ -131,7 +131,7 @@ def __init__( # Initialize the topology self.top = Star() - def optimize(self, objective_func, iters, print_step=1, verbose=1): + def optimize(self, objective_func, iters, print_step=1, verbose=1, **kwargs): """Optimizes the swarm for a number of iterations. Performs the optimization to evaluate the objective @@ -147,16 +147,22 @@ def optimize(self, objective_func, iters, print_step=1, verbose=1): amount of steps for printing into console. verbose : int (default is 1) verbosity setting. + kwargs : dict + arguments for the objective function Returns ------- tuple the global best cost and the global best position. 
""" + + cli_print("Arguments Passed to Objective Function: {}".format(kwargs), + verbose, 2, logger=self.logger) + for i in range(iters): # Compute cost for current position and personal best - self.swarm.current_cost = objective_func(self.swarm.position) - self.swarm.pbest_cost = objective_func(self.swarm.pbest_pos) + self.swarm.current_cost = objective_func(self.swarm.position, **kwargs) + self.swarm.pbest_cost = objective_func(self.swarm.pbest_pos, **kwargs) self.swarm.pbest_pos, self.swarm.pbest_cost = compute_pbest( self.swarm ) @@ -169,8 +175,7 @@ def optimize(self, objective_func, iters, print_step=1, verbose=1): # Print to console if i % print_step == 0: cli_print( - "Iteration %s/%s, cost: %s" - % (i + 1, iters, self.swarm.best_cost), + "Iteration {}/{}, cost: {}".format(i + 1, iters, self.swarm.best_cost), verbose, 2, logger=self.logger, diff --git a/pyswarms/single/local_best.py b/pyswarms/single/local_best.py index 6d368ca3..364cbb1f 100644 --- a/pyswarms/single/local_best.py +++ b/pyswarms/single/local_best.py @@ -174,7 +174,7 @@ def __init__( # Initialize the topology self.top = Ring() - def optimize(self, objective_func, iters, print_step=1, verbose=1): + def optimize(self, objective_func, iters, print_step=1, verbose=1, **kwargs): """Optimizes the swarm for a number of iterations. Performs the optimization to evaluate the objective @@ -190,6 +190,8 @@ def optimize(self, objective_func, iters, print_step=1, verbose=1): amount of steps for printing into console. verbose : int (default is 1) verbosity setting. + kwargs : dict + arguments for the objective function Returns ------- @@ -197,10 +199,13 @@ def optimize(self, objective_func, iters, print_step=1, verbose=1): the local best cost and the local best position among the swarm. """ + cli_print("Arguments Passed to Objective Function: {}".format(kwargs), + verbose, 2, logger=self.logger) + for i in range(iters): # Compute cost for current position and personal best - self.swarm.current_cost = objective_func(self.swarm.position) - self.swarm.pbest_cost = objective_func(self.swarm.pbest_pos) + self.swarm.current_cost = objective_func(self.swarm.position, **kwargs) + self.swarm.pbest_cost = objective_func(self.swarm.pbest_pos, **kwargs) self.swarm.pbest_pos, self.swarm.pbest_cost = compute_pbest( self.swarm ) @@ -212,8 +217,7 @@ def optimize(self, objective_func, iters, print_step=1, verbose=1): # Print to console if i % print_step == 0: cli_print( - "Iteration %s/%s, cost: %s" - % (i + 1, iters, np.min(self.swarm.best_cost)), + "Iteration {}/{}, cost: {}".format(i + 1, iters, np.min(self.swarm.best_cost)), verbose, 2, logger=self.logger, diff --git a/tests/optimizers/test_objective_func_with_kwargs.py b/tests/optimizers/test_objective_func_with_kwargs.py new file mode 100644 index 00000000..a980044b --- /dev/null +++ b/tests/optimizers/test_objective_func_with_kwargs.py @@ -0,0 +1,247 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Import modules +import pytest +import numpy as np + +# Import from package +from pyswarms.single import GlobalBestPSO, LocalBestPSO +from pyswarms.utils.functions.single_obj import rosenbrock_func + + +def rosenbrock_with_args(x, a, b): + + f = (a - x[:, 0]) ** 2 + b * (x[:, 1] - x[:, 0] ** 2) ** 2 + return f + + +@pytest.mark.parametrize('func', [ + rosenbrock_with_args +]) +def test_global_kwargs(func): + """Tests if kwargs are passed properly to the objective function for when kwargs are present""" + + # setup optimizer + options = {'c1': 0.5, 'c2': 0.3, 'w': 0.9, 'k': 2, 'p': 2} + + 
x_max = 10 * np.ones(2) + x_min = -1 * x_max + bounds = (x_min, x_max) + opt_ps = GlobalBestPSO(n_particles=100, dimensions=2, options=options, bounds=bounds) + + # run it + cost, pos = opt_ps.optimize(func, 1000, print_step=10, verbose=3, a=1, b=100) + + assert np.isclose(cost, 0, rtol=1e-03) + assert np.isclose(pos[0], 1.0, rtol=1e-03) + assert np.isclose(pos[1], 1.0, rtol=1e-03) + + +@pytest.mark.parametrize('func', [ + rosenbrock_with_args +]) +def test_global_kwargs_without_named_arguments(func): + """Tests if kwargs are passed properly to the objective function when kwargs are present and + other named arguments, such as print_step, are not passed""" + + # setup optimizer + options = {'c1': 0.5, 'c2': 0.3, 'w': 0.9, 'k': 2, 'p': 2} + + x_max = 10 * np.ones(2) + x_min = -1 * x_max + bounds = (x_min, x_max) + opt_ps = GlobalBestPSO(n_particles=100, dimensions=2, options=options, bounds=bounds) + + # run it + cost, pos = opt_ps.optimize(func, 1000, verbose=3, a=1, b=100) + + assert np.isclose(cost, 0, rtol=1e-03) + assert np.isclose(pos[0], 1.0, rtol=1e-03) + assert np.isclose(pos[1], 1.0, rtol=1e-03) + + +@pytest.mark.parametrize('func', [ + rosenbrock_func +]) +def test_global_no_kwargs(func): + """Tests that the objective function is evaluated properly when no extra arguments are passed""" + + # setup optimizer + options = {'c1': 0.5, 'c2': 0.3, 'w': 0.9, 'k': 2, 'p': 2} + + x_max = 10 * np.ones(2) + x_min = -1 * x_max + bounds = (x_min, x_max) + opt_ps = GlobalBestPSO(n_particles=100, dimensions=2, options=options, bounds=bounds) + + # run it + cost, pos = opt_ps.optimize(func, 1000, print_step=10, verbose=3) + + assert np.isclose(cost, 0, rtol=1e-03) + assert np.isclose(pos[0], 1.0, rtol=1e-03) + assert np.isclose(pos[1], 1.0, rtol=1e-03) + + +@pytest.mark.parametrize('func', [ + rosenbrock_with_args +]) +def test_local_kwargs(func): + """Tests if kwargs are passed properly to the objective function for when kwargs are present""" + + # setup optimizer + options = {'c1': 0.5, 'c2': 0.3, 'w': 0.9, 'k': 2, 'p': 2} + + x_max = 10 * np.ones(2) + x_min = -1 * x_max + bounds = (x_min, x_max) + opt_ps = LocalBestPSO(n_particles=100, dimensions=2, options=options, bounds=bounds) + + # run it + cost, pos = opt_ps.optimize(func, 1000, print_step=10, verbose=3, a=1, b=100) + + assert np.isclose(cost, 0, rtol=1e-03) + assert np.isclose(pos[0], 1.0, rtol=1e-03) + assert np.isclose(pos[1], 1.0, rtol=1e-03) + + +@pytest.mark.parametrize('func', [ + rosenbrock_func +]) +def test_local_no_kwargs(func): + """Tests that the objective function is evaluated properly when no kwargs are passed""" + + # setup optimizer + options = {'c1': 0.5, 'c2': 0.3, 'w': 0.9, 'k': 2, 'p': 2} + + x_max = 10 * np.ones(2) + x_min = -1 * x_max + bounds = (x_min, x_max) + opt_ps = LocalBestPSO(n_particles=100, dimensions=2, options=options, bounds=bounds) + + # run it + cost, pos = opt_ps.optimize(func, iters=1000, print_step=10, verbose=3) + + assert np.isclose(cost, 0, rtol=1e-03) + assert np.isclose(pos[0], 1.0, rtol=1e-03) + assert np.isclose(pos[1], 1.0, rtol=1e-03) + + +@pytest.mark.parametrize('func', [ + rosenbrock_func +]) +def test_global_uneeded_kwargs(func): + """Tests that passing a kwarg that the objective function does not accept raises a TypeError""" + + # setup optimizer + options = {'c1': 0.5, 'c2': 0.3, 'w': 0.9, 'k': 2, 'p': 2} + + x_max = 10 * np.ones(2) + x_min = -1 * x_max + bounds = (x_min, x_max) + opt_ps = GlobalBestPSO(n_particles=100, dimensions=2, options=options, bounds=bounds) + + # run
it + with pytest.raises(TypeError) as excinfo: + cost, pos = opt_ps.optimize(func, 1000, print_step=10, verbose=3, a=1) + assert 'unexpected keyword' in str(excinfo.value) + + +@pytest.mark.parametrize('func', [ + rosenbrock_with_args +]) +def test_global_missed_kwargs(func): + """Tests that omitting a required argument of the objective function raises a TypeError""" + + # setup optimizer + options = {'c1': 0.5, 'c2': 0.3, 'w': 0.9, 'k': 2, 'p': 2} + + x_max = 10 * np.ones(2) + x_min = -1 * x_max + bounds = (x_min, x_max) + opt_ps = GlobalBestPSO(n_particles=100, dimensions=2, options=options, bounds=bounds) + + # run it + with pytest.raises(TypeError) as excinfo: + cost, pos = opt_ps.optimize(func, 1000, print_step=10, verbose=3, a=1) + assert 'missing 1 required positional argument' in str(excinfo.value) + + +@pytest.mark.parametrize('func', [ + rosenbrock_func +]) +def test_local_uneeded_kwargs(func): + """Tests that passing a kwarg that the objective function does not accept raises a TypeError""" + + # setup optimizer + options = {'c1': 0.5, 'c2': 0.3, 'w': 0.9, 'k': 2, 'p': 2} + + x_max = 10 * np.ones(2) + x_min = -1 * x_max + bounds = (x_min, x_max) + opt_ps = LocalBestPSO(n_particles=100, dimensions=2, options=options, bounds=bounds) + + # run it + with pytest.raises(TypeError) as excinfo: + cost, pos = opt_ps.optimize(func, 1000, print_step=10, verbose=3, a=1) + assert 'unexpected keyword' in str(excinfo.value) + + +@pytest.mark.parametrize('func', [ + rosenbrock_with_args +]) +def test_local_missed_kwargs(func): + """Tests that omitting a required argument of the objective function raises a TypeError""" + + # setup optimizer + options = {'c1': 0.5, 'c2': 0.3, 'w': 0.9, 'k': 2, 'p': 2} + + x_max = 10 * np.ones(2) + x_min = -1 * x_max + bounds = (x_min, x_max) + opt_ps = LocalBestPSO(n_particles=100, dimensions=2, options=options, bounds=bounds) + + # run it + with pytest.raises(TypeError) as excinfo: + cost, pos = opt_ps.optimize(func, 1000, print_step=10, verbose=3, a=1) + assert 'missing 1 required positional argument' in str(excinfo.value) + + +@pytest.mark.parametrize('func', [ + rosenbrock_with_args +]) +def test_local_wrong_kwargs(func): + """Tests that passing kwargs that the objective function does not accept raises a TypeError""" + + # setup optimizer + options = {'c1': 0.5, 'c2': 0.3, 'w': 0.9, 'k': 2, 'p': 2} + + x_max = 10 * np.ones(2) + x_min = -1 * x_max + bounds = (x_min, x_max) + opt_ps = LocalBestPSO(n_particles=100, dimensions=2, options=options, bounds=bounds) + + # run it + with pytest.raises(TypeError) as excinfo: + cost, pos = opt_ps.optimize(func, 1000, print_step=10, verbose=3, c=1, d=100) + assert 'unexpected keyword' in str(excinfo.value) + + +@pytest.mark.parametrize('func', [ + rosenbrock_with_args +]) +def test_global_wrong_kwargs(func): + """Tests that passing kwargs that the objective function does not accept raises a TypeError""" + + # setup optimizer + options = {'c1': 0.5, 'c2': 0.3, 'w': 0.9, 'k': 2, 'p': 2} + + x_max = 10 * np.ones(2) + x_min = -1 * x_max + bounds = (x_min, x_max) + opt_ps = GlobalBestPSO(n_particles=100, dimensions=2, options=options, bounds=bounds) + + # run it + with pytest.raises(TypeError) as excinfo: + cost, pos = opt_ps.optimize(func, 1000, print_step=10, verbose=3, c=1, d=100) + assert 'unexpected keyword' in str(excinfo.value)
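
A minimal, self-contained sketch of how the ``**kwargs`` forwarding added in this changeset can be used from user code is shown below. It is illustrative only: the objective ``sphere_with_offset`` and its ``offset`` parameter are hypothetical names that do not appear in this diff; the optimizer class and the ``optimize`` signature are the ones shown above.

import numpy as np
from pyswarms.single import GlobalBestPSO

# Hypothetical parameterized objective: a shifted sphere function.
# `offset` is not an optimizer setting; it reaches this function only
# because optimize() forwards extra keyword arguments via **kwargs.
def sphere_with_offset(x, offset):
    # x has shape (n_particles, dimensions); return one cost per particle
    return np.sum((x - offset) ** 2, axis=1)

options = {'c1': 0.5, 'c2': 0.3, 'w': 0.9}
optimizer = GlobalBestPSO(n_particles=10, dimensions=2, options=options)

# pass the extra argument directly as a keyword argument...
cost, pos = optimizer.optimize(sphere_with_offset, iters=100, print_step=20, verbose=3, offset=2.0)

# ...or collect it in a dict and unpack it with the ** operator
kwargs = {"offset": 2.0}
cost, pos = optimizer.optimize(sphere_with_offset, iters=100, print_step=20, verbose=3, **kwargs)

An alternative that works without this changeset is to bind the extra arguments beforehand, for example with ``functools.partial(sphere_with_offset, offset=2.0)``; forwarding ``**kwargs`` keeps the call site flatter and lets the optimizer log which arguments were passed to the objective function.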