From d19d98c67475b3a8f52bd8ec54d85b6abe594b60 Mon Sep 17 00:00:00 2001
From: M#Coder
Date: Sat, 3 Oct 2020 13:00:49 -0700
Subject: [PATCH] Code example on how to use a function as optimizer argument

---
 docs/user/neuralnet.rst | 22 ++++++++++++++++++++++
 1 file changed, 22 insertions(+)

diff --git a/docs/user/neuralnet.rst b/docs/user/neuralnet.rst
index cf34276ce..79052c7d6 100644
--- a/docs/user/neuralnet.rst
+++ b/docs/user/neuralnet.rst
@@ -135,6 +135,28 @@ support for wildcards (globbing):
     ('linear0.bias', {'lr': 1}),
 ]
 
+Your use case may require an optimizer whose signature differs from
+that of the default PyTorch optimizers. In that case, you can define
+a custom function that reroutes the arguments as needed and pass it
+to the ``optimizer`` parameter:
+
+.. code:: python
+
+    # optimizer factory: wraps a base optimizer (here Adam) in Lookahead
+    def make_lookahead(parameters, optimizer_cls, k, alpha, **kwargs):
+        optimizer = optimizer_cls(parameters, **kwargs)
+        return Lookahead(optimizer=optimizer, k=k, alpha=alpha)
+
+
+    net = NeuralNetClassifier(
+        ...,
+        optimizer=make_lookahead,
+        optimizer__optimizer_cls=torch.optim.Adam,
+        optimizer__weight_decay=1e-2,
+        optimizer__k=5,
+        optimizer__alpha=0.5,
+        lr=1e-3)
+
 lr
 ^^
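
For reference, a self-contained version of the example added by this patch. This is a minimal sketch under stated assumptions: ``Lookahead`` is not part of PyTorch itself, so the import here assumes the third-party ``torch-optimizer`` package, and ``MyModule`` is a placeholder classifier module invented for illustration:

.. code:: python

    import torch
    from torch import nn
    from skorch import NeuralNetClassifier
    # assumption: Lookahead implementation from the third-party
    # torch-optimizer package (pip install torch-optimizer)
    from torch_optimizer import Lookahead

    # placeholder module for illustration; not part of the patch
    class MyModule(nn.Module):
        def __init__(self):
            super().__init__()
            self.dense = nn.Linear(20, 2)

        def forward(self, X):
            return torch.softmax(self.dense(X), dim=-1)

    # same factory as in the patch: build the inner optimizer from
    # optimizer_cls, then wrap it in Lookahead
    def make_lookahead(parameters, optimizer_cls, k, alpha, **kwargs):
        optimizer = optimizer_cls(parameters, **kwargs)
        return Lookahead(optimizer=optimizer, k=k, alpha=alpha)

    # skorch calls optimizer(module parameters, **optimizer__* params),
    # so the extra arguments (k, alpha, optimizer_cls, weight_decay)
    # are routed into make_lookahead
    net = NeuralNetClassifier(
        MyModule,
        optimizer=make_lookahead,
        optimizer__optimizer_cls=torch.optim.Adam,
        optimizer__weight_decay=1e-2,
        optimizer__k=5,
        optimizer__alpha=0.5,
        lr=1e-3,
    )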