Fixed AQGD optimizer groups objective function calls by default #116

Merged (8 commits) on Jan 25, 2024
6 changes: 6 additions & 0 deletions .pylintdict
@@ -31,6 +31,7 @@ bitstrings
bloch
boltzmann
bool
boolean
boyer
brassard
broyden
@@ -238,6 +239,8 @@ optimizer's
optimizers
otimes
o'brien
parallelization
parallelized
param
parameterizations
parametrization
@@ -255,6 +258,7 @@ postprocess
powell
pre
preconditioner
prepend
preprint
preprocess
preprocesses
@@ -297,6 +301,7 @@ rightarrow
robert
rosen
runarsson
runtime
rz
sanjiv
sashank
@@ -310,6 +315,7 @@ scikit
scipy
sdg
seealso
serializable
shanno
skquant
sle
15 changes: 12 additions & 3 deletions qiskit_algorithms/optimizers/aqgd.py
@@ -57,6 +57,7 @@ def __init__(
momentum: float | list[float] = 0.25,
param_tol: float = 1e-6,
averaging: int = 10,
max_evals_grouped: int = 1,
) -> None:
"""
Performs Analytical Quantum Gradient Descent (AQGD) with Epochs.
@@ -73,6 +74,7 @@ def __init__(
param_tol: Tolerance for change in norm of parameters.
averaging: Length of window over which to average objective values for objective
convergence criterion
max_evals_grouped: Maximum number of default-gradient evaluations grouped into a single
objective function call.

Raises:
AlgorithmError: If the length of ``maxiter``, ``momentum``, and ``eta`` is not the same.
@@ -98,6 +100,7 @@ def __init__(
self._param_tol = param_tol
self._tol = tol
self._averaging = averaging
self.set_max_evals_grouped(max_evals_grouped)

# state
self._avg_objval: float | None = None
@@ -156,7 +159,15 @@ def _compute_objective_fn_and_gradient(
)
# Evaluate,
# reshaping to flatten, as expected by objective function
values = np.array(obj(param_sets_to_eval.reshape(-1)))
if self._max_evals_grouped > 1:
batches = [
param_sets_to_eval[i : i + self._max_evals_grouped]
for i in range(0, len(param_sets_to_eval), self._max_evals_grouped)
]
values = np.array(np.concatenate([obj(b) for b in batches]))
else:
batches = param_sets_to_eval
values = np.array([obj(b) for b in batches])

# Update number of objective function evaluations
self._eval_count += 2 * num_params + 1
@@ -312,7 +323,6 @@ def minimize(

iter_count = 0
logger.info("Initial Params: %s", params)

epoch = 0
converged = False
for (eta, mom_coeff) in zip(self._eta, self._momenta_coeff):
@@ -327,7 +337,6 @@
converged = self._converged_parameter(params, self._param_tol)
if converged:
break

# Calculate objective function and estimate of analytical gradient
if jac is None:
objval, gradient = self._compute_objective_fn_and_gradient(params, fun)
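Note: to see the grouping above in isolation, the standalone sketch below mirrors the batching added to `_compute_objective_fn_and_gradient`. It is an illustration only, not part of the diff; the stand-in objective function, the grouping size of 4, and the array shapes are assumptions made for the example.

from __future__ import annotations

import numpy as np


def obj(points: np.ndarray) -> float | np.ndarray:
    """Stand-in objective: a real one (e.g. VQE's energy evaluation) would run circuits."""
    points = np.asarray(points)
    if points.ndim == 1:  # a single parameter vector -> scalar value
        return float(np.sum(points**2))
    return np.sum(points**2, axis=1)  # a batch of parameter vectors -> one value per row


max_evals_grouped = 4  # assumed grouping size for this sketch
param_sets_to_eval = np.random.rand(9, 3)  # 9 shifted parameter sets, 3 parameters each

if max_evals_grouped > 1:
    # Chunk the parameter sets into groups of at most max_evals_grouped rows and
    # evaluate each chunk with a single objective call, as in the diff above.
    batches = [
        param_sets_to_eval[i : i + max_evals_grouped]
        for i in range(0, len(param_sets_to_eval), max_evals_grouped)
    ]
    values = np.array(np.concatenate([obj(b) for b in batches]))
else:
    # One objective call per parameter set.
    values = np.array([obj(b) for b in param_sets_to_eval])

print(values.shape)  # (9,) in both branches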
@@ -0,0 +1,9 @@
---
fixes:
  - |
    Fixed the AQGD optimizer grouping objective function calls by default, so that a single point
    is now passed to the objective function at a time. For algorithms whose objective function can
    handle more than one evaluation at once, such as the VQE in this library, the number of grouped
    evaluations can be controlled via the ``max_evals_grouped`` parameter. Grouped evaluation hands
    over a list of points so that they can potentially be assessed more efficiently in a single job.
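As a usage illustration of the new parameter (not part of this change), grouped evaluations could be enabled from user code roughly as follows; the observable, ansatz, and estimator below are placeholders chosen for the example.

from qiskit.circuit.library import RealAmplitudes
from qiskit.primitives import Estimator
from qiskit.quantum_info import SparsePauliOp

from qiskit_algorithms import VQE
from qiskit_algorithms.optimizers import AQGD

# Placeholder problem: a small observable and ansatz.
observable = SparsePauliOp.from_list([("ZZ", 1.0), ("XI", 0.5)])
ansatz = RealAmplitudes(num_qubits=2, reps=1)

# Group up to 4 objective function evaluations per call; VQE's energy evaluation
# can then assess the resulting list of points in a single job.
optimizer = AQGD(maxiter=100, max_evals_grouped=4)
# The grouping can also be changed afterwards through the common Optimizer API:
# optimizer.set_max_evals_grouped(2)

vqe = VQE(Estimator(), ansatz, optimizer)
result = vqe.compute_minimum_eigenvalue(observable)
print(result.eigenvalue)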

41 changes: 41 additions & 0 deletions test/optimizers/test_optimizer_aqgd.py
@@ -14,6 +14,8 @@

import unittest
from test import QiskitAlgorithmsTestCase
import numpy as np
from ddt import ddt, data
from qiskit.circuit.library import RealAmplitudes
from qiskit.primitives import Estimator
from qiskit.quantum_info import SparsePauliOp
@@ -26,6 +28,7 @@
from qiskit_algorithms.utils import algorithm_globals


@ddt
class TestOptimizerAQGD(QiskitAlgorithmsTestCase):
"""Test AQGD optimizer using RY for analytic gradient with VQE"""

@@ -93,6 +96,44 @@ def test_int_values(self):

self.assertAlmostEqual(result.eigenvalue.real, -1.857, places=3)

@data(1, 2, 3) # Values for max_grouped_evals
def test_max_grouped_evals_parallelizable(self, max_grouped_evals):
"""Tests max_grouped_evals for an objective function that can be parallelized"""
aqgd = AQGD(momentum=0.0, max_evals_grouped=2)

vqe = VQE(
self.estimator,
ansatz=RealAmplitudes(),
optimizer=aqgd,
gradient=self.gradient,
)

with self.subTest(max_grouped_evals=max_grouped_evals):
aqgd.set_max_evals_grouped(max_grouped_evals)
result = vqe.compute_minimum_eigenvalue(operator=self.qubit_op)
self.assertAlmostEqual(result.eigenvalue.real, -1.857, places=3)

def test_max_grouped_evals_non_parallelizable(self):
"""Tests max_grouped_evals for an objective function that cannot be parallelized"""
# Define the objective function (toy example for functionality)
def quadratic_objective(x: np.ndarray) -> float:
# Check if only a single point as parameters is passed
if np.array(x).ndim != 1:
raise ValueError("The function expects a vector.")

return x[0] ** 2 + x[1] ** 2 - 2 * x[0] * x[1]

# Define initial point
x0 = np.array([1, 2.23])
# Test max_evals_grouped raises no error for max_evals_grouped=1
aqgd = AQGD(maxiter=100, max_evals_grouped=1)
x_new = aqgd.minimize(quadratic_objective, x0).x
self.assertAlmostEqual(sum(np.round(x_new / max(x_new), 7)), 0)
# Test max_evals_grouped raises an error for max_evals_grouped=2
aqgd.set_max_evals_grouped(2)
with self.assertRaises(ValueError):
aqgd.minimize(quadratic_objective, x0)


if __name__ == "__main__":
unittest.main()
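A note on the contract these tests exercise: with ``max_evals_grouped=1`` the optimizer passes a single 1-D parameter vector per objective call, while with a larger value it may pass a 2-D array of points and expects one value per point back. A hypothetical objective written to accept both forms (illustration only, not part of the test file) could look like the sketch below.

from __future__ import annotations

import numpy as np

from qiskit_algorithms.optimizers import AQGD


def batch_friendly_objective(x: np.ndarray) -> float | np.ndarray:
    """Toy quadratic that accepts either a single point or a batch of points."""
    x = np.asarray(x)
    if x.ndim == 1:  # single point -> scalar value
        return float((x[0] - x[1]) ** 2)
    return (x[:, 0] - x[:, 1]) ** 2  # batch -> one value per row


# With such an objective, grouped evaluation should not raise, unlike the strictly
# vector-only objective used in test_max_grouped_evals_non_parallelizable above.
aqgd = AQGD(maxiter=50, max_evals_grouped=2)
result = aqgd.minimize(batch_friendly_objective, np.array([1.0, 2.23]))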