ADAM bug when calculating the gradient in batches #178 (#183)
Co-authored-by: Peter Röseler <peter.roeseler@gmail.com>
Co-authored-by: Steve Wood <40241007+woodsp-ibm@users.noreply.github.com>
Co-authored-by: Elena Peña Tapia <57907331+ElePT@users.noreply.github.com>
(cherry picked from commit 6724b47)
proeseler authored and mergify[bot] committed Aug 20, 2024
1 parent de6a85e commit 64c1d9e
Showing 3 changed files with 99 additions and 2 deletions.
6 changes: 4 additions & 2 deletions qiskit_algorithms/optimizers/adam_amsgrad.py
@@ -1,6 +1,6 @@
 # This code is part of a Qiskit project.
 #
-# (C) Copyright IBM 2019, 2023.
+# (C) Copyright IBM 2019, 2024.
 #
 # This code is licensed under the Apache License, Version 2.0. You may
 # obtain a copy of this license in the LICENSE.txt file in the root directory
@@ -209,7 +209,9 @@ def minimize(
             The result of the optimization, containing e.g. the result as attribute ``x``.
         """
         if jac is None:
-            jac = Optimizer.wrap_function(Optimizer.gradient_num_diff, (fun, self._eps))
+            jac = Optimizer.wrap_function(
+                Optimizer.gradient_num_diff, (fun, self._eps, self._max_evals_grouped)
+            )

         derivative = jac(x0)
         self._t = 0
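For context on this change: Optimizer.wrap_function(function, args) returns a callable that appends args to each call, so jac(x0) above expands to Optimizer.gradient_num_diff(x0, fun, self._eps, self._max_evals_grouped). The sketch below is a hypothetical helper, not the library's implementation; it illustrates the batching idea the extra argument enables, assuming the objective accepts a flat concatenation of probe points and returns one value per point.

import numpy as np

def batched_forward_difference(x_center, f, epsilon, max_evals_grouped=1):
    # Hypothetical sketch of a forward-difference gradient with grouped
    # objective calls, mirroring the idea behind Optimizer.gradient_num_diff.
    f_orig = f(x_center)
    dim = len(x_center)
    # One probe point per parameter: x_center shifted by epsilon along one axis.
    probes = [x_center + epsilon * np.eye(dim)[k] for k in range(dim)]
    grad = []
    for start in range(0, dim, max_evals_grouped):
        chunk = probes[start:start + max_evals_grouped]
        # The objective receives the chunk as one flat array and should
        # return one value per probe point it contains.
        values = np.atleast_1d(f(np.concatenate(chunk)))
        grad.extend((value - f_orig) / epsilon for value in values)
    return np.array(grad)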
@@ -0,0 +1,9 @@
---
fixes:
  - |
    Fixed the ADAM optimizer grouping objective function calls by default, so that a single point
    is now passed to the objective function. For algorithms whose objective function can handle
    more than one evaluation per call, such as VQE in the algorithms here, the number of grouped
    evaluations can be controlled via the max_evals_grouped parameter. Grouped evaluations allow
    a list of points to be handed over so that they can potentially be assessed more efficiently
    in a single job.
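A minimal usage sketch of the fixed behaviour (the quadratic batched_objective below is a made-up example, not part of this change): with grouping enabled, the objective may receive a flat concatenation of several points while the numerical gradient is evaluated and should return one value per point, while a single point still yields a plain float.

import numpy as np
from qiskit_algorithms.optimizers import ADAM

def batched_objective(params):
    # Toy objective: accepts one 2-parameter point or a flat concatenation
    # of several such points, returning one value per point.
    points = np.reshape(params, (-1, 2))
    values = np.sum(points**2, axis=1)
    return values if len(values) > 1 else float(values[0])

optimizer = ADAM(maxiter=1000)
optimizer.set_max_evals_grouped(2)  # allow up to two points per objective call
result = optimizer.minimize(batched_objective, x0=np.array([1.0, -0.5]))
print(result.x, result.fun, result.nfev)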
86 changes: 86 additions & 0 deletions test/optimizers/test_adam.py
@@ -0,0 +1,86 @@
# This code is part of a Qiskit project.
#
# (C) Copyright IBM 2024.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.

"""Tests for the ADAM optimizer."""

from test import QiskitAlgorithmsTestCase

from ddt import ddt, data
import numpy as np

from qiskit_algorithms.optimizers import ADAM, Optimizer
from qiskit_algorithms.utils import algorithm_globals


@ddt
class TestADAM(QiskitAlgorithmsTestCase):
    """Tests for the ADAM optimizer."""

    def setUp(self):
        super().setUp()
        algorithm_globals.random_seed = 52
        # Feature vector
        self.x = np.array([1, 2, 3, 4])
        # Target value
        self.y = 5

    def objective(self, w):
        """
        Objective function to minimize the mean squared error of a linear model.

        Parameters:
            w : numpy array
                The weights (including bias) of the linear model. When objective
                calls are grouped, this is a flat concatenation of several such
                parameter vectors.

        Returns:
            float or numpy array
                The mean squared error, one value per parameter vector.
        """
        # Extract weights and bias from the parameter vector; with grouped
        # evaluations, reshape so that each column holds one point.
        new_shape = (5, int(len(w) / 5))
        w = np.reshape(w, new_shape, order="F")

        weights = w[:-1, :]
        bias = w[-1, :]
        # Calculate the predicted value for each point
        y_pred = np.dot(self.x, weights) + bias
        # Calculate the mean squared error for each point
        mse = (self.y - y_pred) ** 2
        return mse if mse.size > 1 else float(mse[0])

    def run_optimizer(self, optimizer: Optimizer, weights: np.ndarray, max_nfev: int):
        """Run the optimizer and check the result.

        Args:
            optimizer: The optimizer instance to test.
            weights: The weights to optimize.
            max_nfev: The maximal allowed number of function evaluations.
        """
        # Minimize
        res = optimizer.minimize(self.objective, np.array(weights), None)
        error = res.fun
        nfev = res.nfev

        self.assertAlmostEqual(error, 0, places=3)
        self.assertLessEqual(nfev, max_nfev)

    @data(1, 5)
    def test_adam_max_evals(self, max_evals_grouped):
        """Test ADAM with grouped objective function evaluations."""
        # Initialize weights (including bias)
        w = np.zeros(len(self.x) + 1)
        # Initialize optimizer
        optimizer = ADAM(maxiter=10000, tol=1e-06)
        # Set how many points may be grouped into a single objective call
        optimizer.set_max_evals_grouped(max_evals_grouped)
        self.run_optimizer(optimizer, w, max_nfev=10000)
