From d493f8c5b4dd4e28bb127de7f1a47f66d18db88b Mon Sep 17 00:00:00 2001
From: dwierichs
Date: Sat, 4 May 2024 12:37:45 +0200
Subject: [PATCH] black

---
 pennylane/gradients/gradient_transform.py    |  4 ++-
 pennylane/gradients/parameter_shift.py       |  6 ++--
 .../parameter_shift/test_parameter_shift.py  | 29 ++++++++++++++-----
 3 files changed, 28 insertions(+), 11 deletions(-)

diff --git a/pennylane/gradients/gradient_transform.py b/pennylane/gradients/gradient_transform.py
index e0b13106fbd..93192febcbc 100644
--- a/pennylane/gradients/gradient_transform.py
+++ b/pennylane/gradients/gradient_transform.py
@@ -285,7 +285,9 @@ def _swap_first_two_axes(grads, first_axis_size, second_axis_size, squeeze=True)
     )
 
 
-def _move_first_axis_to_third_pos(grads, first_axis_size, second_axis_size, third_axis_size, squeeze=True):
+def _move_first_axis_to_third_pos(
+    grads, first_axis_size, second_axis_size, third_axis_size, squeeze=True
+):
     """Transpose the first three axes of an iterable of iterables like a tuple of tuples
     the same way as np.transpose(..., [1, 2, 0]) would do."""
     if first_axis_size == 1 and squeeze:
diff --git a/pennylane/gradients/parameter_shift.py b/pennylane/gradients/parameter_shift.py
index 79b914daaec..eebd7273738 100644
--- a/pennylane/gradients/parameter_shift.py
+++ b/pennylane/gradients/parameter_shift.py
@@ -192,7 +192,7 @@ def _evaluate_gradient(tape_specs, res, data, r0, batch_size):
         res = fn(res)
 
     *_, num_measurements, shots = tape_specs
-    scalar_shots, len_shot_vec = not shots.has_partitioned_shots, shots.num_copies 
+    scalar_shots, len_shot_vec = not shots.has_partitioned_shots, shots.num_copies
     if r0 is None and not scalar_shots:
         r0 = [None] * int(len_shot_vec)
 
@@ -220,7 +220,9 @@ def _evaluate_gradient(tape_specs, res, data, r0, batch_size):
         # or with broadcasting (shots, measurements, parameters)
         if batch_size is None:
             # Move first axis (parameters) to last position
-            res = _move_first_axis_to_third_pos(res, len(res), len_shot_vec, num_measurements, squeeze=False)
+            res = _move_first_axis_to_third_pos(
+                res, len(res), len_shot_vec, num_measurements, squeeze=False
+            )
         # _multi_meas_grad expects (measurements, parameters), so we iterate over shot vector
         return tuple(
             _multi_meas_grad(r, coeffs, r0_, unshifted_coeff, num_measurements)
diff --git a/tests/gradients/parameter_shift/test_parameter_shift.py b/tests/gradients/parameter_shift/test_parameter_shift.py
index 8cffa07fffd..b58190cbc8e 100644
--- a/tests/gradients/parameter_shift/test_parameter_shift.py
+++ b/tests/gradients/parameter_shift/test_parameter_shift.py
@@ -27,10 +27,13 @@
 from pennylane.operation import AnyWires, Observable
 from pennylane.measurements.shots import Shots
 
+
 class TestEvaluateGradient:
     """Test _evaluate_gradient."""
 
-    @pytest.mark.parametrize("coeffs, unshifted_coeff", [(np.arange(1, 5), None), (np.arange(1, 4), 4), (np.ones(0), 10)])
+    @pytest.mark.parametrize(
+        "coeffs, unshifted_coeff", [(np.arange(1, 5), None), (np.arange(1, 4), 4), (np.ones(0), 10)]
+    )
     @pytest.mark.parametrize("batch_size", [None, 4])
     def test_single_shots_single_meas(self, coeffs, unshifted_coeff, batch_size):
         """Test that a single shots, single measurement gradient is evaluated correctly."""
@@ -52,8 +55,7 @@ def test_single_shots_single_meas(self, coeffs, unshifted_coeff, batch_size):
 
         assert isinstance(grad, np.ndarray)
         assert grad.shape == ()
-        assert np.isclose(grad, np.sum(-np.arange(1, 5)**2))
-
+        assert np.isclose(grad, np.sum(-np.arange(1, 5) ** 2))
 
 
 # pylint: disable=too-few-public-methods
@@ -608,14 +610,19 @@ def test_recycled_unshifted_tape(self, ops_with_custom_recipe, broadcast):
             [[-1e7, 1, 0], [1e7, 1, 1e-7]] if i in ops_with_custom_recipe else None
             for i in range(2)
         )
-        tapes, fn = qml.gradients.param_shift(tape, gradient_recipes=gradient_recipes, broadcast=broadcast)
+        tapes, fn = qml.gradients.param_shift(
+            tape, gradient_recipes=gradient_recipes, broadcast=broadcast
+        )
 
         # two tapes per parameter that doesn't use a custom recipe,
         # one tape per parameter that uses custom recipe,
         # plus one global call if at least one uses the custom recipe
         num_ops_standard_recipe = tape.num_params - len(ops_with_custom_recipe)
         tapes_per_param = 1 if broadcast else 2
-        assert len(tapes) == tapes_per_param * num_ops_standard_recipe + len(ops_with_custom_recipe) + 1
+        assert (
+            len(tapes)
+            == tapes_per_param * num_ops_standard_recipe + len(ops_with_custom_recipe) + 1
+        )
         # Test that executing the tapes and the postprocessing function works
         grad = fn(qml.execute(tapes, dev, None))
         assert qml.math.allclose(grad, -np.sin(x[0] + x[1]), atol=1e-5)
@@ -641,7 +648,9 @@ def test_custom_recipe_unshifted_only(self, ops_with_custom_recipe, multi_measur
         gradient_recipes = tuple(
             [[-1e7, 1, 0], [1e7, 1, 0]] if i in ops_with_custom_recipe else None for i in range(2)
         )
-        tapes, fn = qml.gradients.param_shift(tape, gradient_recipes=gradient_recipes, broadcast=broadcast)
+        tapes, fn = qml.gradients.param_shift(
+            tape, gradient_recipes=gradient_recipes, broadcast=broadcast
+        )
 
         # two tapes per parameter that doesn't use a custom recipe,
         # plus one global (unshifted) call if at least one uses the custom recipe
@@ -687,12 +696,16 @@ def test_custom_recipe_mixing_unshifted_shifted(self, ops_with_custom_recipe, br
             )
             for i in range(2)
         )
-        tapes, fn = qml.gradients.param_shift(tape, gradient_recipes=gradient_recipes, broadcast=broadcast)
+        tapes, fn = qml.gradients.param_shift(
+            tape, gradient_recipes=gradient_recipes, broadcast=broadcast
+        )
 
         # two tapes per parameter, independent of recipe
         # plus one global (unshifted) call if at least one uses the custom recipe
         tapes_per_param = 1 if broadcast else 2
-        assert len(tapes) == tapes_per_param * tape.num_params + int(len(ops_with_custom_recipe) > 0)
+        assert len(tapes) == tapes_per_param * tape.num_params + int(
+            len(ops_with_custom_recipe) > 0
+        )
         # Test that executing the tapes and the postprocessing function works
         grad = fn(qml.execute(tapes, dev, None))
         assert qml.math.allclose(grad[0], -np.sin(x[0] + x[1]), atol=1e-5)
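
Note (not part of the patch): the first hunk only shows the head of _move_first_axis_to_third_pos, whose docstring says it transposes the first three axes of a nested tuple the way np.transpose(..., [1, 2, 0]) would. The sketch below is a minimal, self-contained illustration of that axis move; the function name, the loop-based body, and the omission of the squeeze handling are assumptions for illustration only, not PennyLane's actual implementation.

    import numpy as np

    def move_first_axis_to_third_pos(grads, first_axis_size, second_axis_size, third_axis_size):
        # Reorder a (first, second, third)-nested structure into (second, third, first),
        # i.e. the nested-tuple analogue of np.transpose(arr, [1, 2, 0]).
        return tuple(
            tuple(
                tuple(grads[i][j][k] for i in range(first_axis_size))
                for k in range(third_axis_size)
            )
            for j in range(second_axis_size)
        )

    # Quick consistency check on a (2, 3, 4)-shaped example:
    arr = np.arange(24).reshape(2, 3, 4)
    nested = tuple(tuple(tuple(row) for row in block) for block in arr)
    moved = move_first_axis_to_third_pos(nested, 2, 3, 4)
    assert np.array_equal(np.array(moved), np.transpose(arr, [1, 2, 0]))

In the patched call site, this corresponds to turning a (parameters, shots, measurements) result structure into (shots, measurements, parameters), matching the surrounding comments in parameter_shift.py.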