Extend the device test suite: gradient, op-arithmetic and template tests (#5273)

I ran the plugin test suite against this branch to see which tests fail
immediately on which devices (done, see
[here](https://github.com/PennyLaneAI/plugin-test-matrix/actions?query=workflow:*-timmy-latest);
braket and quantuminspire are failing for reasons unrelated to this PR), and
added skips for those devices. Everything now passes as expected.

**Context:**
The device test suite doesn't cover a lot of PennyLane's features, so
I'm adding more coverage.

**Description of the Change:**
- Modified CI to install interfaces when needed (basically always, except
for `default.qubit.autograd`)
- Updated `test_measurements.py` to test some basic arithmetic ops by
comparing the results to `default.qubit`
- Added basic differentiation tests for each interface:
`test_basic_grad`, `test_backprop_state`, `test_parameter_shift`,
`test_probs`, `test_multi_meas` and `test_hessian`. They do what they sound
like they do 😄
- Added a test for every single template in PennyLane 🐳 I grabbed the
example from each template's docstring, computed the result with
`default.qubit`, and assert that every other device gets the same result.
If a docstring didn't have an example (or it just returned 1 when there
were better examples), I took one from the tests for that template. I also
swapped `qml.state()` for `qml.probs()` whenever possible so the tests work
with finite-shot devices; a minimal sketch of this comparison pattern is
shown below.
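
For illustration, a minimal sketch of that comparison pattern (the test name,
template choice, wire count and tolerance here are illustrative rather than
the exact test code; `device` is the suite's device fixture):

```python
import numpy as np
import pennylane as qml


def test_template_matches_default_qubit(device):
    """Sketch: run the same circuit on the device under test and on
    default.qubit, then compare probabilities (probs also works on
    finite-shot devices, unlike state)."""
    dev = device(wires=2)  # device under test, provided by the suite's fixture
    ref = qml.device("default.qubit", wires=2)

    def circuit():
        # illustrative template; the suite uses each template's docstring example
        qml.AngleEmbedding(features=[0.1, 0.2], wires=[0, 1])
        return qml.probs(wires=[0, 1])

    res = qml.QNode(circuit, dev)()
    expected = qml.QNode(circuit, ref)()
    assert np.allclose(res, expected, atol=0.05)
```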

**Benefits:**
More confidence in our plugin devices!

**Possible Drawbacks:**
- Slower device tests
- New failures to manage?
- Some tests needed explicit skips for certain devices. I opened a story in
the device test suite upgrade epic to track addressing those explicit skips

[sc-57488]

---------

Co-authored-by: Christina Lee <christina@xanadu.ai>
Co-authored-by: Mudit Pandey <mudit.pandey@xanadu.ai>
3 people authored Apr 12, 2024
1 parent 3beb4be commit f7834de
Showing 10 changed files with 1,919 additions and 7 deletions.
7 changes: 4 additions & 3 deletions .github/workflows/interface-unit-tests.yml
@@ -510,6 +510,7 @@ jobs:
 # shots: None
 - device: default.qubit.autograd
   shots: None
+  skip_interface: jax,tf,torch
 - device: default.mixed
   shots: None
 python-version: >-
@@ -525,9 +526,9 @@
 coverage_artifact_name: devices-coverage-${{ matrix.config.device }}-${{ matrix.config.shots }}
 python_version: ${{ matrix.python-version }}
 pipeline_mode: ${{ inputs.pipeline_mode }}
-install_jax: ${{ contains(matrix.config.device, 'jax') }}
-install_tensorflow: ${{ contains(matrix.config.device, 'tf') }}
-install_pytorch: ${{ contains(matrix.config.device, 'torch') }}
+install_jax: ${{ !contains(matrix.config.skip_interface, 'jax') }}
+install_tensorflow: ${{ !contains(matrix.config.skip_interface, 'tf') }}
+install_pytorch: ${{ !contains(matrix.config.skip_interface, 'torch') }}
 install_pennylane_lightning_master: false
 pytest_test_directory: pennylane/devices/tests
 pytest_coverage_flags: ${{ inputs.pytest_coverage_flags }}
3 changes: 3 additions & 0 deletions doc/releases/changelog-dev.md
@@ -248,6 +248,9 @@
   [(#5256)](https://github.com/PennyLaneAI/pennylane/pull/5256)
   [(#5395)](https://github.com/PennyLaneAI/pennylane/pull/5395)

+* Extend the device test suite to cover gradient methods, templates and arithmetic observables.
+  [(#5273)](https://github.com/PennyLaneAI/pennylane/pull/5273)
+
 * Add type hints for unimplemented methods of the abstract class `Operator`.
   [(#5490)](https://github.com/PennyLaneAI/pennylane/pull/5490)

2 changes: 1 addition & 1 deletion pennylane/_qubit_device.py
@@ -152,7 +152,7 @@ def _const_mul(constant, array):
"Identity",
"Projector",
"Sum",
"Sprod",
"SProd",
"Prod",
}

16 changes: 14 additions & 2 deletions pennylane/devices/tests/conftest.py
@@ -85,8 +85,20 @@ def _skip_if(dev, capabilities):
     return _skip_if


-@pytest.fixture(scope="function")
-def device(device_kwargs):
+@pytest.fixture
+def validate_diff_method(device, diff_method, device_kwargs):
+    """Skip tests if a device does not support a diff_method"""
+    if diff_method == "backprop" and device_kwargs.get("shots") is not None:
+        pytest.skip(reason="test should only be run in analytic mode")
+    dev = device(1)
+    if isinstance(dev, qml.Device):
+        passthru_devices = dev.capabilities().get("passthru_devices")
+        if diff_method == "backprop" and passthru_devices is None:
+            pytest.skip(reason="device does not support backprop")
+
+
+@pytest.fixture(scope="function", name="device")
+def fixture_device(device_kwargs):
     """Fixture to create a device."""

     # internally used by pytest
201 changes: 201 additions & 0 deletions pennylane/devices/tests/test_gradients_autograd.py
@@ -0,0 +1,201 @@
# Copyright 2024 Xanadu Quantum Technologies Inc.

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

# http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests trainable circuits using the Autograd interface."""
# pylint:disable=no-self-use
import pytest

import numpy as np

import pennylane as qml
from pennylane import numpy as pnp


@pytest.mark.usefixtures("validate_diff_method")
@pytest.mark.parametrize("diff_method", ["backprop", "parameter-shift", "hadamard"])
class TestGradients:
    """Test various gradient computations."""

    def test_basic_grad(self, diff_method, device, tol):
        """Test a basic function with one RX and one expectation."""
        wires = 2 if diff_method == "hadamard" else 1
        dev = device(wires=wires)
        tol = tol(dev.shots)
        if diff_method == "hadamard":
            tol += 0.01

        @qml.qnode(dev, diff_method=diff_method)
        def circuit(x):
            qml.RX(x, 0)
            return qml.expval(qml.Z(0))

        x = pnp.array(0.5)
        res = qml.grad(circuit)(x)
        assert np.isclose(res, -pnp.sin(x), atol=tol, rtol=0)

    def test_backprop_state(self, diff_method, device, tol):
        """Test the trainability of parameters in a circuit returning the state."""
        if diff_method != "backprop":
            pytest.skip(reason="test only works with backprop")
        dev = device(2)
        if dev.shots:
            pytest.skip("test uses backprop, must be in analytic mode")
        if "mixed" in dev.name:
            pytest.skip("mixed-state simulator will wrongly use grad on non-scalar results")
        tol = tol(dev.shots)

        x = pnp.array(0.543)
        y = pnp.array(-0.654)

        @qml.qnode(dev, diff_method=diff_method, grad_on_execution=True)
        def circuit(x, y):
            qml.RX(x, wires=[0])
            qml.RY(y, wires=[1])
            qml.CNOT(wires=[0, 1])
            return qml.state()

        def cost_fn(x, y):
            res = circuit(x, y)
            probs = pnp.abs(res) ** 2
            return probs[0] + probs[2]

        res = qml.grad(cost_fn)(x, y)
        expected = np.array([-np.sin(x) * np.cos(y) / 2, -np.cos(x) * np.sin(y) / 2])
        assert np.allclose(res, expected, atol=tol, rtol=0)

        y = pnp.array(-0.654, requires_grad=False)
        res = qml.grad(cost_fn)(x, y)
        assert np.allclose(res, expected[0], atol=tol, rtol=0)

    def test_parameter_shift(self, diff_method, device, tol):
        """Test a multi-parameter circuit with parameter-shift."""
        if diff_method != "parameter-shift":
            pytest.skip(reason="test only works with parameter-shift")

        a = pnp.array(0.1)
        b = pnp.array(0.2)

        dev = device(2)
        tol = tol(dev.shots)

        @qml.qnode(dev, diff_method="parameter-shift", grad_on_execution=False)
        def circuit(a, b):
            qml.RY(a, wires=0)
            qml.RX(b, wires=1)
            qml.CNOT(wires=[0, 1])
            return qml.expval(qml.Hamiltonian([1, 1], [qml.Z(0), qml.Y(1)]))

        res = qml.grad(circuit)(a, b)
        expected = [-np.sin(a) + np.sin(a) * np.sin(b), -np.cos(a) * np.cos(b)]
        assert np.allclose(res, expected, atol=tol, rtol=0)

        # make the second QNode argument a constant
        b = pnp.array(0.2, requires_grad=False)
        res = qml.grad(circuit)(a, b)
        assert np.allclose(res, expected[0], atol=tol, rtol=0)

    def test_probs(self, diff_method, device, tol):
        """Test differentiation of a circuit returning probs()."""
        wires = 3 if diff_method == "hadamard" else 2
        dev = device(wires=wires)
        tol = tol(dev.shots)
        x = pnp.array(0.543)
        y = pnp.array(-0.654)

        @qml.qnode(dev, diff_method=diff_method)
        def circuit(x, y):
            qml.RX(x, wires=[0])
            qml.RY(y, wires=[1])
            qml.CNOT(wires=[0, 1])
            return qml.probs(wires=[1])

        res = qml.jacobian(circuit)(x, y)

        expected = np.array(
            [
                [-np.sin(x) * np.cos(y) / 2, -np.cos(x) * np.sin(y) / 2],
                [np.cos(y) * np.sin(x) / 2, np.cos(x) * np.sin(y) / 2],
            ]
        )

        assert isinstance(res, tuple)
        assert len(res) == 2

        assert isinstance(res[0], pnp.ndarray)
        assert res[0].shape == (2,)

        assert isinstance(res[1], pnp.ndarray)
        assert res[1].shape == (2,)

        if diff_method == "hadamard" and "raket" in dev.name:
            pytest.xfail(reason="braket gets wrong results for hadamard here")
        assert np.allclose(res[0], expected.T[0], atol=tol, rtol=0)
        assert np.allclose(res[1], expected.T[1], atol=tol, rtol=0)

    def test_multi_meas(self, diff_method, device, tol):
        """Test differentiation of a circuit with both scalar and array-like returns."""
        wires = 3 if diff_method == "hadamard" else 2
        dev = device(wires=wires)
        tol = tol(dev.shots)
        x = pnp.array(0.543)
        y = pnp.array(-0.654, requires_grad=False)

        @qml.qnode(dev, diff_method=diff_method)
        def circuit(x, y):
            qml.RX(x, wires=[0])
            qml.RY(y, wires=[1])
            qml.CNOT(wires=[0, 1])
            return qml.expval(qml.Z(0)), qml.probs(wires=[1])

        def cost_fn(x, y):
            return pnp.hstack(circuit(x, y))

        jac = qml.jacobian(cost_fn)(x, y)

        expected = [-np.sin(x), -np.sin(x) * np.cos(y) / 2, np.cos(y) * np.sin(x) / 2]
        assert isinstance(jac, pnp.ndarray)
        assert np.allclose(jac, expected, atol=tol, rtol=0)

    def test_hessian(self, diff_method, device, tol):
        """Test hessian computation."""
        wires = 3 if diff_method == "hadamard" else 1
        dev = device(wires=wires)
        tol = tol(dev.shots)

        @qml.qnode(dev, diff_method=diff_method, max_diff=2)
        def circuit(x):
            qml.RY(x[0], wires=0)
            qml.RX(x[1], wires=0)
            return qml.expval(qml.Z(0))

        x = pnp.array([1.0, 2.0])
        res = circuit(x)

        a, b = x

        expected_res = np.cos(a) * np.cos(b)
        assert np.allclose(res, expected_res, atol=tol, rtol=0)

        grad_fn = qml.grad(circuit)
        g = grad_fn(x)

        expected_g = [-np.sin(a) * np.cos(b), -np.cos(a) * np.sin(b)]
        assert np.allclose(g, expected_g, atol=tol, rtol=0)

        hess = qml.jacobian(grad_fn)(x)

        expected_hess = [
            [-np.cos(a) * np.cos(b), np.sin(a) * np.sin(b)],
            [np.sin(a) * np.sin(b), -np.cos(a) * np.cos(b)],
        ]
        assert np.allclose(hess, expected_hess, atol=tol, rtol=0)