feat: Allow for disabling backend when running tests via pytest #2340

Merged
5 commits merged on Oct 9, 2023
Changes from 3 commits
42 changes: 32 additions & 10 deletions tests/conftest.py
@@ -9,6 +9,17 @@
import pyhf


def pytest_addoption(parser):
parser.addoption(
"--disable-backend",
action="append",
type=str,
default=[],
choices=["tensorflow", "pytorch", "jax", "minuit"],
Member:
Suggested change:
-    choices=["tensorflow", "pytorch", "jax", "minuit"],
+    choices=["numpy", "tensorflow", "pytorch", "jax", "minuit"],

Is there a reason to not include numpy?

Contributor (Author):

Numpy is a core dependency, not an optional one.

Member:

Yes, but I would think that if I have the ability to disable backends during testing that I should be able to disable any of them, not just optional ones.

Member:

> The point of the issue was to only disable optional ones.

That was the original case yes, but is there any technical reason to not want to also be able to skip numpy?

Member:

I don't want to delay this anymore so I'll merge this in and open an Issue.

help="list of backends to disable in tests",
)


# Factory as fixture pattern
@pytest.fixture
def get_json_from_tarfile():
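Note on the hunk above: because the new option uses action="append", the flag can be repeated on the command line (for example, pytest tests/ --disable-backend tensorflow --disable-backend minuit), and pytest collects the values into a list that defaults to []. A minimal sketch of reading that list back, assuming only the option registered above (the fixture name here is hypothetical):

```python
import pytest


@pytest.fixture
def disabled_backends(request):
    # The "append" action collects repeated --disable-backend flags into a
    # list; with no flag passed this is simply the default [].
    return request.config.option.disable_backend
```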
@@ -59,14 +70,14 @@ def reset_backend():
@pytest.fixture(
scope='function',
params=[
-        (pyhf.tensor.numpy_backend(), None),
-        (pyhf.tensor.pytorch_backend(), None),
-        (pyhf.tensor.pytorch_backend(precision='64b'), None),
-        (pyhf.tensor.tensorflow_backend(), None),
-        (pyhf.tensor.jax_backend(), None),
+        (("numpy_backend", dict()), ("scipy_optimizer", dict())),
+        (("pytorch_backend", dict()), ("scipy_optimizer", dict())),
+        (("pytorch_backend", dict(precision="64b")), ("scipy_optimizer", dict())),
+        (("tensorflow_backend", dict()), ("scipy_optimizer", dict())),
+        (("jax_backend", dict()), ("scipy_optimizer", dict())),
         (
-            pyhf.tensor.numpy_backend(poisson_from_normal=True),
-            pyhf.optimize.minuit_optimizer(),
+            ("numpy_backend", dict(poisson_from_normal=True)),
+            ("minuit_optimizer", dict()),
         ),
],
ids=['numpy', 'pytorch', 'pytorch64', 'tensorflow', 'jax', 'numpy_minuit'],
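Note on the hunk above: the params are now (class name, keyword arguments) pairs instead of live backend and optimizer instances, so nothing is constructed at collection time; the fixture body (next hunk) resolves each pair only after the skip checks have run. A sketch of that resolution, following the getattr pattern the diff introduces (assumes the optional dependencies for the named backend and optimizer are installed):

```python
import pyhf

# One parametrization from the list above, resolved the way the fixture does it.
tensor_config = ("numpy_backend", dict(poisson_from_normal=True))
optimizer_config = ("minuit_optimizer", dict())  # requires iminuit

tensor = getattr(pyhf.tensor, tensor_config[0])(**tensor_config[1])
optimizer = getattr(pyhf.optimize, optimizer_config[0])(**optimizer_config[1])
pyhf.set_backend(tensor, optimizer)
```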
@@ -87,13 +98,20 @@ def backend(request):
only_backends = [
pid for pid in param_ids if request.node.get_closest_marker(f'only_{pid}')
]
disable_backend = any(
backend in param_id for backend in request.config.option.disable_backend
)

if skip_backend and (param_id in only_backends):
raise ValueError(
f"Must specify skip_{param_id} or only_{param_id} but not both!"
)

-    if skip_backend:
+    if disable_backend:
+        pytest.skip(
+            f"skipping {func_name} as the backend is disabled via --disable-backend"
+        )
+    elif skip_backend:
pytest.skip(f"skipping {func_name} as specified")
elif only_backends and param_id not in only_backends:
pytest.skip(
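One behavioural detail of the check introduced in this hunk: backend in param_id is a substring match against the fixture's param ids, so a single flag value can cover several parametrizations. A standalone sketch using the ids from this conftest:

```python
# Standalone sketch (not part of the diff) of the substring check above.
param_ids = ["numpy", "pytorch", "pytorch64", "tensorflow", "jax", "numpy_minuit"]
disabled = ["pytorch", "minuit"]  # as if run with two --disable-backend flags

skipped = [pid for pid in param_ids if any(b in pid for b in disabled)]
print(skipped)  # ['pytorch', 'pytorch64', 'numpy_minuit']
```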
@@ -109,10 +127,14 @@ def backend(request):
pytest.mark.xfail(reason=f"expect {func_name} to fail as specified")
)

+    tensor_config, optimizer_config = request.param
+
+    tensor = getattr(pyhf.tensor, tensor_config[0])(**tensor_config[1])
+    optimizer = getattr(pyhf.optimize, optimizer_config[0])(**optimizer_config[1])
     # actual execution here, after all checks is done
-    pyhf.set_backend(*request.param)
+    pyhf.set_backend(tensor, optimizer)

-    yield request.param
+    yield (tensor, optimizer)


@pytest.fixture(
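Finally, a hypothetical downstream test (not part of this PR) showing how the reworked backend fixture is consumed: the test runs once per param id, and any id matching a --disable-backend value is reported as skipped instead of failing on a missing optional dependency.

```python
# Hypothetical test module relying on the conftest.py fixture above.
import pyhf


def test_logpdf_runs_on_selected_backend(backend):
    tensorlib, _ = backend  # the fixture yields the (tensor backend, optimizer) pair
    model = pyhf.simplemodels.uncorrelated_background(
        signal=[12.0, 11.0], bkg=[50.0, 52.0], bkg_uncertainty=[3.0, 7.0]
    )
    data = [51.0, 48.0] + model.config.auxdata
    # logpdf evaluates with whichever backend this parametrization selected
    result = model.logpdf(model.config.suggested_init(), data)
    assert len(tensorlib.tolist(result)) == 1
```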