Demark plan vs stub, move orphaned plans into dodal #793

Merged (9 commits) on Nov 14, 2024
2 changes: 2 additions & 0 deletions pyproject.toml
@@ -32,6 +32,7 @@ dependencies = [
"aiohttp",
"redis",
"deepdiff",
"scanspec>=0.7.3",
]

dynamic = ["version"]
@@ -47,6 +48,7 @@ dev = [
# Commented out due to dependency version conflict with pydantic 1.x
# "copier",
"myst-parser",
"ophyd_async[sim]",
"pipdeptree",
"pre-commit",
"psutil",
Empty file.
File renamed without changes.
@@ -2,7 +2,7 @@
from bluesky import preprocessors as bpp
from bluesky.utils import MsgGenerator, make_decorator

from dodal.common.beamlines import beamline_utils
from dodal.common.beamlines.beamline_utils import get_path_provider
from dodal.common.types import UpdatingPathProvider

DATA_SESSION = "data_session"
@@ -31,7 +31,7 @@ def attach_data_session_metadata_wrapper(
Iterator[Msg]: Plan messages
"""
if provider is None:
provider = beamline_utils.get_path_provider()
provider = get_path_provider()
yield from bps.wait_for([provider.update])
ress = yield from bps.wait_for([provider.data_session])
data_session = ress[0].result()
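For orientation, a minimal sketch of applying the decorator form of this wrapper (attach_data_session_metadata_decorator, imported by other files in this PR) to a plan; the plan and detector names are hypothetical:

```python
import bluesky.plans as bp
from bluesky.protocols import Readable
from bluesky.utils import MsgGenerator

from dodal.plan_stubs.data_session import attach_data_session_metadata_decorator


@attach_data_session_metadata_decorator()
def count_with_session(det: Readable) -> MsgGenerator:
    # Runs opened by the wrapped plan carry data_session metadata resolved
    # from the beamline's configured path provider.
    yield from bp.count([det], num=1)
```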
@@ -23,7 +23,7 @@ def __init__(
super().__init__(*args)


def _check_and_cache_values(
def check_and_cache_values(
devices_and_positions: dict[MovableReadableDevice, float],
smallest_move: float,
maximum_move: float,
@@ -89,7 +89,7 @@ def move_and_reset_wrapper(
on. If false it is left up to the caller to wait on
them. Defaults to True.
"""
initial_positions = yield from _check_and_cache_values(
initial_positions = yield from check_and_cache_values(
device_and_positions, smallest_move, maximum_move
)
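As a rough sketch of how the now-public stub composes into a plan, mirroring its use in move_and_reset_wrapper above (the plan name and move limits here are illustrative):

```python
import bluesky.plan_stubs as bps
from bluesky.utils import MsgGenerator
from ophyd_async.epics.motor import Motor

from dodal.plan_stubs.motor_utils import check_and_cache_values


def cache_then_move(motor: Motor, target: float) -> MsgGenerator:
    # Check the requested move against the smallest/largest allowed moves and
    # cache the motor's starting position (MoveTooLarge is raised if the move
    # exceeds maximum_move), then perform the move.
    initial_positions = yield from check_and_cache_values({motor: target}, 0.0, 1000.0)
    yield from bps.mv(motor, target)
    return initial_positions
```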

150 changes: 150 additions & 0 deletions src/dodal/plan_stubs/wrapped.py
@@ -0,0 +1,150 @@
import itertools
from collections.abc import Mapping
from typing import Annotated, Any

import bluesky.plan_stubs as bps
from bluesky.protocols import Movable
from bluesky.utils import MsgGenerator

"""
Wrappers for Bluesky built-in plan stubs with type hinting
"""

Group = Annotated[str, "String identifier used by 'wait' or stubs that await"]


# After bluesky 1.14, bounds for stubs that move can be narrowed
# https://github.com/bluesky/bluesky/issues/1821
def set_absolute(
movable: Movable, value: Any, group: Group | None = None, wait: bool = False
) -> MsgGenerator:
"""
Set a device, wrapper for `bps.abs_set`.

Args:
movable (Movable): The device to set
value (T): The new value
group (Group | None, optional): The message group to associate with the
setting, for sequencing. Defaults to None.
wait (bool, optional): The group should wait until all setting is complete
(e.g. a motor has finished moving). Defaults to False.

Returns:
MsgGenerator: Plan

Yields:
Iterator[MsgGenerator]: Bluesky messages
"""
return (yield from bps.abs_set(movable, value, group=group, wait=wait))


def set_relative(
movable: Movable, value: Any, group: Group | None = None, wait: bool = False
) -> MsgGenerator:
"""
Change a device, wrapper for `bps.rel_set`.

Args:
movable (Movable): The device to set
value (T): The new value
group (Group | None, optional): The message group to associate with the
setting, for sequencing. Defaults to None.
wait (bool, optional): The group should wait until all setting is complete
(e.g. a motor has finished moving). Defaults to False.

Returns:
MsgGenerator: Plan

Yields:
Iterator[MsgGenerator]: Bluesky messages
"""

return (yield from bps.rel_set(movable, value, group=group, wait=wait))


def move(moves: Mapping[Movable, Any], group: Group | None = None) -> MsgGenerator:
"""
Move one or more devices, wrapper for `bps.mv`.

Args:
moves (Mapping[Movable, T]): Mapping of Movables to target positions
group (Group | None, optional): The message group to associate with the
setting, for sequencing. Defaults to None.

Returns:
MsgGenerator: Plan

Yields:
Iterator[MsgGenerator]: Bluesky messages
"""

return (
# type ignore until https://github.com/bluesky/bluesky/issues/1809
yield from bps.mv(*itertools.chain.from_iterable(moves.items()), group=group) # type: ignore
)


def move_relative(
moves: Mapping[Movable, Any], group: Group | None = None
) -> MsgGenerator:
"""
Move one or more devices relative to their current positions, wrapper for `bps.mvr`.

Args:
moves (Mapping[Movable, T]): Mapping of Movables to target deltas
group (Group | None, optional): The message group to associate with the
setting, for sequencing. Defaults to None.

Returns:
MsgGenerator: Plan

Yields:
Iterator[MsgGenerator]: Bluesky messages
"""

return (
# type ignore until https://github.com/bluesky/bluesky/issues/1809
yield from bps.mvr(*itertools.chain.from_iterable(moves.items()), group=group) # type: ignore
)


def sleep(time: float) -> MsgGenerator:
"""
Suspend all action for a given time, wrapper for `bps.sleep`.

Args:
time (float): Time to wait in seconds

Returns:
MsgGenerator: Plan

Yields:
Iterator[MsgGenerator]: Bluesky messages
"""

return (yield from bps.sleep(time))


def wait(
group: Group | None = None,
timeout: float | None = None,
) -> MsgGenerator:
"""
Wait for a group status to complete, wrapper for `bps.wait`.
Does not expose move_on: when used as a stub it will not fail on timeout.

Args:
group (Group | None, optional): The name of the group to wait for, defaults
to None, in which case waits for all
groups that have not yet been awaited.
timeout (float | None, default=None): a timeout in seconds


Returns:
MsgGenerator: Plan

Yields:
Iterator[MsgGenerator]: Bluesky messages
"""

return (yield from bps.wait(group, timeout=timeout))
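A minimal sketch composing these wrapped stubs into a plan (device names are hypothetical):

```python
from bluesky.protocols import Movable
from bluesky.utils import MsgGenerator

from dodal.plan_stubs.wrapped import set_absolute, sleep, wait


def position_and_settle(x: Movable, y: Movable) -> MsgGenerator:
    # Start both motions in a shared group without blocking, then wait for
    # the group to complete before a short settle period.
    yield from set_absolute(x, 1.0, group="positioning")
    yield from set_absolute(y, 2.0, group="positioning")
    yield from wait(group="positioning")
    yield from sleep(0.5)
```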
4 changes: 4 additions & 0 deletions src/dodal/plans/__init__.py
@@ -0,0 +1,4 @@
from .scanspec import spec_scan
from .wrapped import count

__all__ = ["count", "spec_scan"]
66 changes: 66 additions & 0 deletions src/dodal/plans/scanspec.py
@@ -0,0 +1,66 @@
import operator
from functools import reduce
from typing import Annotated, Any

import bluesky.plans as bp
from bluesky.protocols import Movable, Readable
from cycler import Cycler, cycler
from pydantic import Field, validate_call
from scanspec.specs import Spec

from dodal.common import MsgGenerator
from dodal.plan_stubs.data_session import attach_data_session_metadata_decorator


@attach_data_session_metadata_decorator()
@validate_call(config={"arbitrary_types_allowed": True})
def spec_scan(
Review thread on this line:

Contributor: FYI: I added DiamondLightSource/mx-bluesky#596 as I think we have overlap here

Contributor: This plan has now moved repositories so many times that the issue has now been lost to time, but it does need improvement. The axes_to_move parameter was a temporary fix and we should try to get rid of it.

@DiamondJoseph (Contributor Author, Oct 25, 2024): I think we no longer need axes_to_move, I just need to find time to start up blueapi with these plans and ensure we can run a spec just passing the name of the device and no axes_to_move

@DiamondJoseph (Contributor Author, Oct 25, 2024): In theory, all plans that use a ScanSpec should be constructing their specific spec and passing it to this (for software scans) or an ophyd-async plan to-be-produced for hardware.

(A usage sketch along these lines appears after this file's diff.)

detectors: Annotated[
set[Readable],
Field(
description="Set of readable devices, will take a reading at each point, \
in addition to any Movables in the Spec",
),
],
spec: Annotated[
Spec[Movable],
Field(description="ScanSpec modelling the path of the scan"),
],
metadata: dict[str, Any] | None = None,
) -> MsgGenerator:
"""Generic plan for reading `detectors` at every point of a ScanSpec `Spec`.
A `Spec` is an N-dimensional path.
"""
# TODO: https://github.com/bluesky/scanspec/issues/154
# support Static.duration: Spec[Literal["DURATION"]]

_md = {
"plan_args": {
"detectors": {det.name for det in detectors},
"spec": repr(spec),
},
"plan_name": "spec_scan",
"shape": spec.shape(),
**(metadata or {}),
}

yield from bp.scan_nd(tuple(detectors), _as_cycler(spec), md=_md)


def _as_cycler(spec: Spec[Movable]) -> Cycler:
"""
Convert a scanspec to a cycler for compatibility with legacy Bluesky plans such as
`bp.scan_nd`. Use the midpoints of the scanspec since cyclers are normally used
for software triggered scans.

Args:
spec: A scanspec

Returns:
Cycler: A new cycler
"""

midpoints = spec.frames().midpoints
# Need to "add" the cyclers for all the axes together. The code below is
# effectively: cycler(motor1, [...]) + cycler(motor2, [...]) + ...
return reduce(operator.add, (cycler(*args) for args in midpoints.items()))
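Following the review discussion above (callers construct their own Spec and pass it in), a minimal usage sketch; the motor and detector parameters are hypothetical:

```python
from bluesky.protocols import Movable, Readable
from bluesky.utils import MsgGenerator
from scanspec.specs import Line

from dodal.plans import spec_scan


def grid(det: Readable, x: Movable, y: Movable) -> MsgGenerator:
    # Outer Line steps y over 6 points; at each y, the inner Line steps x over
    # 11 points, giving a 6 x 11 grid of readings from det.
    spec = Line(y, 0.0, 5.0, 6) * Line(x, 0.0, 10.0, 11)
    yield from spec_scan({det}, spec)
```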
57 changes: 57 additions & 0 deletions src/dodal/plans/wrapped.py
@@ -0,0 +1,57 @@
from collections.abc import Sequence
from typing import Annotated, Any

import bluesky.plans as bp
from bluesky.protocols import Readable
from pydantic import Field, NonNegativeFloat, validate_call

from dodal.common import MsgGenerator
from dodal.plan_stubs.data_session import attach_data_session_metadata_decorator

"""This module wraps plan(s) from bluesky.plans until required handling for them is
moved into bluesky or better handled in downstream services.

Required decorators are installed on plan import
https://github.com/DiamondLightSource/blueapi/issues/474

Non-serialisable fields are ignored when they are optional
https://github.com/DiamondLightSource/blueapi/issues/711

We may also need other adjustments for UI purposes, e.g.
Forcing uniqueness or orderedness of Readables
Limits and metadata (e.g. units)
"""


@attach_data_session_metadata_decorator()
@validate_call(config={"arbitrary_types_allowed": True})
def count(
detectors: Annotated[
set[Readable],
Field(
description="Set of readable devices, will take a reading at each point",
min_length=1,
),
],
num: Annotated[int, Field(description="Number of frames to collect", ge=1)] = 1,
delay: Annotated[
NonNegativeFloat | Sequence[NonNegativeFloat],
Field(
description="Delay between readings: if tuple, len(delay) == num - 1 and \
the delays are between each point, if value or None is the delay for every \
gap",
json_schema_extra={"units": "s"},
),
] = 0.0,
metadata: dict[str, Any] | None = None,
) -> MsgGenerator:
"""Reads from a number of devices.
Wraps bluesky.plans.count(det, num, delay, md=metadata) exposing only serializable
parameters and metadata."""
Review thread on lines +48 to +50:

Contributor: Nit: I assume we've discussed with bluesky to try and get this into the core codebase and couldn't? May be worth a link to that discussion in case anyone asks the question again

Contributor: It was a series of mostly offline discussions and should probably be written up as an ADR

Contributor: Does someone involved in the discussion have time to write an issue with some back-of-the-envelope bullet-point reasons and saying to put them in an ADR, just before it's lost to time?

Contributor Author: I believe we need DiamondLightSource/blueapi#474 for the @attach_metadata to be handled in blueapi consistently, then that decorator gets moved back into blueapi and then blueapi can just use bluesky.plans as a plans module (or... the ones we actually want).

Contributor: In which case can we have a comment here saying we can remove this when that has been done?

Contributor Author: I think actually we'll need to maintain them for at least a while afterwards - bp.count has an argument of type PerShot which is not serialisable. We may be able to ignore all fields that are optional and of a non-serialisable type? Especially where they are Callables. I'll add something to that effect.

PerShot = Callable[[Sequence[Readable], Optional[bps.TakeReading]], MsgGenerator]

https://github.com/bluesky/bluesky/blob/92d2633127e33e51aa1d1b109c80503e74bbe0e0/src/bluesky/plans.py#L66

if isinstance(delay, Sequence):
assert (
len(delay) == num - 1
), f"Number of delays given must be {num - 1}: was given {len(delay)}"
metadata = metadata or {}
metadata["shape"] = (num,)
yield from bp.count(tuple(detectors), num, delay=delay, md=metadata)
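A minimal sketch of calling the wrapped count from another plan (the detector parameter is hypothetical):

```python
from bluesky.protocols import Readable
from bluesky.utils import MsgGenerator

from dodal.plans import count


def three_frames(det: Readable) -> MsgGenerator:
    # Three readings with a 1 s pause between consecutive readings;
    # len(delay) must equal num - 1, as asserted above.
    yield from count({det}, num=3, delay=[1.0, 1.0])
```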
@@ -14,9 +14,9 @@
from ophyd_async.epics.motor import Motor

from dodal.devices.util.test_utils import patch_motor
from dodal.plans.motor_util_plans import (
from dodal.plan_stubs.motor_utils import (
MoveTooLarge,
_check_and_cache_values,
check_and_cache_values,
home_and_reset_wrapper,
)

@@ -59,7 +59,7 @@ def my_device(RE):
"device_type",
[DeviceWithOnlyMotors, DeviceWithNoMotors, DeviceWithSomeMotors],
)
@patch("dodal.plans.motor_util_plans.move_and_reset_wrapper")
@patch("dodal.plan_stubs.motor_utils.move_and_reset_wrapper")
def test_given_types_of_device_when_home_and_reset_wrapper_called_then_motors_and_zeros_passed_to_move_and_reset_wrapper(
patch_move_and_reset, device_type, RE
):
@@ -80,7 +80,7 @@ def test_given_a_device_when_check_and_cache_values_then_motor_values_returned(
set_mock_value(motor.user_readback, i * 100)

motors_and_positions: dict[Motor, float] = RE(
_check_and_cache_values(
check_and_cache_values(
{motor_obj: 0.0 for motor_obj in my_device.motors}, 0, 1000
)
).plan_result # type: ignore
@@ -109,7 +109,7 @@ def test_given_a_device_with_a_too_large_move_when_check_and_cache_values_then_e
motors_and_positions = {motor_obj: new_position for motor_obj in my_device.motors}

with pytest.raises(MoveTooLarge) as e:
RE(_check_and_cache_values(motors_and_positions, 0, max))
RE(check_and_cache_values(motors_and_positions, 0, max))
assert e.value.axis == my_device.y
assert e.value.maximum_move == max

@@ -136,7 +136,7 @@ def test_given_a_device_where_one_move_too_small_when_check_and_cache_values_the
}

motors_and_positions: dict[Motor, float] = RE(
_check_and_cache_values(motors_and_new_positions, min, 1000)
check_and_cache_values(motors_and_new_positions, min, 1000)
).plan_result # type: ignore
cached_positions = motors_and_positions.values()

@@ -156,7 +156,7 @@ def test_given_a_device_where_all_moves_too_small_when_check_and_cache_values_th
motors_and_new_positions = {motor_obj: 0.0 for motor_obj in my_device.motors}

motors_and_positions: dict[Motor, float] = RE(
_check_and_cache_values(motors_and_new_positions, 40, 1000)
check_and_cache_values(motors_and_new_positions, 40, 1000)
).plan_result # type: ignore
cached_positions = motors_and_positions.values()
