Skip to content

Commit

Permalink
WIP: pushing so I don't lose progress when power cycling
Browse files Browse the repository at this point in the history
  • Loading branch information
evalott100 committed Aug 15, 2024
1 parent b60a8d2 commit 8bbdcdc
Show file tree
Hide file tree
Showing 14 changed files with 459 additions and 242 deletions.
10 changes: 9 additions & 1 deletion src/ophyd_async/core/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,13 @@
soft_signal_rw,
wait_for_value,
)
from ._signal_backend import RuntimeSubsetEnum, SignalBackend, SubsetEnum
from ._signal_backend import (
BackendConverterFactory,
ProtocolDatatypeAbstraction,
RuntimeSubsetEnum,
SignalBackend,
SubsetEnum,
)
from ._soft_signal_backend import SignalMetadata, SoftSignalBackend
from ._status import AsyncStatus, WatchableAsyncStatus
from ._utils import (
Expand Down Expand Up @@ -103,6 +109,7 @@
"MockSignalBackend",
"callback_on_mock_put",
"get_mock_put",
"BackendConverterFactory",
"mock_puts_blocked",
"reset_mock_put_calls",
"set_mock_put_proceeds",
Expand All @@ -117,6 +124,7 @@
"NameProvider",
"PathInfo",
"PathProvider",
"ProtocolDatatypeAbstraction",
"ShapeProvider",
"StaticFilenameProvider",
"StaticPathProvider",
Expand Down
44 changes: 42 additions & 2 deletions src/ophyd_async/core/_signal_backend.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,50 @@
from abc import abstractmethod
from typing import TYPE_CHECKING, ClassVar, Generic, Literal, Optional, Tuple, Type
from abc import ABC, abstractmethod
from typing import (
TYPE_CHECKING,
ClassVar,
Generic,
Literal,
Optional,
Tuple,
Type,
)

from ._protocol import DataKey, Reading
from ._utils import DEFAULT_TIMEOUT, ReadingValueCallback, T


class ProtocolDatatypeAbstraction(ABC, Generic[T]):
    """Interface for a high-level datatype that can round-trip to a wire format.

    ``T`` is the protocol-level (wire) representation; implementations convert
    an instance of themselves to ``T`` and reconstruct themselves from a ``T``.
    """

    @abstractmethod
    def __init__(self):
        """The abstract datatype must be able to be initialized with no arguments."""

    @abstractmethod
    def convert_to_protocol_datatype(self) -> T:
        """
        Convert the abstract datatype to a form which can be sent
        over whichever protocol.
        """

    @classmethod
    @abstractmethod
    def convert_from_protocol_datatype(cls, value: T) -> "ProtocolDatatypeAbstraction":
        """
        Convert the datatype received from the protocol to a
        higher level abstract datatype.
        """


class BackendConverterFactory(ABC):
    """Convert between the signal backend and the signal type.

    Subclasses declare which datatypes they accept via ``_ALLOWED_TYPES`` and
    build an appropriate converter in ``make_converter``.
    """

    # Datatypes (or bases thereof) this backend is able to convert.
    _ALLOWED_TYPES: ClassVar[Tuple[Type]]

    @classmethod
    @abstractmethod
    def make_converter(cls, datatype: Type):
        """Return a converter object appropriate for ``datatype``.

        BUG FIX: the first parameter of a ``@classmethod`` was named ``self``;
        it is the class, so it is named ``cls``.
        """


class SignalBackend(Generic[T]):
"""A read/write/monitor backend for a Signals"""

Expand Down
64 changes: 49 additions & 15 deletions src/ophyd_async/core/_soft_signal_backend.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,18 @@
import time
from collections import abc
from enum import Enum
from typing import Dict, Generic, Optional, Tuple, Type, Union, cast, get_origin
from typing import Any, Dict, Generic, Optional, Tuple, Type, Union, cast, get_origin

import numpy as np
from bluesky.protocols import DataKey, Dtype, Reading
from typing_extensions import TypedDict

from ._signal_backend import RuntimeSubsetEnum, SignalBackend
from ._signal_backend import (
BackendConverterFactory,
ProtocolDatatypeAbstraction,
RuntimeSubsetEnum,
SignalBackend,
)
from ._utils import DEFAULT_TIMEOUT, ReadingValueCallback, T, get_dtype

primitive_dtypes: Dict[type, Dtype] = {
Expand Down Expand Up @@ -94,7 +99,7 @@ def make_initial_value(self, datatype: Optional[Type[T]]) -> T:
class SoftEnumConverter(SoftConverter):
choices: Tuple[str, ...]

def __init__(self, datatype: Union[RuntimeSubsetEnum, Enum]):
def __init__(self, datatype: Union[RuntimeSubsetEnum, Type[Enum]]):
if issubclass(datatype, Enum):
self.choices = tuple(v.value for v in datatype)
else:
Expand Down Expand Up @@ -122,19 +127,46 @@ def make_initial_value(self, datatype: Optional[Type[T]]) -> T:
return cast(T, self.choices[0])


def make_converter(datatype):
is_array = get_dtype(datatype) is not None
is_sequence = get_origin(datatype) == abc.Sequence
is_enum = inspect.isclass(datatype) and (
issubclass(datatype, Enum) or issubclass(datatype, RuntimeSubsetEnum)
)
class SoftProtocolDatatypeAbstractionConverter(SoftConverter):
    """SoftConverter for datatypes implementing ``ProtocolDatatypeAbstraction``.

    Translates between the high-level abstraction and its protocol (wire)
    representation when reading from / writing to a soft signal.
    """

    def __init__(self, datatype: Type[ProtocolDatatypeAbstraction]):
        # The concrete abstraction class used for both directions of conversion.
        self.datatype = datatype

    def reading(self, value) -> Reading:
        # Lift the raw protocol value into the abstraction before building
        # the Reading.
        value = self.datatype.convert_from_protocol_datatype(value)
        return super().reading(value)

    def value(self, value: Any) -> Any:
        # NOTE(review): this invokes the instance method through the class,
        # so ``value`` is expected to already be an instance of
        # ``self.datatype`` — confirm with callers.
        return self.datatype.convert_to_protocol_datatype(value)

    def write_value(self, value):
        # BUG FIX: previously called ``convert_to_pva_table()``, a
        # PVA-specific method that does not exist on the protocol-agnostic
        # ProtocolDatatypeAbstraction interface.
        return value.convert_to_protocol_datatype()

    def make_initial_value(self, datatype: Optional[Type]) -> Any:
        # Pass-through to the base implementation. ``Optional[Type]`` used
        # instead of ``Type | None`` to match the file's typing style and
        # support Python < 3.10.
        return super().make_initial_value(datatype)


class SoftSignalConverterFactory(BackendConverterFactory):
    """Selects the appropriate ``SoftConverter`` for a requested datatype."""

    _ALLOWED_TYPES = (object,)  # Any type is allowed

    @classmethod
    def make_converter(cls, datatype):
        """Return the SoftConverter suited to ``datatype``.

        Arrays and sequences get ``SoftArrayConverter``; enums (including
        runtime subset enums) get ``SoftEnumConverter``; classes implementing
        ``ProtocolDatatypeAbstraction`` get the abstraction converter; anything
        else falls back to the plain ``SoftConverter``.
        """
        is_array = get_dtype(datatype) is not None
        is_sequence = get_origin(datatype) == abc.Sequence
        is_enum = inspect.isclass(datatype) and (
            issubclass(datatype, Enum) or issubclass(datatype, RuntimeSubsetEnum)
        )
        # BUG FIX: issubclass was called with a single argument
        # (``issubclass(ProtocolDatatypeAbstraction)``), which raises
        # TypeError for every class datatype.
        is_convertable_abstract_datatype = inspect.isclass(datatype) and issubclass(
            datatype, ProtocolDatatypeAbstraction
        )

        if is_array or is_sequence:
            return SoftArrayConverter()
        if is_enum:
            return SoftEnumConverter(datatype)
        if is_convertable_abstract_datatype:
            return SoftProtocolDatatypeAbstractionConverter(datatype)

        return SoftConverter()


class SoftSignalBackend(SignalBackend[T]):
Expand All @@ -154,7 +186,9 @@ def __init__(
self.datatype = datatype
self._initial_value = initial_value
self._metadata = metadata or {}
self.converter: SoftConverter = make_converter(datatype)
self.converter: SoftConverter = SoftSignalConverterFactory.make_converter(
datatype
)
if self._initial_value is None:
self._initial_value = self.converter.make_initial_value(self.datatype)
else:
Expand Down
3 changes: 2 additions & 1 deletion src/ophyd_async/epics/signal/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from ._common import LimitPair, Limits, get_supported_values
from ._p4p import PvaSignalBackend
from ._p4p import PvaSignalBackend, PvaTableAbstraction
from ._signal import (
epics_signal_r,
epics_signal_rw,
Expand All @@ -13,6 +13,7 @@
"LimitPair",
"Limits",
"PvaSignalBackend",
"PvaTableAbstraction",
"epics_signal_r",
"epics_signal_rw",
"epics_signal_rw_rbv",
Expand Down
117 changes: 65 additions & 52 deletions src/ophyd_async/epics/signal/_aioca.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
from dataclasses import dataclass
from enum import Enum
from math import isnan, nan
from typing import Any, Dict, List, Optional, Type, Union
from typing import Any, Dict, List, Optional, Sequence, Type, Union

import numpy as np
from aioca import (
Expand All @@ -22,8 +22,10 @@

from ophyd_async.core import (
DEFAULT_TIMEOUT,
BackendConverterFactory,
NotConnected,
ReadingValueCallback,
RuntimeSubsetEnum,
SignalBackend,
T,
get_dtype,
Expand Down Expand Up @@ -183,57 +185,66 @@ def __getattribute__(self, __name: str) -> Any:
raise NotImplementedError("No PV has been set as connect() has not been called")


def make_converter(
datatype: Optional[Type], values: Dict[str, AugmentedValue]
) -> CaConverter:
pv = list(values)[0]
pv_dbr = get_unique({k: v.datatype for k, v in values.items()}, "datatypes")
is_array = bool([v for v in values.values() if v.element_count > 1])
if is_array and datatype is str and pv_dbr == dbr.DBR_CHAR:
# Override waveform of chars to be treated as string
return CaLongStrConverter()
elif is_array and pv_dbr == dbr.DBR_STRING:
# Waveform of strings, check we wanted this
if datatype:
datatype_dtype = get_dtype(datatype)
if not datatype_dtype or not np.can_cast(datatype_dtype, np.str_):
raise TypeError(f"{pv} has type [str] not {datatype.__name__}")
return CaArrayConverter(pv_dbr, None)
elif is_array:
pv_dtype = get_unique({k: v.dtype for k, v in values.items()}, "dtypes")
# This is an array
if datatype:
# Check we wanted an array of this type
dtype = get_dtype(datatype)
if not dtype:
raise TypeError(f"{pv} has type [{pv_dtype}] not {datatype.__name__}")
if dtype != pv_dtype:
raise TypeError(f"{pv} has type [{pv_dtype}] not [{dtype}]")
return CaArrayConverter(pv_dbr, None)
elif pv_dbr == dbr.DBR_ENUM and datatype is bool:
# Database can't do bools, so are often representated as enums, CA can do int
pv_choices_len = get_unique(
{k: len(v.enums) for k, v in values.items()}, "number of choices"
)
if pv_choices_len != 2:
raise TypeError(f"{pv} has {pv_choices_len} choices, can't map to bool")
return CaBoolConverter(dbr.DBR_SHORT, dbr.DBR_SHORT)
elif pv_dbr == dbr.DBR_ENUM:
# This is an Enum
pv_choices = get_unique(
{k: tuple(v.enums) for k, v in values.items()}, "choices"
)
supported_values = get_supported_values(pv, datatype, pv_choices)
return CaEnumConverter(dbr.DBR_STRING, None, supported_values)
else:
value = list(values.values())[0]
# Done the dbr check, so enough to check one of the values
if datatype and not isinstance(value, datatype):
raise TypeError(
f"{pv} has type {type(value).__name__.replace('ca_', '')} "
+ f"not {datatype.__name__}"
class CaConverterFactory(BackendConverterFactory):
    """Builds the ``CaConverter`` matching a PV's DBR type and the requested
    Python datatype, validating that the two are compatible.
    """

    _ALLOWED_TYPES = (bool, int, float, str, Sequence, Enum, RuntimeSubsetEnum)

    @classmethod
    def make_converter(
        cls, datatype: Optional[Type], values: Dict[str, AugmentedValue]
    ) -> CaConverter:
        """Return a converter for ``datatype`` given the PVs' initial values.

        Raises TypeError if ``datatype`` is unsupported or does not match the
        PV's actual type.
        """
        # NOTE(review): numpy array datatypes are handled below via get_dtype,
        # but a non-class array alias reaching issubclass here would raise —
        # confirm against how array datatypes are passed in.
        if datatype and not issubclass(datatype, cls._ALLOWED_TYPES):
            # BUG FIX: this is the Channel Access (CA) backend, not PVA.
            raise TypeError(f"Given datatype {datatype.__name__} unsupported in CA.")

        pv = list(values)[0]
        pv_dbr = get_unique({k: v.datatype for k, v in values.items()}, "datatypes")
        is_array = bool([v for v in values.values() if v.element_count > 1])
        if is_array and datatype is str and pv_dbr == dbr.DBR_CHAR:
            # Override waveform of chars to be treated as string
            return CaLongStrConverter()
        elif is_array and pv_dbr == dbr.DBR_STRING:
            # Waveform of strings, check we wanted this
            if datatype:
                datatype_dtype = get_dtype(datatype)
                if not datatype_dtype or not np.can_cast(datatype_dtype, np.str_):
                    raise TypeError(f"{pv} has type [str] not {datatype.__name__}")
            return CaArrayConverter(pv_dbr, None)
        elif is_array:
            pv_dtype = get_unique({k: v.dtype for k, v in values.items()}, "dtypes")
            # This is an array
            if datatype:
                # Check we wanted an array of this type
                dtype = get_dtype(datatype)
                if not dtype:
                    raise TypeError(
                        f"{pv} has type [{pv_dtype}] not {datatype.__name__}"
                    )
                if dtype != pv_dtype:
                    raise TypeError(f"{pv} has type [{pv_dtype}] not [{dtype}]")
            return CaArrayConverter(pv_dbr, None)
        elif pv_dbr == dbr.DBR_ENUM and datatype is bool:
            # Database can't do bools, so are often represented as enums,
            # CA can do int
            pv_choices_len = get_unique(
                {k: len(v.enums) for k, v in values.items()}, "number of choices"
            )
            if pv_choices_len != 2:
                raise TypeError(f"{pv} has {pv_choices_len} choices, can't map to bool")
            return CaBoolConverter(dbr.DBR_SHORT, dbr.DBR_SHORT)
        elif pv_dbr == dbr.DBR_ENUM:
            # This is an Enum
            pv_choices = get_unique(
                {k: tuple(v.enums) for k, v in values.items()}, "choices"
            )
            supported_values = get_supported_values(pv, datatype, pv_choices)
            return CaEnumConverter(dbr.DBR_STRING, None, supported_values)
        else:
            value = list(values.values())[0]
            # Done the dbr check, so enough to check one of the values
            if datatype and not isinstance(value, datatype):
                raise TypeError(
                    f"{pv} has type {type(value).__name__.replace('ca_', '')} "
                    + f"not {datatype.__name__}"
                )
            return CaConverter(pv_dbr, None)


_tried_pyepics = False
Expand Down Expand Up @@ -280,7 +291,9 @@ async def connect(self, timeout: float = DEFAULT_TIMEOUT):
else:
# The same, so only need to connect one
await self._store_initial_value(self.read_pv, timeout=timeout)
self.converter = make_converter(self.datatype, self.initial_values)
self.converter = CaConverterFactory.make_converter(
self.datatype, self.initial_values
)

async def put(self, value: Optional[T], wait=True, timeout=None):
if value is None:
Expand Down
Loading

0 comments on commit 8bbdcdc

Please sign in to comment.