cleaned up after rebase
evalott100 committed Aug 21, 2024
1 parent e1ed77b commit c4bece5
Showing 9 changed files with 119 additions and 103 deletions.
2 changes: 0 additions & 2 deletions src/ophyd_async/core/_device_save_loader.py
@@ -22,8 +22,6 @@ def ndarray_representer(dumper: yaml.Dumper, array: npt.NDArray[Any]) -> yaml.Node:
def pydantic_model_abstraction_representer(
dumper: yaml.Dumper, model: BaseModel
) -> yaml.Node:
"""Uses the protocol datatype since it has to be serializable."""

return dumper.represent_data(model.model_dump(mode="python"))


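
For context on this representer, here is a minimal sketch (with a hypothetical stand-in model, not part of this commit) of how it plugs into PyYAML — the model is dumped to builtin types first, so the default representers can emit it:

import yaml
from pydantic import BaseModel

class Point(BaseModel):  # hypothetical stand-in model
    x: int
    y: int

def model_representer(dumper: yaml.Dumper, model: BaseModel) -> yaml.Node:
    # model_dump(mode="python") yields plain dicts/ints, which the dumper
    # already knows how to represent without a custom tag
    return dumper.represent_data(model.model_dump(mode="python"))

yaml.add_multi_representer(BaseModel, model_representer)
print(yaml.dump(Point(x=1, y=2)))  # -> "x: 1\ny: 2\n"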
5 changes: 0 additions & 5 deletions src/ophyd_async/core/_soft_signal_backend.py
@@ -128,8 +128,6 @@ def make_initial_value(self, datatype: Optional[Type[T]]) -> T:


class SoftPydanticModelConverter(SoftConverter):
"""Necessary for serializing soft signals."""

def __init__(self, datatype: Type[BaseModel]):
self.datatype = datatype

@@ -143,9 +141,6 @@ def value(self, value: Any) -> Any:
return value

def write_value(self, value):
if isinstance(value, dict):
# If the device is being deserialized
return self.datatype(**value).model_dump(mode="python")
if isinstance(value, self.datatype):
return value.model_dump(mode="python")
return value
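
The slimmed-down write_value keeps a single normalization rule: model instances are dumped to builtin types, anything else passes through untouched. A hedged sketch of that behaviour, again with a hypothetical stand-in model:

from pydantic import BaseModel

class Point(BaseModel):  # hypothetical stand-in model
    x: int
    y: int

def write_value(datatype, value):
    # mirrors the converter above: only model instances are normalized
    if isinstance(value, datatype):
        return value.model_dump(mode="python")
    return value  # e.g. a plain dict from a loaded yaml file passes through

assert write_value(Point, Point(x=1, y=2)) == {"x": 1, "y": 2}
assert write_value(Point, {"x": 1, "y": 2}) == {"x": 1, "y": 2}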
3 changes: 1 addition & 2 deletions src/ophyd_async/epics/signal/__init__.py
@@ -1,6 +1,6 @@
from ._common import LimitPair, Limits, get_supported_values
from ._p4p import PvaSignalBackend
from ._p4p_table_abstraction import PvaTable
from ._p4p_table_model import PvaTable
from ._signal import (
epics_signal_r,
epics_signal_rw,
@@ -15,7 +15,6 @@
"Limits",
"PvaSignalBackend",
"PvaTable",
"PvaTableAbstraction",
"epics_signal_r",
"epics_signal_rw",
"epics_signal_rw_rbv",
4 changes: 3 additions & 1 deletion src/ophyd_async/epics/signal/_aioca.py
@@ -261,7 +261,9 @@ def make_converter(
# Allow int signals to represent float records when prec is 0
is_prec_zero_float = (
isinstance(value, float)
and get_unique({k: v.precision for k, v in values.items()}, "precision")
and get_unique(
{k: v.precision for k, v in values.items()}, "precision"
)
== 0
)
if not (datatype is int and is_prec_zero_float):
45 changes: 19 additions & 26 deletions src/ophyd_async/epics/signal/_p4p.py
@@ -67,7 +67,7 @@ def _data_key_from_value(
*,
shape: Optional[list[int]] = None,
choices: Optional[list[str]] = None,
dtype: Optional[str] = None,
dtype: Optional[Dtype] = None,
) -> DataKey:
"""
Args:
@@ -256,6 +256,19 @@ def get_datakey(self, source: str, value) -> DataKey:
return _data_key_from_value(source, value, dtype="object")


class PvaPydanticModelConverter(PvaConverter):
def __init__(self, datatype: BaseModel):
self.datatype = datatype

def value(self, value: Value):
return self.datatype(**value.todict())

def write_value(self, value: Union[BaseModel, Dict[str, Any]]):
if isinstance(value, self.datatype):
return value.model_dump(mode="python")
return value


class PvaDictConverter(PvaConverter):
def reading(self, value):
ts = time.time()
@@ -287,28 +300,6 @@ def __getattribute__(self, __name: str) -> Any:
raise NotImplementedError("No PV has been set as connect() has not been called")


class PvaPydanticModelConverter(PvaConverter):
def __init__(self, datatype: BaseModel):
self.datatype = datatype

def reading(self, value: Value):
ts = time.time()
value = self.value(value)
return {"value": value, "timestamp": ts, "alarm_severity": 0}

def value(self, value: Value):
return self.datatype(**value.todict())

def write_value(self, value: Union[BaseModel, Dict[str, Any]]):
"""
A user can put whichever form to the signal.
This is required for yaml deserialization.
"""
if isinstance(value, self.datatype):
return value.model_dump(mode="python")
return value


class PvaConverterFactory(BackendConverterFactory):
_ALLOWED_TYPES = (
bool,
@@ -398,17 +389,19 @@ def make_converter(
== 0
)
if not (datatype is int and is_prec_zero_float):
raise TypeError(f"{pv} has type {typ.__name__} not {datatype.__name__}")
raise TypeError(
f"{pv} has type {typ.__name__} not {datatype.__name__}"
)
return PvaConverter()
elif "NTTable" in typeid:
return PvaTableConverter()
elif "structure" in typeid:
if (
datatype
and inspect.isclass(datatype)
and issubclass(datatype, BaseModel)
):
return PvaPydanticModelConverter(datatype)
return PvaTableConverter()
elif "structure" in typeid:
return PvaDictConverter()
else:
raise TypeError(f"{pv}: Unsupported typeid {typeid}")
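
Read together, the hunks above change the converter dispatch for structured PVs so that an NTTable with a pydantic BaseModel datatype gets the model converter. A sketch of that control flow (converters reduced to names; pick_converter is an illustrative helper, not the real API):

import inspect
from pydantic import BaseModel

def pick_converter(typeid: str, datatype) -> str:
    if "NTTable" in typeid:
        # a pydantic BaseModel datatype opts in to the model converter
        if datatype and inspect.isclass(datatype) and issubclass(datatype, BaseModel):
            return "PvaPydanticModelConverter"
        return "PvaTableConverter"
    elif "structure" in typeid:
        return "PvaDictConverter"
    raise TypeError(f"Unsupported typeid {typeid}")

assert pick_converter("epics:nt/NTTable:1.0", BaseModel) == "PvaPydanticModelConverter"
assert pick_converter("epics:nt/NTTable:1.0", None) == "PvaTableConverter"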
src/ophyd_async/epics/signal/_p4p_table_model.py (renamed from _p4p_table_abstraction.py)
@@ -1,12 +1,9 @@
from typing import Dict

import numpy as np
from pydantic import BaseModel, ConfigDict, model_validator
from pydantic_numpy.typing import NpNDArray


class PvaTable(BaseModel):
"""An abstraction of a PVA Table of str to python array."""
"""An abstraction of a PVA Table of str to numpy array."""

model_config = ConfigDict(validate_assignment=True, strict=False)

@@ -24,7 +21,7 @@ def row(cls, sub_cls, **kwargs) -> "PvaTable":
return sub_cls(**arrayified_kwargs)

def __add__(self, right: "PvaTable") -> "PvaTable":
"""Concatinate the arrays in field values."""
"""Concatenate the arrays in field values."""

assert isinstance(right, type(self)), (
f"{right} is not a `PvaTable`, or is not the same "
@@ -61,10 +58,3 @@ def validate_arrays(self) -> "PvaTable":
)

return self

def convert_to_pva_datatype(self) -> Dict[str, NpNDArray]:
return self.model_dump(mode="python")

@classmethod
def convert_from_pva_datatype(cls, pva_table: Dict[str, NpNDArray]):
return cls(**pva_table)
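
The row/__add__ pair is what lets callers build tables incrementally; the panda SeqTable subclass exercised in the tests below is used like this (a sketch mirroring those tests, not new API):

from functools import reduce

import numpy as np
from ophyd_async.fastcs.panda import SeqTable

# each .row() builds a one-row table; __add__ concatenates every numpy column
table = reduce(lambda x, y: x + y, [SeqTable.row(repeats=n) for n in (1, 2, 3)])
assert np.array_equal(table.repeats, np.array([1, 2, 3], dtype=np.int32))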
6 changes: 4 additions & 2 deletions tests/core/test_signal.py
@@ -413,7 +413,6 @@ def __init__(self):
def some_function(self):
pass

# with pytest.raises(ValueError, match="Unknown datatype 'SomeClass'"):
err_str = (
"Given datatype <class "
"'test_signal.test_signal_unknown_datatype.<locals>.SomeClass'>"
@@ -425,4 +424,7 @@ def some_function(self):
epics_signal_rw(SomeClass, "ca://mock_signal", name="mock_signal")

# Any dtype allowed in soft signal
soft_signal_rw(SomeClass, SomeClass(), "soft_signal")
signal = soft_signal_rw(SomeClass, SomeClass(), "soft_signal")
assert isinstance((await signal.get_value()), SomeClass)
await signal.set(1)
assert (await signal.get_value()) == 1
123 changes: 82 additions & 41 deletions tests/fastcs/panda/test_panda_utils.py
@@ -1,4 +1,5 @@
import numpy as np
import yaml
from bluesky import RunEngine

from ophyd_async.core import DEFAULT_TIMEOUT, DeviceCollector, load_device, save_device
@@ -7,7 +8,9 @@
from ophyd_async.fastcs.panda import (
CommonPandaBlocks,
DataBlock,
PcompDirectionOptions,
SeqTable,
TimeUnits,
phase_sorter,
)

@@ -55,48 +58,86 @@ def check_equal_with_seq_tables(actual, expected):
SeqTable.row(repeats=1),
)

"""
assert mock_save_to_yaml.call_args[0][0][0] == {
# Load the YAML content as a string
with open(str(tmp_path / "panda.yaml"), "r") as file:
yaml_content = file.read()

# Parse the YAML content
parsed_yaml = yaml.safe_load(yaml_content)

assert parsed_yaml[0] == {
"phase_1_signal_units": 0,
"seq.1.prescale_units": TimeUnits("min"),
"seq.2.prescale_units": TimeUnits("min"),
}
check_equal_with_seq_tables(mock_save_to_yaml.call_args[0][0][1],
{
"data.capture": False,
"data.create_directory": 0,
"data.flush_period": 0.0,
"data.hdf_directory": "",
"data.hdf_file_name": "",
"data.num_capture": 0,
"pcap.arm": False,
"pcomp.1.dir": PcompDirectionOptions.positive,
"pcomp.1.enable": "ZERO",
"pcomp.1.pulses": 0,
"pcomp.1.start": 0,
"pcomp.1.step": 0,
"pcomp.1.width": 0,
"pcomp.2.dir": PcompDirectionOptions.positive,
"pcomp.2.enable": "ZERO",
"pcomp.2.pulses": 0,
"pcomp.2.start": 0,
"pcomp.2.step": 0,
"pcomp.2.width": 0,
"pulse.1.delay": 0.0,
"pulse.1.width": 0.0,
"pulse.2.delay": 0.0,
"pulse.2.width": 0.0,
"seq.1.active": False,
"seq.1.table": SeqTable([]),
"seq.1.repeats": 0,
"seq.1.prescale": 0.0,
"seq.1.enable": "ZERO",
"seq.2.table": SeqTable([]),
"seq.2.active": False,
"seq.2.repeats": 0,
"seq.2.prescale": 0.0,
"seq.2.enable": "ZERO",
},
)
assert mock_save_to_yaml.call_args[0][1] == "path"
"""
assert parsed_yaml[1] == {
"data.capture": False,
"data.create_directory": 0,
"data.flush_period": 0.0,
"data.hdf_directory": "",
"data.hdf_file_name": "",
"data.num_capture": 0,
"pcap.arm": False,
"pcomp.1.dir": PcompDirectionOptions.positive,
"pcomp.1.enable": "ZERO",
"pcomp.1.pulses": 0,
"pcomp.1.start": 0,
"pcomp.1.step": 0,
"pcomp.1.width": 0,
"pcomp.2.dir": PcompDirectionOptions.positive,
"pcomp.2.enable": "ZERO",
"pcomp.2.pulses": 0,
"pcomp.2.start": 0,
"pcomp.2.step": 0,
"pcomp.2.width": 0,
"pulse.1.delay": 0.0,
"pulse.1.width": 0.0,
"pulse.2.delay": 0.0,
"pulse.2.width": 0.0,
"seq.1.active": False,
"seq.1.table": {
"outa1": [False],
"outa2": [False],
"outb1": [False],
"outb2": [False],
"outc1": [False],
"outc2": [False],
"outd1": [False],
"outd2": [False],
"oute1": [False],
"oute2": [False],
"outf1": [False],
"outf2": [False],
"position": [0],
"repeats": [1],
"time1": [0],
"time2": [0],
"trigger": [""],
},
"seq.1.repeats": 0,
"seq.1.prescale": 0.0,
"seq.1.enable": "ZERO",
"seq.2.table": {
"outa1": [],
"outa2": [],
"outb1": [],
"outb2": [],
"outc1": [],
"outc2": [],
"outd1": [],
"outd2": [],
"oute1": [],
"oute2": [],
"outf1": [],
"outf2": [],
"position": [],
"repeats": [],
"time1": [],
"time2": [],
"trigger": [],
},
"seq.2.active": False,
"seq.2.repeats": 0,
"seq.2.prescale": 0.0,
"seq.2.enable": "ZERO",
}
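
The reworked test asserts on the file save_device actually wrote instead of on a mocked call, which makes the save/inspect pattern explicit. A condensed sketch of those steps (RE, panda and tmp_path are assumed from the test's fixtures):

import yaml

from ophyd_async.core import save_device
from ophyd_async.fastcs.panda import phase_sorter

# save_device is a bluesky plan, so it runs under the RunEngine
RE(save_device(panda, str(tmp_path / "panda.yaml"), sorter=phase_sorter))
with open(str(tmp_path / "panda.yaml"), "r") as file:
    parsed_yaml = yaml.safe_load(file.read())  # one dict of signal values per phase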
20 changes: 8 additions & 12 deletions tests/fastcs/panda/test_table.py
@@ -77,7 +77,7 @@ def test_seq_table_validation_errors():


def test_seq_table_pva_conversion():
expected_pva_dict = {
pva_dict = {
"repeats": np.array([1, 2, 3, 4], dtype=np.int32),
"trigger": np.array(
["Immediate", "Immediate", "BITC=0", "Immediate"], dtype=np.dtype("U32")
@@ -98,7 +98,7 @@ def test_seq_table_pva_conversion():
"oute2": np.array([1, 0, 1, 0], dtype=np.bool_),
"outf2": np.array([1, 0, 1, 0], dtype=np.bool_),
}
expected_row_wise_dict = [
row_wise_dicts = [
{
"repeats": 1,
"trigger": "Immediate",
@@ -177,25 +177,21 @@ def test_seq_table_pva_conversion():
},
]

seq_table_from_pva_dict = SeqTable(**expected_pva_dict)
for (_, column1), column2 in zip(
seq_table_from_pva_dict, expected_pva_dict.values()
):
seq_table_from_pva_dict = SeqTable(**pva_dict)
for (_, column1), column2 in zip(seq_table_from_pva_dict, pva_dict.values()):
assert np.array_equal(column1, column2)
assert column1.dtype == column2.dtype

seq_table_from_rows = reduce(
lambda x, y: x + y,
[SeqTable.row(**row_kwargs) for row_kwargs in expected_row_wise_dict],
[SeqTable.row(**row_kwargs) for row_kwargs in row_wise_dicts],
)
for (_, column1), column2 in zip(seq_table_from_rows, expected_pva_dict.values()):
for (_, column1), column2 in zip(seq_table_from_rows, pva_dict.values()):
assert np.array_equal(column1, column2)
assert column1.dtype == column2.dtype

# Idempotency
applied_twice_to_pva_dict = SeqTable(**expected_pva_dict).model_dump(mode="python")
for column1, column2 in zip(
applied_twice_to_pva_dict.values(), expected_pva_dict.values()
):
applied_twice_to_pva_dict = SeqTable(**pva_dict).model_dump(mode="python")
for column1, column2 in zip(applied_twice_to_pva_dict.values(), pva_dict.values()):
assert np.array_equal(column1, column2)
assert column1.dtype == column2.dtype
