cleaned up after rebase
evalott100 committed Aug 21, 2024
1 parent e1ed77b commit 42555ed
Showing 9 changed files with 109 additions and 71 deletions.
1 change: 0 additions & 1 deletion src/ophyd_async/core/_device_save_loader.py
@@ -22,7 +22,6 @@ def ndarray_representer(dumper: yaml.Dumper, array: npt.NDArray[Any]) -> yaml.Node:
def pydantic_model_abstraction_representer(
    dumper: yaml.Dumper, model: BaseModel
) -> yaml.Node:
    """Uses the protocol datatype since it has to be serializable."""

    return dumper.represent_data(model.model_dump(mode="python"))

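Aside: a hedged sketch of how representers like these are typically registered with PyYAML, so that `yaml.dump` can serialize numpy arrays and pydantic models. The `ndarray_representer` body here is an assumption shown for completeness; the pydantic representer matches the diff above.

```python
from typing import Any

import numpy as np
import numpy.typing as npt
import yaml
from pydantic import BaseModel


def ndarray_representer(dumper: yaml.Dumper, array: npt.NDArray[Any]) -> yaml.Node:
    # Assumed implementation: dump the array as an inline YAML sequence.
    return dumper.represent_sequence(
        "tag:yaml.org,2002:seq", array.tolist(), flow_style=True
    )


def pydantic_model_abstraction_representer(
    dumper: yaml.Dumper, model: BaseModel
) -> yaml.Node:
    # As in the diff above: serialize via the model's python-mode dump.
    return dumper.represent_data(model.model_dump(mode="python"))


# add_multi_representer also covers subclasses of BaseModel.
yaml.add_representer(np.ndarray, ndarray_representer)
yaml.add_multi_representer(BaseModel, pydantic_model_abstraction_representer)
```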
1 change: 0 additions & 1 deletion src/ophyd_async/core/_soft_signal_backend.py
@@ -128,7 +128,6 @@ def make_initial_value(self, datatype: Optional[Type[T]]) -> T:


class SoftPydanticModelConverter(SoftConverter):
    """Necessary for serializing soft signals."""

    def __init__(self, datatype: Type[BaseModel]):
        self.datatype = datatype
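For orientation, a hedged sketch of what this converter is for: soft signals whose datatype is a pydantic model need values coerced between dict form (as loaded from YAML) and model form. The method body below is an assumption modelled on the pva `write_value` shown later in this commit, not the verbatim implementation.

```python
from typing import Any, Dict, Type, Union

from pydantic import BaseModel


class PydanticModelConverterSketch:
    """Illustrative stand-in for SoftPydanticModelConverter."""

    def __init__(self, datatype: Type[BaseModel]):
        self.datatype = datatype

    def write_value(self, value: Union[BaseModel, Dict[str, Any]]) -> BaseModel:
        # A dict arrives when a saved device is deserialized from YAML;
        # coerce it back into the declared pydantic model type.
        if isinstance(value, dict):
            return self.datatype(**value)
        return value
```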
3 changes: 1 addition & 2 deletions src/ophyd_async/epics/signal/__init__.py
@@ -1,6 +1,6 @@
from ._common import LimitPair, Limits, get_supported_values
from ._p4p import PvaSignalBackend
from ._p4p_table_abstraction import PvaTable
from ._p4p_table_model import PvaTable
from ._signal import (
epics_signal_r,
epics_signal_rw,
@@ -15,7 +15,6 @@
"Limits",
"PvaSignalBackend",
"PvaTable",
"PvaTableAbstraction",
"epics_signal_r",
"epics_signal_rw",
"epics_signal_rw_rbv",
4 changes: 3 additions & 1 deletion src/ophyd_async/epics/signal/_aioca.py
@@ -261,7 +261,9 @@ def make_converter(
# Allow int signals to represent float records when prec is 0
is_prec_zero_float = (
isinstance(value, float)
and get_unique({k: v.precision for k, v in values.items()}, "precision")
and get_unique(
{k: v.precision for k, v in values.items()}, "precision"
)
== 0
)
if not (datatype is int and is_prec_zero_float):
12 changes: 9 additions & 3 deletions src/ophyd_async/epics/signal/_p4p.py
@@ -67,7 +67,7 @@ def _data_key_from_value(
*,
shape: Optional[list[int]] = None,
choices: Optional[list[str]] = None,
dtype: Optional[str] = None,
dtype: Optional[Dtype] = None,
) -> DataKey:
"""
Args:
@@ -301,14 +301,18 @@ def value(self, value: Value):

    def write_value(self, value: Union[BaseModel, Dict[str, Any]]):
        """
        A user can put whichever form to the signal.
        A user can `signal.set` whichever form.
        This is required for yaml deserialization.
        """
        if isinstance(value, dict):
            # If the device is being deserialized
            return self.datatype(**value).model_dump(mode="python")
        if isinstance(value, self.datatype):
            return value.model_dump(mode="python")
        return value



class PvaConverterFactory(BackendConverterFactory):
_ALLOWED_TYPES = (
bool,
@@ -398,7 +402,9 @@ def make_converter(
== 0
)
if not (datatype is int and is_prec_zero_float):
raise TypeError(f"{pv} has type {typ.__name__} not {datatype.__name__}")
raise TypeError(
f"{pv} has type {typ.__name__} not {datatype.__name__}"
)
return PvaConverter()
elif "NTTable" in typeid:
return PvaTableConverter()
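The intent of `write_value` above, as a hedged usage sketch. `MyTable` and the PV name are hypothetical, and `connect(mock=True)` is used only so the snippet can run without a real IOC:

```python
import numpy as np
from pydantic_numpy.typing import NpNDArray

from ophyd_async.epics.signal import PvaTable, epics_signal_rw


class MyTable(PvaTable):  # hypothetical table shape, for illustration only
    a: NpNDArray


async def demo() -> None:
    signal = epics_signal_rw(MyTable, "pva://SOME:TABLE:PV")
    await signal.connect(mock=True)
    # Both forms are normalised to the same python dict by write_value:
    await signal.set(MyTable(a=np.array([1, 2, 3])))
    await signal.set({"a": np.array([1, 2, 3])})  # e.g. as loaded from YAML
```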
14 changes: 2 additions & 12 deletions src/ophyd_async/epics/signal/_p4p_table_model.py
@@ -1,12 +1,9 @@
from typing import Dict

import numpy as np
from pydantic import BaseModel, ConfigDict, model_validator
from pydantic_numpy.typing import NpNDArray


class PvaTable(BaseModel):
"""An abstraction of a PVA Table of str to python array."""
"""An abstraction of a PVA Table of str to numpy array."""

model_config = ConfigDict(validate_assignment=True, strict=False)

@@ -24,7 +21,7 @@ def row(cls, sub_cls, **kwargs) -> "PvaTable":
return sub_cls(**arrayified_kwargs)

def __add__(self, right: "PvaTable") -> "PvaTable":
"""Concatinate the arrays in field values."""
"""Concatenate the arrays in field values."""

assert isinstance(right, type(self)), (
f"{right} is not a `PvaTable`, or is not the same "
@@ -61,10 +58,3 @@ def validate_arrays(self) -> "PvaTable":
)

return self

def convert_to_pva_datatype(self) -> Dict[str, NpNDArray]:
return self.model_dump(mode="python")

@classmethod
def convert_from_pva_datatype(cls, pva_table: Dict[str, NpNDArray]):
return cls(**pva_table)
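A short usage sketch of `PvaTable.row` and the validated `__add__` concatenation. `OneColumn` is a hypothetical subclass; this assumes `row` arrayifies scalar kwargs as it does for `SeqTable` in the tests below:

```python
import numpy as np
from pydantic_numpy.typing import NpNDArray

from ophyd_async.epics.signal import PvaTable


class OneColumn(PvaTable):  # hypothetical subclass, for illustration only
    position: NpNDArray


# Each row() builds a single-row table; __add__ concatenates column arrays.
table = OneColumn.row(position=1) + OneColumn.row(position=2)
assert np.array_equal(table.position, np.array([1, 2]))
```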
6 changes: 4 additions & 2 deletions tests/core/test_signal.py
@@ -413,7 +413,6 @@ def __init__(self):
def some_function(self):
pass

# with pytest.raises(ValueError, match="Unknown datatype 'SomeClass'"):
err_str = (
"Given datatype <class "
"'test_signal.test_signal_unknown_datatype.<locals>.SomeClass'>"
@@ -425,4 +424,7 @@ def some_function(self):
epics_signal_rw(SomeClass, "ca://mock_signal", name="mock_signal")

# Any dtype allowed in soft signal
soft_signal_rw(SomeClass, SomeClass(), "soft_signal")
signal = soft_signal_rw(SomeClass, SomeClass(), "soft_signal")
assert isinstance((await signal.get_value()), SomeClass)
await signal.set(1)
assert (await signal.get_value()) == 1
123 changes: 82 additions & 41 deletions tests/fastcs/panda/test_panda_utils.py
@@ -1,4 +1,5 @@
import numpy as np
import yaml
from bluesky import RunEngine

from ophyd_async.core import DEFAULT_TIMEOUT, DeviceCollector, load_device, save_device
@@ -7,7 +8,9 @@
from ophyd_async.fastcs.panda import (
CommonPandaBlocks,
DataBlock,
PcompDirectionOptions,
SeqTable,
TimeUnits,
phase_sorter,
)

@@ -55,48 +58,86 @@ def check_equal_with_seq_tables(actual, expected):
SeqTable.row(repeats=1),
)

"""
assert mock_save_to_yaml.call_args[0][0][0] == {
# Load the YAML content as a string
with open(str(tmp_path / "panda.yaml"), 'r') as file:
yaml_content = file.read()

# Parse the YAML content
parsed_yaml = yaml.safe_load(yaml_content)

assert parsed_yaml[0] == {
"phase_1_signal_units": 0,
"seq.1.prescale_units": TimeUnits("min"),
"seq.2.prescale_units": TimeUnits("min"),
}
check_equal_with_seq_tables(mock_save_to_yaml.call_args[0][0][1],
{
"data.capture": False,
"data.create_directory": 0,
"data.flush_period": 0.0,
"data.hdf_directory": "",
"data.hdf_file_name": "",
"data.num_capture": 0,
"pcap.arm": False,
"pcomp.1.dir": PcompDirectionOptions.positive,
"pcomp.1.enable": "ZERO",
"pcomp.1.pulses": 0,
"pcomp.1.start": 0,
"pcomp.1.step": 0,
"pcomp.1.width": 0,
"pcomp.2.dir": PcompDirectionOptions.positive,
"pcomp.2.enable": "ZERO",
"pcomp.2.pulses": 0,
"pcomp.2.start": 0,
"pcomp.2.step": 0,
"pcomp.2.width": 0,
"pulse.1.delay": 0.0,
"pulse.1.width": 0.0,
"pulse.2.delay": 0.0,
"pulse.2.width": 0.0,
"seq.1.active": False,
"seq.1.table": SeqTable([]),
"seq.1.repeats": 0,
"seq.1.prescale": 0.0,
"seq.1.enable": "ZERO",
"seq.2.table": SeqTable([]),
"seq.2.active": False,
"seq.2.repeats": 0,
"seq.2.prescale": 0.0,
"seq.2.enable": "ZERO",
},
)
assert mock_save_to_yaml.call_args[0][1] == "path"
"""
assert parsed_yaml[1] == {
"data.capture": False,
"data.create_directory": 0,
"data.flush_period": 0.0,
"data.hdf_directory": "",
"data.hdf_file_name": "",
"data.num_capture": 0,
"pcap.arm": False,
"pcomp.1.dir": PcompDirectionOptions.positive,
"pcomp.1.enable": "ZERO",
"pcomp.1.pulses": 0,
"pcomp.1.start": 0,
"pcomp.1.step": 0,
"pcomp.1.width": 0,
"pcomp.2.dir": PcompDirectionOptions.positive,
"pcomp.2.enable": "ZERO",
"pcomp.2.pulses": 0,
"pcomp.2.start": 0,
"pcomp.2.step": 0,
"pcomp.2.width": 0,
"pulse.1.delay": 0.0,
"pulse.1.width": 0.0,
"pulse.2.delay": 0.0,
"pulse.2.width": 0.0,
"seq.1.active": False,
"seq.1.table": {
"outa1": [False],
"outa2": [False],
"outb1": [False],
"outb2": [False],
"outc1": [False],
"outc2": [False],
"outd1": [False],
"outd2": [False],
"oute1": [False],
"oute2": [False],
"outf1": [False],
"outf2": [False],
"position": [0],
"repeats": [1],
"time1": [0],
"time2": [0],
"trigger": [""],
},
"seq.1.repeats": 0,
"seq.1.prescale": 0.0,
"seq.1.enable": "ZERO",
"seq.2.table": {
"outa1": [],
"outa2": [],
"outb1": [],
"outb2": [],
"outc1": [],
"outc2": [],
"outd1": [],
"outd2": [],
"oute1": [],
"oute2": [],
"outf1": [],
"outf2": [],
"position": [],
"repeats": [],
"time1": [],
"time2": [],
"trigger": [],
},
"seq.2.active": False,
"seq.2.repeats": 0,
"seq.2.prescale": 0.0,
"seq.2.enable": "ZERO",
}
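For reference, a hedged sketch of the save path this test exercises, using `save_device` and `phase_sorter` as imported at the top of the file. `panda` and `tmp_path` stand in for the test's fixtures, and exact signatures may differ between versions:

```python
from pathlib import Path

from bluesky import RunEngine

from ophyd_async.core import save_device
from ophyd_async.fastcs.panda import phase_sorter


def save_panda_to_yaml(panda, tmp_path: Path) -> None:
    # Run the save_device plan: signal values are sorted into phases by
    # phase_sorter and written as a list of dicts to panda.yaml, which the
    # test then re-reads with yaml.safe_load.
    RE = RunEngine()
    RE(save_device(panda, str(tmp_path / "panda.yaml"), sorter=phase_sorter))
```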
16 changes: 8 additions & 8 deletions tests/fastcs/panda/test_table.py
@@ -77,7 +77,7 @@ def test_seq_table_validation_errors():


def test_seq_table_pva_conversion():
expected_pva_dict = {
pva_dict = {
"repeats": np.array([1, 2, 3, 4], dtype=np.int32),
"trigger": np.array(
["Immediate", "Immediate", "BITC=0", "Immediate"], dtype=np.dtype("U32")
@@ -98,7 +98,7 @@ def test_seq_table_pva_conversion():
"oute2": np.array([1, 0, 1, 0], dtype=np.bool_),
"outf2": np.array([1, 0, 1, 0], dtype=np.bool_),
}
expected_row_wise_dict = [
row_wise_dicts = [
{
"repeats": 1,
"trigger": "Immediate",
@@ -177,25 +177,25 @@ def test_seq_table_pva_conversion():
},
]

seq_table_from_pva_dict = SeqTable(**expected_pva_dict)
seq_table_from_pva_dict = SeqTable(**pva_dict)
for (_, column1), column2 in zip(
seq_table_from_pva_dict, expected_pva_dict.values()
seq_table_from_pva_dict, pva_dict.values()
):
assert np.array_equal(column1, column2)
assert column1.dtype == column2.dtype

seq_table_from_rows = reduce(
lambda x, y: x + y,
[SeqTable.row(**row_kwargs) for row_kwargs in expected_row_wise_dict],
[SeqTable.row(**row_kwargs) for row_kwargs in row_wise_dicts],
)
for (_, column1), column2 in zip(seq_table_from_rows, expected_pva_dict.values()):
for (_, column1), column2 in zip(seq_table_from_rows, pva_dict.values()):
assert np.array_equal(column1, column2)
assert column1.dtype == column2.dtype

# Idempotency
applied_twice_to_pva_dict = SeqTable(**expected_pva_dict).model_dump(mode="python")
applied_twice_to_pva_dict = SeqTable(**pva_dict).model_dump(mode="python")
for column1, column2 in zip(
applied_twice_to_pva_dict.values(), expected_pva_dict.values()
applied_twice_to_pva_dict.values(), pva_dict.values()
):
assert np.array_equal(column1, column2)
assert column1.dtype == column2.dtype
