Skip to content

Commit

Permalink
initial commit
Browse files Browse the repository at this point in the history
  • Loading branch information
ZohebShaikh committed Sep 20, 2024
1 parent 31c7ded commit 2924fb0
Show file tree
Hide file tree
Showing 3 changed files with 11 additions and 20 deletions.
6 changes: 3 additions & 3 deletions src/ophyd_async/core/_detector.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,13 +46,13 @@ class TriggerInfo(BaseModel):
"""Minimal set of information required to setup triggering on a detector"""

#: Number of triggers that will be sent, (0 means infinite) Can be:
# - A single interger or
# - A list of intergers for mulitple triggers
# - A single integer or
# - A list of integers for multiple triggers
# Example for tomography: TriggerInfo(number=[2,3,100,3])
#: This would trigger:
#: - 2 times for dark field images
#: - 3 times for initial flat field images
#: - 1000 times for projections
#: - 100 times for projections
#: - 3 times for final flat field images
number: int | list[int]
#: Sort of triggers that will be sent
Expand Down
17 changes: 4 additions & 13 deletions src/ophyd_async/plan_stubs/_fly.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ def prepare_static_pcomp_flyer_and_detectors(
def prepare_static_seq_table_flyer_and_detectors_with_same_trigger(
flyer: StandardFlyer[SeqTableInfo],
detectors: list[StandardDetector],
number_of_frames: int | list[int],
number_of_frames: int,
exposure: float,
shutter_time: float,
repeats: int = 1,
Expand All @@ -59,24 +59,15 @@ def prepare_static_seq_table_flyer_and_detectors_with_same_trigger(
raise ValueError("No detectors provided. There must be at least one.")

deadtime = max(det.controller.get_deadtime(exposure) for det in detectors)
if isinstance(number_of_frames, list):
assert (
[number_of_frames[0]] * len(number_of_frames) == number_of_frames
), "In fly scan number of frames should be same for each iteration"
number_of_frames = [frames * repeats for frames in number_of_frames]
single_frame = number_of_frames[0]
else:
number_of_frames = number_of_frames * repeats
single_frame = number_of_frames

trigger_info = TriggerInfo(
number=number_of_frames,
number=number_of_frames * repeats,
trigger=DetectorTrigger.constant_gate,
deadtime=deadtime,
livetime=exposure,
frame_timeout=frame_timeout,
)
trigger_time = single_frame * (exposure + deadtime)
trigger_time = number_of_frames * (exposure + deadtime)
pre_delay = max(period - 2 * shutter_time - trigger_time, 0)

table = (
Expand All @@ -89,7 +80,7 @@ def prepare_static_seq_table_flyer_and_detectors_with_same_trigger(
+
# Keeping shutter open, do N triggers
SeqTable.row(
repeats=single_frame,
repeats=number_of_frames,
time1=in_micros(exposure),
outa1=True,
outb1=True,
Expand Down
8 changes: 4 additions & 4 deletions tests/fastcs/panda/test_hdf_panda.py
Original file line number Diff line number Diff line change
Expand Up @@ -209,20 +209,20 @@ def append_and_print(name, doc):
flyer = StandardFlyer(trigger_logic, [], name="flyer")

def flying_plan():
iteration = 2
iteration = 4
yield from bps.stage_all(mock_hdf_panda, flyer)
yield from bps.open_run()
yield from prepare_static_seq_table_flyer_and_detectors_with_same_trigger( # noqa: E501
flyer,
[mock_hdf_panda],
number_of_frames=[1] * iteration,
number_of_frames=4,
exposure=exposure,
shutter_time=shutter_time,
)

yield from bps.declare_stream(mock_hdf_panda, name="main_stream", collect=True)

for _ in range(iteration):
for frame in range(1, iteration + 1):
set_mock_value(flyer.trigger_logic.seq.active, 1)
yield from bps.kickoff(flyer, wait=True)
yield from bps.kickoff(mock_hdf_panda)
Expand All @@ -231,7 +231,7 @@ def flying_plan():
yield from bps.complete(mock_hdf_panda, wait=False, group="complete")

            # Manually increment the index as if a frame was taken
set_mock_value(mock_hdf_panda.data.num_captured, 1)
set_mock_value(mock_hdf_panda.data.num_captured, frame)
set_mock_value(flyer.trigger_logic.seq.active, 0)

done = False
Expand Down

0 comments on commit 2924fb0

Please sign in to comment.