Skip to content

Commit

Permalink
Fix root/path kwargs for Stream Resource documents generated by HDFWriter (#86)
Browse files Browse the repository at this point in the history

* Write root path from DirectoryProvider

* Allow DirectoryProvider to be configured with static root

---------

Co-authored-by: Ware, Joseph (DLSLtd,RAL,LSCI) <joseph.ware@diamond.ac.uk>
  • Loading branch information
rosesyrett and DiamondJoseph authored Mar 4, 2024
1 parent e51d1ad commit 701922a
Show file tree
Hide file tree
Showing 6 changed files with 105 additions and 53 deletions.
41 changes: 36 additions & 5 deletions src/ophyd_async/core/_providers.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,30 @@
from abc import abstractmethod
from dataclasses import dataclass
from typing import Protocol, Sequence
from pathlib import Path
from typing import Optional, Protocol, Sequence, Union


@dataclass
class DirectoryInfo:
directory_path: str
filename_prefix: str
"""
Information about where and how to write a file.
The bluesky event model splits the URI for a resource into two segments to aid in
different applications mounting filesystems at different mount points.
The portion of this path which is relevant only for the writer is the 'root',
while the path from an agreed upon mutual mounting is the resource_path.
The resource_dir is used with the filename to construct the resource_path.
:param root: Path of a root directory, relevant only for the file writer
:param resource_dir: Directory into which files should be written, relative to root
:param prefix: Optional filename prefix to add to all files
:param suffix: Optional filename suffix to add to all files
"""

root: Path
resource_dir: Path
prefix: Optional[str] = ""
suffix: Optional[str] = ""


class DirectoryProvider(Protocol):
Expand All @@ -16,8 +34,21 @@ def __call__(self) -> DirectoryInfo:


class StaticDirectoryProvider(DirectoryProvider):
def __init__(self, directory_path: str, filename_prefix: str) -> None:
self._directory_info = DirectoryInfo(directory_path, filename_prefix)
def __init__(
self,
directory_path: Union[str, Path],
filename_prefix: str = "",
filename_suffix: str = "",
resource_dir: Path = Path("."),
) -> None:
if isinstance(directory_path, str):
directory_path = Path(directory_path)
self._directory_info = DirectoryInfo(
root=directory_path,
resource_dir=resource_dir,
prefix=filename_prefix,
suffix=filename_suffix,
)

def __call__(self) -> DirectoryInfo:
return self._directory_info
Expand Down
20 changes: 17 additions & 3 deletions src/ophyd_async/epics/areadetector/writers/_hdffile.py
Original file line number Diff line number Diff line change
@@ -1,19 +1,33 @@
from pathlib import Path
from typing import Iterator, List

from event_model import StreamDatum, StreamResource, compose_stream_resource

from ophyd_async.core import DirectoryInfo

from ._hdfdataset import _HDFDataset


class _HDFFile:
def __init__(self, full_file_name: str, datasets: List[_HDFDataset]) -> None:
"""
:param directory_info: Contains information about how to construct a StreamResource
:param full_file_name: Absolute path to the file to be written
:param datasets: Datasets to write into the file
"""

def __init__(
self,
directory_info: DirectoryInfo,
full_file_name: Path,
datasets: List[_HDFDataset],
) -> None:
self._last_emitted = 0
self._bundles = [
compose_stream_resource(
spec="AD_HDF5_SWMR_SLICE",
root="/",
root=str(directory_info.root),
data_key=ds.name,
resource_path=full_file_name,
resource_path=str(full_file_name.relative_to(directory_info.root)),
resource_kwargs={
"path": ds.path,
"multiplier": ds.multiplier,
Expand Down
13 changes: 9 additions & 4 deletions src/ophyd_async/epics/areadetector/writers/hdf_writer.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import asyncio
from pathlib import Path
from typing import AsyncIterator, Dict, List, Optional

from bluesky.protocols import Asset, Descriptor, Hints
Expand Down Expand Up @@ -45,15 +46,16 @@ async def open(self, multiplier: int = 1) -> Dict[str, Descriptor]:
self.hdf.num_extra_dims.set(0),
self.hdf.lazy_open.set(True),
self.hdf.swmr_mode.set(True),
self.hdf.file_path.set(info.directory_path),
self.hdf.file_name.set(f"{info.filename_prefix}{self.hdf.name}"),
# See https://github.com/bluesky/ophyd-async/issues/122
self.hdf.file_path.set(str(info.root / info.resource_dir)),
self.hdf.file_name.set(f"{info.prefix}{self.hdf.name}{info.suffix}"),
self.hdf.file_template.set("%s/%s.h5"),
self.hdf.file_write_mode.set(FileWriteMode.stream),
)

assert (
await self.hdf.file_path_exists.get_value()
), f"File path {info.directory_path} for hdf plugin does not exist"
), f"File path {self.hdf.file_path.get_value()} for hdf plugin does not exist"

# Overwrite num_capture to go forever
await self.hdf.num_capture.set(0)
Expand Down Expand Up @@ -107,7 +109,10 @@ async def collect_stream_docs(self, indices_written: int) -> AsyncIterator[Asset
if indices_written:
if not self._file:
self._file = _HDFFile(
await self.hdf.full_file_name.get_value(), self._datasets
self._directory_provider(),
# See https://github.com/bluesky/ophyd-async/issues/122
Path(await self.hdf.full_file_name.get_value()),
self._datasets,
)
for doc in self._file.stream_resources():
yield "stream_resource", doc
Expand Down
12 changes: 0 additions & 12 deletions tests/core/test_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,18 +13,6 @@
from ophyd_async.epics.signal import epics_signal_rw


def test_static_directory_provider():
"""NOTE: this is a dummy test.
It should be removed once detectors actually implement directory providers.
This will happen in a soon to be developed PR.
"""
dir_path, filename = "some/path", "test_file"
provider = StaticDirectoryProvider(dir_path, filename)

assert provider() == DirectoryInfo(dir_path, filename)


class ValueErrorBackend(SimSignalBackend):
def __init__(self, exc_text=""):
self.exc_text = exc_text
Expand Down
9 changes: 5 additions & 4 deletions tests/epics/areadetector/test_scans.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import asyncio
from pathlib import Path
from typing import Optional
from unittest.mock import AsyncMock, Mock, patch
from unittest.mock import AsyncMock, patch

import bluesky.plan_stubs as bps
import bluesky.plans as bp
Expand All @@ -12,10 +13,10 @@
DetectorControl,
DetectorTrigger,
DeviceCollector,
DirectoryInfo,
HardwareTriggeredFlyable,
SameTriggerDetectorGroupLogic,
StandardDetector,
StaticDirectoryProvider,
TriggerInfo,
TriggerLogic,
set_sim_value,
Expand Down Expand Up @@ -67,13 +68,13 @@ def controller(RE) -> ADSimController:


@pytest.fixture
def writer(RE) -> HDFWriter:
def writer(RE, tmp_path: Path) -> HDFWriter:
with DeviceCollector(sim=True):
hdf = NDFileHDF("HDF")

return HDFWriter(
hdf,
directory_provider=Mock(return_value=DirectoryInfo("somepath", "someprefix")),
directory_provider=StaticDirectoryProvider(tmp_path),
name_provider=lambda: "test",
shape_provider=AsyncMock(),
)
Expand Down
63 changes: 38 additions & 25 deletions tests/epics/demo/test_demo_ad_sim_detector.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
DeviceCollector,
StandardDetector,
StaticDirectoryProvider,
set_sim_callback,
set_sim_value,
)
from ophyd_async.epics.areadetector.controllers import ADSimController
Expand All @@ -22,11 +23,9 @@
from ophyd_async.epics.areadetector.writers import HDFWriter, NDFileHDF
from ophyd_async.epics.demo.demo_ad_sim_detector import DemoADSimDetector

CURRENT_DIRECTORY = Path(__file__).parent


async def make_detector(prefix="", name="test"):
dp = StaticDirectoryProvider(CURRENT_DIRECTORY, f"test-{new_uid()}")
async def make_detector(prefix: str, name: str, tmp_path: Path):
dp = StaticDirectoryProvider(tmp_path, f"test-{new_uid()}")

async with DeviceCollector(sim=True):
drv = ADBase(f"{prefix}DRV:")
Expand All @@ -35,6 +34,11 @@ async def make_detector(prefix="", name="test"):
drv, hdf, dp, config_sigs=[drv.acquire_time, drv.acquire], name=name
)

def _set_full_file_name(_, val):
set_sim_value(hdf.full_file_name, str(tmp_path / val))

set_sim_callback(hdf.file_name, _set_full_file_name)

return det


Expand Down Expand Up @@ -75,18 +79,18 @@ def count_sim(dets: List[StandardDetector], times: int = 1):


@pytest.fixture
async def single_detector(RE: RunEngine) -> StandardDetector:
detector = await make_detector(prefix="TEST:")
async def single_detector(RE: RunEngine, tmp_path: Path) -> StandardDetector:
detector = await make_detector(prefix="TEST:", name="test", tmp_path=tmp_path)

set_sim_value(detector._controller.driver.array_size_x, 10)
set_sim_value(detector._controller.driver.array_size_y, 20)
return detector


@pytest.fixture
async def two_detectors():
deta = await make_detector(prefix="PREFIX1:", name="testa")
detb = await make_detector(prefix="PREFIX2:", name="testb")
async def two_detectors(tmp_path: Path):
deta = await make_detector(prefix="PREFIX1:", name="testa", tmp_path=tmp_path)
detb = await make_detector(prefix="PREFIX2:", name="testb", tmp_path=tmp_path)

# Simulate backend IOCs being in slightly different states
for i, det in enumerate((deta, detb)):
Expand Down Expand Up @@ -145,11 +149,19 @@ async def test_two_detectors_step(
info_a = writer_a._directory_provider()
info_b = writer_b._directory_provider()

assert await writer_a.hdf.file_path.get_value() == info_a.directory_path
assert (await writer_a.hdf.file_name.get_value()).startswith(info_a.filename_prefix)
assert await writer_a.hdf.file_path.get_value() == str(
info_a.root / info_a.resource_dir
)
file_name_a = await writer_a.hdf.file_name.get_value()
assert file_name_a.startswith(info_a.prefix)
assert file_name_a.endswith(info_a.suffix)

assert await writer_b.hdf.file_path.get_value() == info_b.directory_path
assert (await writer_b.hdf.file_name.get_value()).startswith(info_b.filename_prefix)
assert await writer_b.hdf.file_path.get_value() == str(
info_b.root / info_b.resource_dir
)
file_name_b = await writer_b.hdf.file_name.get_value()
assert file_name_b.startswith(info_b.prefix)
assert file_name_b.endswith(info_b.suffix)

_, descriptor, sra, sda, srb, sdb, event, _ = docs
assert descriptor["configuration"]["testa"]["data"]["testa-drv-acquire_time"] == 0.8
Expand All @@ -158,11 +170,15 @@ async def test_two_detectors_step(
assert descriptor["data_keys"]["testb"]["shape"] == (769, 1025)
assert sda["stream_resource"] == sra["uid"]
assert sdb["stream_resource"] == srb["uid"]
assert sra["root"] == str(info_a.root)
assert sra["resource_path"] == str(info_a.resource_dir / file_name_a)
assert srb["root"] == str(info_b.root)
assert srb["resource_path"] == str(info_b.resource_dir / file_name_b)
assert event["data"] == {}


async def test_detector_writes_to_file(
RE: RunEngine, single_detector: StandardDetector
RE: RunEngine, single_detector: StandardDetector, tmp_path: Path
):
names = []
docs = []
Expand All @@ -172,10 +188,9 @@ async def test_detector_writes_to_file(

RE(count_sim([single_detector], times=3))

assert (
await cast(HDFWriter, single_detector.writer).hdf.file_path.get_value()
== CURRENT_DIRECTORY
)
assert await cast(
HDFWriter, single_detector.writer
).hdf.file_path.get_value() == str(tmp_path)

descriptor_index = names.index("descriptor")

Expand Down Expand Up @@ -243,22 +258,20 @@ async def test_trigger_logic():
...


async def test_detector_with_unnamed_or_disconnected_config_sigs(
RE, prefix="", name="test"
):
dp = StaticDirectoryProvider(CURRENT_DIRECTORY, f"test-{new_uid()}")
drv = ADBase(f"{prefix}DRV:")
async def test_detector_with_unnamed_or_disconnected_config_sigs(RE, tmp_path: Path):
dp = StaticDirectoryProvider(tmp_path)
drv = ADBase("FOO:DRV:")

some_other_driver = ADBase("TEST")

async with DeviceCollector(sim=True):
hdf = NDFileHDF(f"{prefix}HDF:")
hdf = NDFileHDF("FOO:HDF:")
det = DemoADSimDetector(
drv,
hdf,
dp,
config_sigs=[some_other_driver.acquire_time, drv.acquire],
name=name,
name="foo",
)

with pytest.raises(Exception) as exc:
Expand Down

0 comments on commit 701922a

Please sign in to comment.