diff --git a/src/ophyd_async/fastcs/panda/_hdf_panda.py b/src/ophyd_async/fastcs/panda/_hdf_panda.py
index 61d2f5a2b3..3469ccf639 100644
--- a/src/ophyd_async/fastcs/panda/_hdf_panda.py
+++ b/src/ophyd_async/fastcs/panda/_hdf_panda.py
@@ -26,7 +26,7 @@ def __init__(
             prefix=prefix,
             path_provider=path_provider,
             name_provider=lambda: name,
-            panda_device=self,
+            panda_data_block=self.data,
         )
         super().__init__(
             controller=controller,
diff --git a/src/ophyd_async/fastcs/panda/_writer.py b/src/ophyd_async/fastcs/panda/_writer.py
index dd5c21993b..449354ddaf 100644
--- a/src/ophyd_async/fastcs/panda/_writer.py
+++ b/src/ophyd_async/fastcs/panda/_writer.py
@@ -16,7 +16,7 @@
     wait_for_value,
 )
 
-from ._block import CommonPandaBlocks
+from ._block import DataBlock
 
 
 class PandaHDFWriter(DetectorWriter):
@@ -27,9 +27,9 @@ def __init__(
         prefix: str,
         path_provider: PathProvider,
         name_provider: NameProvider,
-        panda_device: CommonPandaBlocks,
+        panda_data_block: DataBlock,
     ) -> None:
-        self.panda_device = panda_device
+        self.panda_data_block = panda_data_block
         self._prefix = prefix
         self._path_provider = path_provider
         self._name_provider = name_provider
@@ -42,23 +42,23 @@ async def open(self, multiplier: int = 1) -> Dict[str, DataKey]:
         """Retrieve and get descriptor of all PandA signals marked for capture"""
         # Ensure flushes are immediate
-        await self.panda_device.data.flush_period.set(0)
+        await self.panda_data_block.flush_period.set(0)
 
         self._file = None
-        info = self._path_provider(device_name=self.panda_device.name)
+        info = self._path_provider(device_name=self._name_provider())
 
         # Set the initial values
         await asyncio.gather(
-            self.panda_device.data.hdf_directory.set(info.directory_path),
-            self.panda_device.data.hdf_file_name.set(
+            self.panda_data_block.hdf_directory.set(info.directory_path),
+            self.panda_data_block.hdf_file_name.set(
                 f"{info.filename}.h5",
             ),
-            self.panda_device.data.num_capture.set(0),
+            self.panda_data_block.num_capture.set(0),
             # TODO: Set create_dir_depth once available
             # https://github.com/bluesky/ophyd-async/issues/317
         )
 
         # Wait for it to start, stashing the status that tells us when it finishes
-        await self.panda_device.data.capture.set(True)
+        await self.panda_data_block.capture.set(True)
         if multiplier > 1:
             raise ValueError(
                 "All PandA datasets should be scalar, multiplier should be 1"
@@ -74,7 +74,7 @@ async def _describe(self) -> Dict[str, DataKey]:
         await self._update_datasets()
         describe = {
             ds.data_key: DataKey(
-                source=self.panda_device.data.hdf_directory.source,
+                source=self.panda_data_block.hdf_directory.source,
                 shape=ds.shape,
                 dtype="array" if ds.shape != [1] else "number",
                 dtype_numpy="<f8",
@@ -95,7 +95,7 @@ async def _update_datasets(self) -> None:
         """Load data from the datasets PV on the panda, update internal
         representation of datasets that the panda will write.
""" - capture_table = await self.panda_device.data.datasets.get_value() + capture_table = await self.panda_data_block.datasets.get_value() self._datasets = [ HDFDataset(dataset_name, "/" + dataset_name, [1], multiplier=1) for dataset_name in capture_table["name"] @@ -106,18 +106,18 @@ def matcher(value: int) -> bool: matcher.__name__ = f"index_at_least_{index}" await wait_for_value( - self.panda_device.data.num_captured, matcher, timeout=timeout + self.panda_data_block.num_captured, matcher, timeout=timeout ) async def get_indices_written(self) -> int: - return await self.panda_device.data.num_captured.get_value() + return await self.panda_data_block.num_captured.get_value() async def observe_indices_written( self, timeout=DEFAULT_TIMEOUT ) -> AsyncGenerator[int, None]: """Wait until a specific index is ready to be collected""" async for num_captured in observe_value( - self.panda_device.data.num_captured, timeout + self.panda_data_block.num_captured, timeout ): yield num_captured // self._multiplier @@ -128,8 +128,8 @@ async def collect_stream_docs( if indices_written: if not self._file: self._file = HDFFile( - Path(await self.panda_device.data.hdf_directory.get_value()) - / Path(await self.panda_device.data.hdf_file_name.get_value()), + Path(await self.panda_data_block.hdf_directory.get_value()) + / Path(await self.panda_data_block.hdf_file_name.get_value()), self._datasets, ) for doc in self._file.stream_resources(): @@ -139,6 +139,6 @@ async def collect_stream_docs( # Could put this function as default for StandardDetector async def close(self): - await self.panda_device.data.capture.set( + await self.panda_data_block.capture.set( False, wait=True, timeout=DEFAULT_TIMEOUT ) diff --git a/tests/fastcs/panda/test_writer.py b/tests/fastcs/panda/test_writer.py index d3d666f5e4..6a3ca4a8c8 100644 --- a/tests/fastcs/panda/test_writer.py +++ b/tests/fastcs/panda/test_writer.py @@ -103,8 +103,8 @@ async def mock_writer(tmp_path, mock_panda) -> PandaHDFWriter: writer = PandaHDFWriter( prefix="TEST-PANDA", path_provider=dp, - name_provider=lambda: "test-panda", - panda_device=mock_panda, + name_provider=lambda: mock_panda.name, + panda_data_block=mock_panda.data, ) return writer @@ -114,9 +114,9 @@ async def mock_writer(tmp_path, mock_panda) -> PandaHDFWriter: async def test_open_returns_correct_descriptors( mock_writer: PandaHDFWriter, table: DatasetTable ): - assert hasattr(mock_writer.panda_device, "data") + assert hasattr(mock_writer, "panda_data_block") set_mock_value( - mock_writer.panda_device.data.datasets, + mock_writer.panda_data_block.datasets, table, ) description = await mock_writer.open() # to make capturing status not time out @@ -126,7 +126,7 @@ async def test_open_returns_correct_descriptors( ): assert key == expected_key assert entry == { - "source": mock_writer.panda_device.data.hdf_directory.source, + "source": mock_writer.panda_data_block.hdf_directory.source, "shape": [ 1, ], @@ -138,16 +138,16 @@ async def test_open_returns_correct_descriptors( async def test_open_close_sets_capture(mock_writer: PandaHDFWriter): assert isinstance(await mock_writer.open(), dict) - assert await mock_writer.panda_device.data.capture.get_value() + assert await mock_writer.panda_data_block.capture.get_value() await mock_writer.close() - assert not await mock_writer.panda_device.data.capture.get_value() + assert not await mock_writer.panda_data_block.capture.get_value() async def test_open_sets_file_path_and_name(mock_writer: PandaHDFWriter, tmp_path): await mock_writer.open() - path = await 
mock_writer.panda_device.data.hdf_directory.get_value() - assert path == tmp_path / mock_writer.panda_device.name - name = await mock_writer.panda_device.data.hdf_file_name.get_value() + path = await mock_writer.panda_data_block.hdf_directory.get_value() + assert path == tmp_path / mock_writer._name_provider() + name = await mock_writer.panda_data_block.hdf_file_name.get_value() assert name == "data.h5" @@ -158,16 +158,16 @@ async def test_open_errors_when_multiplier_not_one(mock_writer: PandaHDFWriter): async def test_get_indices_written(mock_writer: PandaHDFWriter): await mock_writer.open() - set_mock_value(mock_writer.panda_device.data.num_captured, 4) + set_mock_value(mock_writer.panda_data_block.num_captured, 4) written = await mock_writer.get_indices_written() assert written == 4 async def test_wait_for_index(mock_writer: PandaHDFWriter): await mock_writer.open() - set_mock_value(mock_writer.panda_device.data.num_captured, 3) + set_mock_value(mock_writer.panda_data_block.num_captured, 3) await mock_writer.wait_for_index(3, timeout=1) - set_mock_value(mock_writer.panda_device.data.num_captured, 2) + set_mock_value(mock_writer.panda_data_block.num_captured, 2) with pytest.raises(TimeoutError): await mock_writer.wait_for_index(3, timeout=0.1) @@ -179,7 +179,7 @@ async def test_collect_stream_docs( table: DatasetTable, ): # Give the mock writer datasets - set_mock_value(mock_writer.panda_device.data.datasets, table) + set_mock_value(mock_writer.panda_data_block.datasets, table) await mock_writer.open()
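
Note (not part of the diff): a rough sketch of how a writer is wired up after this change, mirroring the updated test fixture above. Here path_provider and panda are placeholders for objects the application already has (the tests use a tmp_path-backed provider and a mocked panda), and the import is assumed to come from the public ophyd_async.fastcs.panda namespace; only the keyword arguments themselves are taken from the diff.

    from ophyd_async.fastcs.panda import PandaHDFWriter

    # path_provider and panda are assumed to exist already: a PathProvider and a
    # connected PandA device with a .data DataBlock and a .name attribute.
    writer = PandaHDFWriter(
        prefix="TEST-PANDA",
        path_provider=path_provider,
        name_provider=lambda: panda.name,  # open() resolves the device name via this
        panda_data_block=panda.data,       # only the DataBlock is handed to the writer
    )

Supplying name_provider=lambda: panda.name keeps open() passing the same device_name to the path provider as the old self.panda_device.name lookup did, so file paths are unchanged.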