diff --git a/docs/api_changes.rst b/docs/api_changes.rst index 98ab950e8..d6c2760c8 100644 --- a/docs/api_changes.rst +++ b/docs/api_changes.rst @@ -2,6 +2,24 @@ Release History ================= +v1.14.3 (2025-08-26) +==================== + +Changed +------- + +- RunEngine now supports both sync and async functions as a `scan_id_source` + +Fixed +----- + +- Fix a regression related to external data present in multiple streams + +v1.14.2 (2025-06-10) +==================== + +TO DO + v1.14.1 (2025-05-21) ==================== diff --git a/docs/documents.rst b/docs/documents.rst index d954f499b..6ee1cc89c 100644 --- a/docs/documents.rst +++ b/docs/documents.rst @@ -165,7 +165,7 @@ The most commonly useful fields here are 'time' and 'exit_status'. 'reason': '', # The RunEngine can provide reason for failure here. 'time': 1442521012.1021606, 'uid': '', - 'start': '', + 'run_start': '', 'num_events': {'primary': 16} } diff --git a/docs/plans.rst b/docs/plans.rst index dacf65ea7..3caecc528 100644 --- a/docs/plans.rst +++ b/docs/plans.rst @@ -484,6 +484,7 @@ Plans for interacting with hardware: unstage configure stop + prepare Plans for asynchronous acquisition: @@ -496,6 +497,7 @@ Plans for asynchronous acquisition: kickoff complete collect + collect_while_completing Plans that control the RunEngine: diff --git a/docs/run_engine.rst b/docs/run_engine.rst index 0b985a180..267cab127 100644 --- a/docs/run_engine.rst +++ b/docs/run_engine.rst @@ -163,7 +163,7 @@ reporting that the addition failed due to a ``TypeError`` finally: yield Msg('print', 'thanks for adding') -Compare the behavior of between ``adding_plan`` and ``addingplan`` in cases +Compare the behavior of ``adding_plan`` and ``safe_adding_plan`` in cases where they succeed .. 
code:: python diff --git a/docs/tiled-writer.rst b/docs/tiled-writer.rst index d6bcb2865..c1af06d52 100644 --- a/docs/tiled-writer.rst +++ b/docs/tiled-writer.rst @@ -14,9 +14,9 @@ It implicitly distinguishes between "internal" and "external" data. The internal On the other hand, the external data are written by detectors directly on disk and usually take the form of images or multidimensional arrays. The references to the external files are provided in `StreamRsource` (`Resource` in legacy implementations) documents, which register the corresponding array-like `DataSources` in Tiled. `StreamDatum` (or `Datum`) documents are processed via the mechanism of `Consolidators` and determine the correspondence between the indexing within these external arrays and the physically-meaningful sequence of timestamps. -The time dimension (or the sequence of measurements) is typically shared between the internal and external data, which in Tiled is enforced by writing all data from the same Bluesky stream into a specialized `Composite` container (node in the Tiled catalog). The metadata on each stream node contains the specifications for the related data keys and the relevavt configuration parameters supplied in the `EventDescriptor` document. +The time dimension (or the sequence of measurements) is typically shared between the internal and external data, which in Tiled is enforced by writing all data from the same Bluesky stream into a specialized `Composite` container (node in the Tiled catalog). The metadata on each stream node contains the specifications for the related data keys and the relevant configuration parameters supplied in the `EventDescriptor` document. -Finally, nodes for multiple streams are groupped together and placed into a container for the entire run; its metadata contains the `Start` and `Stop` documents. 
While the strcuture of the `streams` container is fixed, the parent Run allows for optional user-controlled namespaces within `views` and `aux` containers. The Run container created by TiledWriter is designated with the `BlueskyRun` version `3.0` spec to enable its back-compatibility with legacy code via bluesky-tiled-plugins. +Finally, nodes for multiple streams are grouped together and placed into a container for the entire run; its metadata contains the `Start` and `Stop` documents. While the structure of the `streams` container is fixed, the parent Run allows for optional user-controlled namespaces within `views` and `aux` containers. The Run container created by TiledWriter is designated with the `BlueskyRun` version `3.0` spec to enable its back-compatibility with legacy code via bluesky-tiled-plugins. An example of the Tiled catalog structure for a Bluesky run might look like this: @@ -131,6 +131,7 @@ A minimal simulated example of using TiledWriter in a Bluesky plan is shown belo from bluesky import RunEngine import bluesky.plans as bp + from bluesky.callbacks.tiled_writer import TiledWriter from tiled.server import SimpleTiledServer from tiled.client import from_uri from ophyd.sim import det diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 0d371e4ec..064a7ef71 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -53,7 +53,7 @@ Before You Begin .. 
code-block:: bash - conda install -c nsls2forge bluesky ophyd databroker matplotlib pyqt=5 ipython + conda install -c conda-forge bluesky ophyd databroker matplotlib pyqt=5 ipython * Start IPython: diff --git a/src/bluesky/bundlers.py b/src/bluesky/bundlers.py index e970708b5..0f72f6b49 100644 --- a/src/bluesky/bundlers.py +++ b/src/bluesky/bundlers.py @@ -559,7 +559,8 @@ async def save(self, msg): # we do not have the descriptor cached, make it if descriptor_doc is None or d_objs is None: - for obj in objs_read: + # use the deque, not the set, to preserve order + for obj in self._objs_read: await self._ensure_cached(obj, collect=isinstance(obj, Collectable)) objs_dks[obj] = self._describe_cache[obj] diff --git a/src/bluesky/callbacks/tiled_writer.py b/src/bluesky/callbacks/tiled_writer.py index e76c971c6..bce68b358 100644 --- a/src/bluesky/callbacks/tiled_writer.py +++ b/src/bluesky/callbacks/tiled_writer.py @@ -387,7 +387,8 @@ def descriptor(self, doc: EventDescriptor): self._int_keys.update({k for k, v in data_keys.items() if "external" not in v.keys()}) self._ext_keys.update({k for k, v in data_keys.items() if "external" in v.keys()}) for key in self._ext_keys: - data_keys[key]["external"] = data_keys[key].pop("external", "") # Make sure the value is not None + if key in data_keys: + data_keys[key]["external"] = data_keys[key].pop("external", "") # Make sure the value is not None # Keep a reference to the descriptor name (stream) by its uid self._desc_name_by_uid[doc["uid"]] = doc["name"] diff --git a/src/bluesky/plan_stubs.py b/src/bluesky/plan_stubs.py index d040e4a90..28d1fc1b9 100644 --- a/src/bluesky/plan_stubs.py +++ b/src/bluesky/plan_stubs.py @@ -748,7 +748,7 @@ def input_plan(prompt: str = "") -> MsgGenerator[str]: @plan def prepare(obj: Preparable, *args, group: Optional[Hashable] = None, wait: bool = False, **kwargs): """ - Prepare a device. + Prepare a device ready for trigger or kickoff. 
Parameters ---------- diff --git a/src/bluesky/run_engine.py b/src/bluesky/run_engine.py index f7bd3c543..c7e5bb404 100644 --- a/src/bluesky/run_engine.py +++ b/src/bluesky/run_engine.py @@ -13,7 +13,7 @@ from dataclasses import dataclass from datetime import datetime from enum import Enum -from inspect import Parameter, Signature, iscoroutine +from inspect import iscoroutine from itertools import count from warnings import warn @@ -37,6 +37,7 @@ Stageable, Status, Stoppable, + SyncOrAsync, T, Triggerable, check_supports, @@ -58,6 +59,7 @@ RequestStop, RunEngineInterrupted, SigintHandler, + Subscribers, ensure_generator, normalize_subs_input, single_gen, @@ -195,17 +197,6 @@ def __get__(self, instance, owner): return super().__get__(instance, owner) -# See RunEngine.__call__. -_call_sig = Signature( - [ - Parameter("self", Parameter.POSITIONAL_ONLY), - Parameter("plan", Parameter.POSITIONAL_ONLY), - Parameter("subs", Parameter.POSITIONAL_ONLY, default=None), - Parameter("metadata_kw", Parameter.VAR_KEYWORD), - ] -) - - def default_scan_id_source(md): return md.get("scan_id", 0) + 1 @@ -270,9 +261,9 @@ class RunEngine: Expected return: normalized metadata scan_id_source : callable, optional - a function that will be used to calculate scan_id. Default is to - increment scan_id by 1 each time. However you could pass in a - customized function to get a scan_id from any source. + a (possibly async) function that will be used to calculate scan_id. + Default is to increment scan_id by 1 each time. However you could pass + in a customized function to get a scan_id from any source. 
Expected signature: f(md) Expected return: updated scan_id value @@ -418,7 +409,7 @@ def __init__( context_managers: typing.Optional[list] = None, md_validator: typing.Optional[typing.Callable] = None, md_normalizer: typing.Optional[typing.Callable] = None, - scan_id_source: typing.Optional[typing.Callable] = default_scan_id_source, + scan_id_source: typing.Callable[[dict], SyncOrAsync[int]] = default_scan_id_source, during_task: typing.Optional[DuringTask] = None, call_returns_result: bool = False, ): @@ -872,7 +863,13 @@ def _create_result(self, plan_return): ) return rs - def __call__(self, *args, **metadata_kw): + def __call__( + self, + plan: typing.Iterable[Msg], + subs: typing.Optional[Subscribers] = None, + /, + **metadata_kw: typing.Any, + ) -> typing.Union[RunEngineResult, tuple[str, ...]]: """Execute a plan. Any keyword arguments will be interpreted as metadata and recorded with @@ -905,12 +902,6 @@ def __call__(self, *args, **metadata_kw): """ if self.state == "panicked": raise RuntimeError("The RunEngine is panicked and cannot be recovered. You must restart bluesky.") - # This scheme lets us make 'plan' and 'subs' POSITIONAL ONLY, reserving - # all keyword arguments for user metadata. - arguments = _call_sig.bind(self, *args, **metadata_kw).arguments - plan = arguments["plan"] - subs = arguments.get("subs", None) - metadata_kw = arguments.get("metadata_kw", {}) if "raise_if_interrupted" in metadata_kw: warn( # noqa: B028 "The 'raise_if_interrupted' flag has been removed. The " @@ -993,8 +984,6 @@ def set_blocking_event(future): else: return tuple(self._run_start_uids) - __call__.__signature__ = _call_sig # type: ignore - def resume(self): """Resume a paused plan from the last checkpoint. 
@@ -1857,7 +1846,7 @@ async def _open_run(self, msg): raise IllegalMessageSequence("A 'close_run' message was not received before the 'open_run' message") # Run scan_id calculation method - self.md["scan_id"] = self.scan_id_source(self.md) + self.md["scan_id"] = await maybe_await(self.scan_id_source(self.md)) # For metadata below, info about plan passed to self.__call__ for. plan_type = type(self._plan).__name__ diff --git a/src/bluesky/tests/examples/external_assets.json b/src/bluesky/tests/examples/external_assets.json index c624bd9cd..8d8426159 100644 --- a/src/bluesky/tests/examples/external_assets.json +++ b/src/bluesky/tests/examples/external_assets.json @@ -8,7 +8,8 @@ "plan_type": "generator", "plan_name": "count", "detectors": [ - "det" + "det-obj1", + "det-obj2" ] } }, @@ -16,7 +17,7 @@ "name": "descriptor", "doc": { "configuration": { - "det": { + "det-obj1": { "data": {}, "timestamps": {}, "data_keys": {} @@ -31,7 +32,7 @@ 1 ], "external": "STREAM:", - "object_name": "det" + "object_name": "det-obj1" }, "det-key2": { "source": "file", @@ -43,32 +44,19 @@ 17 ], "external": "STREAM:", - "object_name": "det" - }, - "det-key3": { - "source": "file", - "dtype": "array", - "dtype_numpy": "|u1", - "shape": [ - 1, - 10, - 15 - ], - "external": "STREAM:", - "object_name": "det" + "object_name": "det-obj1" } }, "name": "primary", "object_keys": { - "det": [ + "det-obj1": [ "det-key1", - "det-key2", - "det-key3" + "det-key2" ] }, "run_start": "{{ uuid }}-9724b2201fe7", "time": 1745500521.79327, - "uid": "{{ uuid }}-8c00740d9771", + "uid": "{{ uuid }}-descriptor01", "hints": {} } }, @@ -92,7 +80,7 @@ "name": "stream_datum", "doc": { "stream_resource": "det-key1-uid", - "descriptor": "{{ uuid }}-8c00740d9771", + "descriptor": "{{ uuid }}-descriptor01", "uid": "det-key1-uid/0", "indices": { "start": 0, @@ -126,7 +114,7 @@ "name": "stream_datum", "doc": { "stream_resource": "det-key2-uid", - "descriptor": "{{ uuid }}-8c00740d9771", + "descriptor": "{{ uuid 
}}-descriptor01", "uid": "det-key2-uid/0", "indices": { "start": 0, @@ -138,6 +126,42 @@ } } }, + { + "name": "descriptor", + "doc": { + "configuration": { + "det-obj2": { + "data": {}, + "timestamps": {}, + "data_keys": {} + } + }, + "data_keys": { + "det-key3": { + "source": "file", + "dtype": "array", + "dtype_numpy": "|u1", + "shape": [ + 1, + 10, + 15 + ], + "external": "STREAM:", + "object_name": "det-obj2" + } + }, + "name": "secondary", + "object_keys": { + "det-obj2": [ + "det-key3" + ] + }, + "run_start": "{{ uuid }}-9724b2201fe7", + "time": 1745500521.79337, + "uid": "{{ uuid }}-descriptor02", + "hints": {} + } + }, { "name": "stream_resource", "doc": { @@ -161,7 +185,7 @@ "name": "stream_datum", "doc": { "stream_resource": "det-key3-uid", - "descriptor": "{{ uuid }}-8c00740d9771", + "descriptor": "{{ uuid }}-descriptor02", "uid": "det-key3-uid/0", "indices": { "start": 0, @@ -182,14 +206,14 @@ "timestamps": {}, "seq_num": 1, "filled": {}, - "descriptor": "{{ uuid }}-8c00740d9771" + "descriptor": "{{ uuid }}-descriptor01" } }, { "name": "stream_datum", "doc": { "stream_resource": "det-key1-uid", - "descriptor": "{{ uuid }}-8c00740d9771", + "descriptor": "{{ uuid }}-descriptor01", "uid": "det-key1-uid/1", "indices": { "start": 1, @@ -205,7 +229,7 @@ "name": "stream_datum", "doc": { "stream_resource": "det-key2-uid", - "descriptor": "{{ uuid }}-8c00740d9771", + "descriptor": "{{ uuid }}-descriptor01", "uid": "det-key2-uid/1", "indices": { "start": 1, @@ -221,7 +245,7 @@ "name": "stream_datum", "doc": { "stream_resource": "det-key3-uid", - "descriptor": "{{ uuid }}-8c00740d9771", + "descriptor": "{{ uuid }}-descriptor02", "uid": "det-key3-uid/1", "indices": { "start": 1, @@ -242,14 +266,14 @@ "timestamps": {}, "seq_num": 2, "filled": {}, - "descriptor": "{{ uuid }}-8c00740d9771" + "descriptor": "{{ uuid }}-descriptor01" } }, { "name": "stream_datum", "doc": { "stream_resource": "det-key1-uid", - "descriptor": "{{ uuid }}-8c00740d9771", + "descriptor": "{{ 
uuid }}-descriptor01", "uid": "det-key1-uid/2", "indices": { "start": 2, @@ -265,7 +289,7 @@ "name": "stream_datum", "doc": { "stream_resource": "det-key2-uid", - "descriptor": "{{ uuid }}-8c00740d9771", + "descriptor": "{{ uuid }}-descriptor01", "uid": "det-key2-uid/2", "indices": { "start": 2, @@ -281,7 +305,7 @@ "name": "stream_datum", "doc": { "stream_resource": "det-key3-uid", - "descriptor": "{{ uuid }}-8c00740d9771", + "descriptor": "{{ uuid }}-descriptor02", "uid": "det-key3-uid/2", "indices": { "start": 2, @@ -302,7 +326,7 @@ "timestamps": {}, "seq_num": 3, "filled": {}, - "descriptor": "{{ uuid }}-8c00740d9771" + "descriptor": "{{ uuid }}-descriptor01" } }, { @@ -314,7 +338,8 @@ "exit_status": "success", "reason": "", "num_events": { - "primary": 3 + "primary": 3, + "secondary": 0 } } } diff --git a/src/bluesky/tests/test_run_engine.py b/src/bluesky/tests/test_run_engine.py index d004108ab..8d51622bd 100644 --- a/src/bluesky/tests/test_run_engine.py +++ b/src/bluesky/tests/test_run_engine.py @@ -22,7 +22,7 @@ wait, wait_for, ) -from bluesky.plans import count, grid_scan +from bluesky.plans import count, grid_scan, scan from bluesky.preprocessors import ( SupplementalData, baseline_wrapper, @@ -2128,3 +2128,46 @@ def plan(det): assert len(d.event[desc["uid"]]) == 1 assert stop["num_events"]["primary"] == 2 + + +def test_sync_scan_id_source(RE): + def sync_scan_source(md: dict) -> int: + return 314159 + + RE.scan_id_source = sync_scan_source + RE([Msg("open_run")]) + assert RE.md["scan_id"] == 314159 + + +def test_async_scan_id_source(RE): + async def async_scan_source(md: dict) -> int: + return 42 + + RE.scan_id_source = async_scan_source + RE([Msg("open_run")]) + assert RE.md["scan_id"] == 42 + + +@requires_ophyd +def test_descriptor_order(RE): + from itertools import permutations + + from ophyd import Component, Device, Signal + + class Issue1930(Device): + alpha = Component(Signal, value=1, kind="hinted") + bravo = Component(Signal, value=2, 
kind="hinted") + charlie = Component(Signal, value=3, kind="hinted") + + i1930 = Issue1930(name="i1930") + + for dets in permutations([i1930.alpha, i1930.bravo, i1930.charlie]): + key_order = [d.name for d in dets] + + def check(key_order, name, doc): + if name == "event": + assert list(doc["data"]) == key_order + elif name == "descriptor": + assert list(doc["data_keys"]) == key_order + + RE(scan(dets, i1930.charlie, -1, 1, 2), lambda name, doc, key_order=key_order: check(key_order, name, doc)) diff --git a/src/bluesky/utils/__init__.py b/src/bluesky/utils/__init__.py index 40f982587..b0cd0dd36 100644 --- a/src/bluesky/utils/__init__.py +++ b/src/bluesky/utils/__init__.py @@ -15,7 +15,7 @@ import uuid import warnings from collections import namedtuple -from collections.abc import AsyncIterable, AsyncIterator, Awaitable, Generator, Iterable +from collections.abc import AsyncIterable, AsyncIterator, Awaitable, Generator, Iterable, Sequence from collections.abc import Iterable as TypingIterable from functools import partial, reduce, wraps from inspect import Parameter, Signature @@ -23,6 +23,7 @@ Any, Callable, Optional, + TypedDict, TypeVar, Union, ) @@ -32,6 +33,7 @@ import msgpack_numpy import numpy as np from cycler import Cycler, cycler +from event_model.documents import DocumentType, Event, EventDescriptor, RunStart, RunStop from tqdm import tqdm from tqdm.utils import _screen_shape_wrapper, _term_move_up, _unicode from typing_extensions import TypeIs @@ -93,6 +95,24 @@ def __repr__(self): #: Scalar or iterable of values, one to be applied to each point in a scan ScalarOrIterableFloat = Union[float, TypingIterable[float]] +# Single function to be used as an event listener +Subscriber = Callable[[str, P], Any] + +OneOrMany = Union[P, Sequence[P]] + + +# Mapping from event type to listener or list of listeners +class SubscriberMap(TypedDict, total=False): + all: OneOrMany[Subscriber[DocumentType]] + start: OneOrMany[Subscriber[RunStart]] + stop: 
OneOrMany[Subscriber[RunStop]] + event: OneOrMany[Subscriber[Event]] + descriptor: OneOrMany[Subscriber[EventDescriptor]] + + +# Single listener, multiple listeners or mapping of listeners by event type +Subscribers = Union[OneOrMany[Subscriber[DocumentType]], SubscriberMap] + class RunEngineControlException(Exception): """Exception for signaling within the RunEngine."""