Skip to content

Commit

Permalink
Merge pull request #235 from scipp/module-parsing
Browse files Browse the repository at this point in the history
Allow multiple field names to parse dtype or units and add more tests.
  • Loading branch information
YooSunYoung authored Sep 27, 2024
2 parents c16643a + ae90ef7 commit f353cd4
Show file tree
Hide file tree
Showing 3 changed files with 56 additions and 13 deletions.
5 changes: 2 additions & 3 deletions src/beamlime/applications/_nexus_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -232,8 +232,8 @@ def collect_streaming_modules(
# Modules do not have name so we remove the last element(None)
path=(parent_path := path[:-1]),
parent=cast(dict, find_nexus_structure(structure, parent_path)),
dtype=config.get("dtype"),
value_units=config.get("value_units"),
dtype=config.get("dtype", config.get("type")),
value_units=config.get("value_units", config.get("units")),
),
)
for path, node in iter_nexus_structure(structure)
Expand All @@ -252,7 +252,6 @@ def collect_streaming_modules(
_validate_ev44_module_spec(value)
elif key.module_type == 'f144':
_validate_f144_module_spec(value)

return key_value_dict


Expand Down
44 changes: 34 additions & 10 deletions tests/applications/nexus_helpers_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,14 @@ def ymir_streaming_modules(ymir: dict) -> dict[StreamModuleKey, StreamModuleValu
return collect_streaming_modules(ymir)


def _find_attributes(group: dict[str, list[dict]], attr_name: str) -> dict:
attributes = group.get("attributes", [])
try:
return next(attr for attr in attributes if attr["name"] == attr_name)
except StopIteration as e:
raise KeyError(f"Attribute {attr_name} not found in {group}") from e


def test_iter_nexus_structure() -> None:
expected_keys = [(), ('a',), ('a', 'c'), ('b',)]
test_structure = {
Expand Down Expand Up @@ -154,15 +162,15 @@ def _is_class(partial_structure: Mapping, cls_name: str) -> bool:
)


def _is_detector(c: Mapping) -> bool:
    """Check whether the structure ``c`` carries the ``NXdetector`` class."""
    detector_class_name = "NXdetector"
    return _is_class(c, detector_class_name)


def _is_event_data(c: Mapping) -> bool:
    """Check whether the structure ``c`` carries the ``NXevent_data`` class."""
    event_data_class_name = "NXevent_data"
    return _is_class(c, event_data_class_name)


def _find_event_time_zerov_values(c: Mapping) -> np.ndarray:
def _get_values(c: Mapping) -> np.ndarray:
return c["config"]["values"]


def _find_event_time_zero_values(c: Mapping) -> np.ndarray:
    """Return the values of the ``event_time_zero`` dataset within ``c``.

    Reuses ``_get_values`` for the ``["config"]["values"]`` lookup instead of
    duplicating the indexing inline, keeping the two helpers consistent.
    """
    return _get_values(find_nexus_structure(c, ("event_time_zero",)))


Expand Down Expand Up @@ -206,26 +214,40 @@ def test_ev44_module_merging(
assert stored_value['name'] == 'ymir_detector_events'
assert len(stored_value['children']) == 4 # 4 datasets
# Test event time zero
event_time_zero_values = _find_event_time_zerov_values(stored_value)
event_time_zero = find_nexus_structure(stored_value, ("event_time_zero",))
event_time_zero_values = _get_values(event_time_zero)
event_time_zero_unit = _find_attributes(event_time_zero, "units")
assert event_time_zero_unit["values"] == "ns"
inserted_event_time_zeros = np.concatenate(
[d["reference_time"] for d in stored_data[key]]
)
assert np.all(event_time_zero_values == inserted_event_time_zeros)
# Test event time offset
event_time_offset_values = _find_event_time_offset_values(stored_value)
event_time_offset = find_nexus_structure(stored_value, ("event_time_offset",))
event_time_offset_values = _get_values(event_time_offset)
event_time_offset_unit = _find_attributes(event_time_offset, "units")
assert event_time_offset_unit["values"] == "ns"
inserted_event_time_offsets = np.concatenate(
[d["time_of_flight"] for d in stored_data[key]]
)
assert np.all(event_time_offset_values == inserted_event_time_offsets)
# Test event id
event_id_values = _find_event_id_values(stored_value)
event_id = find_nexus_structure(stored_value, ("event_id",))
event_id_values = _get_values(event_id)
with pytest.raises(KeyError, match="units"):
# Event id should not have units
_find_attributes(event_id, "units")
inserted_event_ids = np.concatenate([d["pixel_id"] for d in stored_data[key]])
assert np.all(event_id_values == inserted_event_ids)
# Test event index
# event index values are calculated based on the length of the previous events
first_event_length = len(stored_data[key][0]["time_of_flight"])
expected_event_indices = np.array([0, first_event_length])
event_index_values = _find_event_index_values(stored_value)
event_index = find_nexus_structure(stored_value, ("event_index",))
event_index_values = _get_values(event_index)
with pytest.raises(KeyError, match="units"):
# Event index should not have units
_find_attributes(event_index, "units")
assert np.all(event_index_values == expected_event_indices)


Expand Down Expand Up @@ -351,7 +373,9 @@ def test_f144_merge(nexus_template_with_streamed_log, shape, dtype):
times = find_nexus_structure(stored_value, ('time',))
assert times['module'] == 'dataset'
assert values['config']['values'].shape[1:] == shape
assert values['attributes'][0]['values'] == 'km'
unit_attr = _find_attributes(values, 'units')
assert unit_attr['values'] == 'km'
assert unit_attr['dtype'] == 'string'


@pytest.fixture()
Expand Down
20 changes: 20 additions & 0 deletions tests/applications/streaming_module_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -187,3 +187,23 @@ def test_collect_streaming_modules_tdct(ymir: dict) -> None:
# but tdct parents have many children
for expected_key in expected_tdct_keys:
assert expected_key in modules


def test_collect_streaming_module_alternative_field_name_dtype() -> None:
    """``type`` is accepted as a fallback field name for ``dtype``."""
    placeholder = _make_group_with_module_place_holder(
        "", "f144", source="_", topic="_", type="double", value_units=""
    )
    structure = {"children": [placeholder]}
    parsed_modules = collect_streaming_modules(structure)
    expected_key = StreamModuleKey("f144", "_", "_")
    assert expected_key in parsed_modules
    assert parsed_modules[expected_key].dtype == "double"


def test_collect_streaming_module_alternative_field_name_units() -> None:
    """``units`` is accepted as a fallback field name for ``value_units``."""
    placeholder = _make_group_with_module_place_holder(
        "", "f144", source="_", topic="_", dtype="", units="s"
    )
    structure = {"children": [placeholder]}
    parsed_modules = collect_streaming_modules(structure)
    expected_key = StreamModuleKey("f144", "_", "_")
    assert expected_key in parsed_modules
    assert parsed_modules[expected_key].value_units == "s"

0 comments on commit f353cd4

Please sign in to comment.