Skip to content

Commit 66636d7

Browse files
committed
Update workflow example.
1 parent b3c6b7f commit 66636d7

File tree

1 file changed

+98
-15
lines changed

1 file changed

+98
-15
lines changed

docs/examples/workflow.ipynb

Lines changed: 98 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,8 @@
2424
"\n",
2525
"from ess.nmx.types import *\n",
2626
"from ess.nmx.reduction import NMXData, NMXReducedData, merge_panels\n",
27-
"from ess.nmx.nexus import export_as_nexus\n",
27+
"from ess.nmx.types import DetectorIndex\n",
28+
"\n",
2829
"\n",
2930
"wf = McStasWorkflow()\n",
3031
"# Replace with the path to your own file\n",
@@ -57,18 +58,25 @@
5758
]
5859
},
5960
{
60-
"cell_type": "code",
61-
"execution_count": null,
61+
"cell_type": "markdown",
6262
"metadata": {},
63-
"outputs": [],
6463
"source": [
64+
"If you want to reduce all three panels,\n",
65+
"map the relevant part of the workflow over a list of the three panels:\n",
66+
"\n",
67+
"```python\n",
6568
"# DetectorIndex selects what detector panels to include in the run\n",
6669
"# in this case we select all three panels.\n",
6770
"wf[NMXReducedData] = (\n",
6871
" wf[NMXReducedData]\n",
6972
" .map({DetectorIndex: sc.arange('panel', 3, unit=None)})\n",
7073
" .reduce(index=\"panel\", func=merge_panels)\n",
71-
")"
74+
")\n",
75+
"```\n",
76+
"\n",
77+
"However, we encountered memory issues while processing the dataset, which is often over 10 GB.\n",
78+
"Therefore we will not merge the panels at the end of the workflow\n",
79+
"and iterate over the detector index instead."
7280
]
7381
},
7482
{
@@ -100,12 +108,19 @@
100108
"metadata": {},
101109
"outputs": [],
102110
"source": [
103-
"from cyclebane.graph import NodeName, IndexValues\n",
111+
"import sciline as sl\n",
112+
"from contextlib import contextmanager\n",
113+
"from collections.abc import Generator\n",
104114
"\n",
105-
"# Event data grouped by pixel id for each of the selected detectors\n",
106-
"targets = [NodeName(NMXData, IndexValues((\"panel\",), (i,))) for i in range(3)]\n",
107-
"dg = merge_panels(*wf.compute(targets).values())\n",
108-
"dg"
115+
"\n",
116+
"@contextmanager\n",
117+
"def temp_parameter(\n",
118+
" wf: sl.Pipeline, parameter_type: type, value: Any\n",
119+
") -> Generator[sl.Pipeline]:\n",
120+
" copied = wf.copy()\n",
121+
" copied[parameter_type] = value\n",
122+
" yield copied\n",
123+
" del copied"
109124
]
110125
},
111126
{
@@ -114,8 +129,10 @@
114129
"metadata": {},
115130
"outputs": [],
116131
"source": [
117-
"# Data from all selected detectors binned by panel, pixel and timeslice\n",
118-
"binned_dg = wf.compute(NMXReducedData)\n",
132+
"# Data from the first detector binned by panel, pixel and timeslice\n",
133+
"with temp_parameter(wf, DetectorIndex, 0) as temp_wf:\n",
134+
" binned_dg = temp_wf.compute(NMXReducedData)\n",
135+
"\n",
119136
"binned_dg"
120137
]
121138
},
@@ -136,7 +153,73 @@
136153
"metadata": {},
137154
"outputs": [],
138155
"source": [
139-
"export_as_nexus(binned_dg, \"test.nxs\")"
156+
"from ess.nmx.nexus import export_as_nxlauetof\n",
157+
"\n",
158+
"dgs = []\n",
159+
"for i in range(3):\n",
160+
" with temp_parameter(wf, DetectorIndex, i) as temp_wf:\n",
161+
" reduced_data = temp_wf.compute(NMXReducedData)\n",
162+
" dgs.append(reduced_data)\n",
163+
" del reduced_data\n",
164+
"\n",
165+
"export_as_nxlauetof(*dgs, output_file=\"test.nxs\")"
166+
]
167+
},
168+
{
169+
"cell_type": "code",
170+
"execution_count": null,
171+
"metadata": {},
172+
"outputs": [],
173+
"source": [
174+
"dgs[0]"
175+
]
176+
},
177+
{
178+
"cell_type": "markdown",
179+
"metadata": {},
180+
"source": [
181+
"Legacy version of the export method:\n",
182+
"```python\n",
183+
"from ess.nmx.nexus import export_as_nexus\n",
184+
"\n",
185+
"export_as_nexus(binned_dg, \"test.nxs\")\n",
186+
"```"
187+
]
188+
},
189+
{
190+
"cell_type": "markdown",
191+
"metadata": {},
192+
"source": [
193+
"## Merge All Panels\n",
194+
"\n",
195+
"If you simply want to compute all panels at once, you can use map/reduce on the workflow."
196+
]
197+
},
198+
{
199+
"cell_type": "code",
200+
"execution_count": null,
201+
"metadata": {},
202+
"outputs": [],
203+
"source": [
204+
"base_wf = wf.copy()\n",
205+
"detector_panel_ids = {DetectorIndex: sc.arange('panel', 3, unit=None)}\n",
206+
"pipeline = base_wf.map(detector_panel_ids)\n",
207+
"pipeline.visualize(\n",
208+
" sl.get_mapped_node_names(pipeline, NMXData),\n",
209+
" compact=True,\n",
210+
")"
211+
]
212+
},
213+
{
214+
"cell_type": "code",
215+
"execution_count": null,
216+
"metadata": {},
217+
"outputs": [],
218+
"source": [
219+
"dg = merge_panels(\n",
220+
" *pipeline.compute(sl.get_mapped_node_names(pipeline, NMXData)).values()\n",
221+
")\n",
222+
"dg"
140223
]
141224
},
142225
{
@@ -172,7 +255,7 @@
172255
],
173256
"metadata": {
174257
"kernelspec": {
175-
"display_name": "Python 3 (ipykernel)",
258+
"display_name": "nmx-dev-310",
176259
"language": "python",
177260
"name": "python3"
178261
},
@@ -186,7 +269,7 @@
186269
"name": "python",
187270
"nbconvert_exporter": "python",
188271
"pygments_lexer": "ipython3",
189-
"version": "3.10.12"
272+
"version": "3.10.13"
190273
}
191274
},
192275
"nbformat": 4,

0 commit comments

Comments
 (0)