|
24 | 24 | "\n", |
25 | 25 | "from ess.nmx.types import *\n", |
26 | 26 | "from ess.nmx.reduction import NMXData, NMXReducedData, merge_panels\n", |
27 | | - "from ess.nmx.nexus import export_as_nexus\n", |
| 27 | + "from ess.nmx.types import DetectorIndex\n", |
| 28 | + "\n", |
28 | 29 | "\n", |
29 | 30 | "wf = McStasWorkflow()\n", |
30 | 31 | "# Replace with the path to your own file\n", |
|
57 | 58 | ] |
58 | 59 | }, |
59 | 60 | { |
60 | | - "cell_type": "code", |
61 | | - "execution_count": null, |
| 61 | + "cell_type": "markdown", |
62 | 62 | "metadata": {}, |
63 | | - "outputs": [], |
64 | 63 | "source": [ |
| 64 | + "If you want to reduce all three panels,\n", |
| 65 | + "map the relevant part of the workflow over a list of the three panels:\n", |
| 66 | + "\n", |
| 67 | + "```python\n", |
65 | 68 | "# DetectorIndex selects what detector panels to include in the run\n", |
66 | 69 | "# in this case we select all three panels.\n", |
67 | 70 | "wf[NMXReducedData] = (\n", |
68 | 71 | " wf[NMXReducedData]\n", |
69 | 72 | " .map({DetectorIndex: sc.arange('panel', 3, unit=None)})\n", |
70 | 73 | " .reduce(index=\"panel\", func=merge_panels)\n", |
71 | | - ")" |
| 74 | + ")\n", |
| 75 | + "```\n", |
| 76 | + "\n", |
| 77 | + "However, we encountered memory issues processing the dataset, which is often over 10 GB.\n", |
| 78 | + "Therefore we will not merge the panels at the end of the workflow\n", |
| 79 | + "and will iterate over the detector index instead." |
72 | 80 | ] |
73 | 81 | }, |
74 | 82 | { |
|
100 | 108 | "metadata": {}, |
101 | 109 | "outputs": [], |
102 | 110 | "source": [ |
103 | | - "from cyclebane.graph import NodeName, IndexValues\n", |
| 111 | + "import sciline as sl\n", |
| 112 | + "from contextlib import contextmanager\n", |
| 113 | + "from collections.abc import Generator\n", |
104 | 114 | "\n", |
105 | | - "# Event data grouped by pixel id for each of the selected detectors\n", |
106 | | - "targets = [NodeName(NMXData, IndexValues((\"panel\",), (i,))) for i in range(3)]\n", |
107 | | - "dg = merge_panels(*wf.compute(targets).values())\n", |
108 | | - "dg" |
| 115 | + "\n", |
| 116 | + "@contextmanager\n", |
| 117 | + "def temp_parameter(\n", |
| 118 | + " wf: sl.Pipeline, parameter_type: type, value: Any\n", |
| 119 | + ") -> Generator[sl.Pipeline]:\n", |
| 120 | + " copied = wf.copy()\n", |
| 121 | + " copied[parameter_type] = value\n", |
| 122 | + " yield copied\n", |
| 123 | + " del copied" |
109 | 124 | ] |
110 | 125 | }, |
111 | 126 | { |
|
114 | 129 | "metadata": {}, |
115 | 130 | "outputs": [], |
116 | 131 | "source": [ |
117 | | - "# Data from all selected detectors binned by panel, pixel and timeslice\n", |
118 | | - "binned_dg = wf.compute(NMXReducedData)\n", |
| 132 | + "# Data from the first detector binned by panel, pixel and timeslice\n", |
| 133 | + "with temp_parameter(wf, DetectorIndex, 0) as temp_wf:\n", |
| 134 | + " binned_dg = temp_wf.compute(NMXReducedData)\n", |
| 135 | + "\n", |
119 | 136 | "binned_dg" |
120 | 137 | ] |
121 | 138 | }, |
|
136 | 153 | "metadata": {}, |
137 | 154 | "outputs": [], |
138 | 155 | "source": [ |
139 | | - "export_as_nexus(binned_dg, \"test.nxs\")" |
| 156 | + "from ess.nmx.nexus import export_as_nxlauetof\n", |
| 157 | + "\n", |
| 158 | + "dgs = []\n", |
| 159 | + "for i in range(3):\n", |
| 160 | + " with temp_parameter(wf, DetectorIndex, i) as temp_wf:\n", |
| 161 | + " reduced_data = temp_wf.compute(NMXReducedData)\n", |
| 162 | + " dgs.append(reduced_data)\n", |
| 163 | + " del reduced_data\n", |
| 164 | + "\n", |
| 165 | + "export_as_nxlauetof(*dgs, output_file=\"test.nxs\")" |
| 166 | + ] |
| 167 | + }, |
| 168 | + { |
| 169 | + "cell_type": "code", |
| 170 | + "execution_count": null, |
| 171 | + "metadata": {}, |
| 172 | + "outputs": [], |
| 173 | + "source": [ |
| 174 | + "dgs[0]" |
| 175 | + ] |
| 176 | + }, |
| 177 | + { |
| 178 | + "cell_type": "markdown", |
| 179 | + "metadata": {}, |
| 180 | + "source": [ |
| 181 | + "Legacy version of the export method:\n", |
| 182 | + "```python\n", |
| 183 | + "from ess.nmx.nexus import export_as_nexus\n", |
| 184 | + "\n", |
| 185 | + "export_as_nexus(binned_dg, \"test.nxs\")\n", |
| 186 | + "```" |
| 187 | + ] |
| 188 | + }, |
| 189 | + { |
| 190 | + "cell_type": "markdown", |
| 191 | + "metadata": {}, |
| 192 | + "source": [ |
| 193 | + "## Merge All Panels\n", |
| 194 | + "\n", |
| 195 | + "If you simply want to compute all panels at once, you can use map/reduce on the workflow." |
| 196 | + ] |
| 197 | + }, |
| 198 | + { |
| 199 | + "cell_type": "code", |
| 200 | + "execution_count": null, |
| 201 | + "metadata": {}, |
| 202 | + "outputs": [], |
| 203 | + "source": [ |
| 204 | + "base_wf = wf.copy()\n", |
| 205 | + "detector_panel_ids = {DetectorIndex: sc.arange('panel', 3, unit=None)}\n", |
| 206 | + "pipeline = base_wf.map(detector_panel_ids)\n", |
| 207 | + "pipeline.visualize(\n", |
| 208 | + " sl.get_mapped_node_names(pipeline, NMXData),\n", |
| 209 | + " compact=True,\n", |
| 210 | + ")" |
| 211 | + ] |
| 212 | + }, |
| 213 | + { |
| 214 | + "cell_type": "code", |
| 215 | + "execution_count": null, |
| 216 | + "metadata": {}, |
| 217 | + "outputs": [], |
| 218 | + "source": [ |
| 219 | + "dg = merge_panels(\n", |
| 220 | + " *pipeline.compute(sl.get_mapped_node_names(pipeline, NMXData)).values()\n", |
| 221 | + ")\n", |
| 222 | + "dg" |
140 | 223 | ] |
141 | 224 | }, |
142 | 225 | { |
|
172 | 255 | ], |
173 | 256 | "metadata": { |
174 | 257 | "kernelspec": { |
175 | | - "display_name": "Python 3 (ipykernel)", |
| 258 | + "display_name": "nmx-dev-310", |
176 | 259 | "language": "python", |
177 | 260 | "name": "python3" |
178 | 261 | }, |
|
186 | 269 | "name": "python", |
187 | 270 | "nbconvert_exporter": "python", |
188 | 271 | "pygments_lexer": "ipython3", |
189 | | - "version": "3.10.12" |
| 272 | + "version": "3.10.13" |
190 | 273 | } |
191 | 274 | }, |
192 | 275 | "nbformat": 4, |
|
0 commit comments