diff --git a/.copier-answers.yml b/.copier-answers.yml index 1799bcae..16cc748d 100644 --- a/.copier-answers.yml +++ b/.copier-answers.yml @@ -5,7 +5,7 @@ description: Diffraction data reduction for the European Spallation Source max_python: '3.12' min_python: '3.10' namespace_package: ess -nightly_deps: scipp,scippnexus,sciline,plopp,scippneutron,essreduce +nightly_deps: scipp,scippnexus,sciline,plopp,scippneutron,essreduce,tof orgname: scipp prettyname: ESSdiffraction projectname: essdiffraction diff --git a/docs/user-guide/dream/dream-data-reduction.ipynb b/docs/user-guide/dream/dream-data-reduction.ipynb index f3a404c7..f0be972d 100644 --- a/docs/user-guide/dream/dream-data-reduction.ipynb +++ b/docs/user-guide/dream/dream-data-reduction.ipynb @@ -20,7 +20,8 @@ "outputs": [], "source": [ "import scipp as sc\n", - "\n", + "from scippneutron.chopper import DiskChopper\n", + "from ess.reduce import time_of_flight\n", "from ess import dream, powder\n", "import ess.dream.data # noqa: F401\n", "from ess.powder.types import *" ] }, @@ -71,10 +72,8 @@ "workflow[UncertaintyBroadcastMode] = UncertaintyBroadcastMode.drop\n", "# Edges for binning in d-spacing\n", "workflow[DspacingBins] = sc.linspace(\"dspacing\", 0.0, 2.3434, 201, unit=\"angstrom\")\n", - "# Mask in time-of-flight to crop to valid range\n", - "workflow[TofMask] = lambda x: (x < sc.scalar(0.0, unit=\"ns\")) | (\n", - " x > sc.scalar(86e6, unit=\"ns\")\n", - ")\n", + "# Empty masks by default\n", + "workflow[TofMask] = None\n", "workflow[TwoThetaMask] = None\n", "workflow[WavelengthMask] = None\n", "# No pixel masks\n", @@ -86,9 +85,24 @@ "id": "6", "metadata": {}, "source": [ - "## Use the workflow\n", + "## Computing time-of-flight\n", "\n", - "We can visualize the graph for computing the final normalized result for intensity as a function of time-of-flight:" + "In the data files, the `event_time_offset` is recorded for every event.\n", + "This is not the same as the time-of-flight of the neutrons;\n", + "it simply records the time elapsed between the last neutron pulse and the arrival of the neutron at the detector.\n", + "The detected neutron could have originated from a pulse prior to the last one,\n", + "and the data thus needs to be 'unwrapped'.\n", + "\n", + "In addition, Dream's pulse-shaping choppers provide information which can be used to better estimate the real time-of-flight (and hence wavelength) of the neutrons.\n", + "We use a dedicated sub-workflow to compute the time-of-flight (`tof`) of the neutrons.\n", + "\n", + "### Chopper information\n", + "\n", + "To be able to compute an accurate time-of-flight,\n", + "we require the parameters of the choppers in the beamline.\n", + "\n", + "These would typically be read directly from the input NeXus file,\n", + "but we write them explicitly here."
] }, { @@ -98,13 +112,142 @@ "metadata": {}, "outputs": [], "source": [ - "workflow.visualize([IofTof, ReducedTofCIF], graph_attr={\"rankdir\": \"LR\"})" + "psc1 = DiskChopper(\n", + " frequency=sc.scalar(14.0, unit=\"Hz\"),\n", + " beam_position=sc.scalar(0.0, unit=\"deg\"),\n", + " phase=sc.scalar(286 - 180, unit=\"deg\"),\n", + " axle_position=sc.vector(value=[0, 0, 6.145], unit=\"m\"),\n", + " slit_begin=sc.array(\n", + " dims=[\"cutout\"],\n", + " values=[-1.23, 70.49, 84.765, 113.565, 170.29, 271.635, 286.035, 301.17],\n", + " unit=\"deg\",\n", + " ),\n", + " slit_end=sc.array(\n", + " dims=[\"cutout\"],\n", + " values=[1.23, 73.51, 88.035, 116.835, 175.31, 275.565, 289.965, 303.63],\n", + " unit=\"deg\",\n", + " ),\n", + " slit_height=sc.scalar(10.0, unit=\"cm\"),\n", + " radius=sc.scalar(30.0, unit=\"cm\"),\n", + ")\n", + "\n", + "psc2 = DiskChopper(\n", + " frequency=sc.scalar(-14.0, unit=\"Hz\"),\n", + " beam_position=sc.scalar(0.0, unit=\"deg\"),\n", + " phase=sc.scalar(-236, unit=\"deg\"),\n", + " axle_position=sc.vector(value=[0, 0, 6.155], unit=\"m\"),\n", + " slit_begin=sc.array(\n", + " dims=[\"cutout\"],\n", + " values=[-1.23, 27.0, 55.8, 142.385, 156.765, 214.115, 257.23, 315.49],\n", + " unit=\"deg\",\n", + " ),\n", + " slit_end=sc.array(\n", + " dims=[\"cutout\"],\n", + " values=[1.23, 30.6, 59.4, 145.615, 160.035, 217.885, 261.17, 318.11],\n", + " unit=\"deg\",\n", + " ),\n", + " slit_height=sc.scalar(10.0, unit=\"cm\"),\n", + " radius=sc.scalar(30.0, unit=\"cm\"),\n", + ")\n", + "\n", + "oc = DiskChopper(\n", + " frequency=sc.scalar(14.0, unit=\"Hz\"),\n", + " beam_position=sc.scalar(0.0, unit=\"deg\"),\n", + " phase=sc.scalar(297 - 180 - 90, unit=\"deg\"),\n", + " axle_position=sc.vector(value=[0, 0, 6.174], unit=\"m\"),\n", + " slit_begin=sc.array(dims=[\"cutout\"], values=[-27.6 * 0.5], unit=\"deg\"),\n", + " slit_end=sc.array(dims=[\"cutout\"], values=[27.6 * 0.5], unit=\"deg\"),\n", + " slit_height=sc.scalar(10.0, unit=\"cm\"),\n", + " radius=sc.scalar(30.0, unit=\"cm\"),\n", + ")\n", + "\n", + "bcc = DiskChopper(\n", + " frequency=sc.scalar(112.0, unit=\"Hz\"),\n", + " beam_position=sc.scalar(0.0, unit=\"deg\"),\n", + " phase=sc.scalar(215 - 180, unit=\"deg\"),\n", + " # phase=sc.scalar(240 - 180, unit=\"deg\"),\n", + " axle_position=sc.vector(value=[0, 0, 9.78], unit=\"m\"),\n", + " slit_begin=sc.array(dims=[\"cutout\"], values=[-36.875, 143.125], unit=\"deg\"),\n", + " slit_end=sc.array(dims=[\"cutout\"], values=[36.875, 216.875], unit=\"deg\"),\n", + " slit_height=sc.scalar(10.0, unit=\"cm\"),\n", + " radius=sc.scalar(30.0, unit=\"cm\"),\n", + ")\n", + "\n", + "t0 = DiskChopper(\n", + " frequency=sc.scalar(28.0, unit=\"Hz\"),\n", + " beam_position=sc.scalar(0.0, unit=\"deg\"),\n", + " phase=sc.scalar(280 - 180, unit=\"deg\"),\n", + " axle_position=sc.vector(value=[0, 0, 13.05], unit=\"m\"),\n", + " slit_begin=sc.array(dims=[\"cutout\"], values=[-314.9 * 0.5], unit=\"deg\"),\n", + " slit_end=sc.array(dims=[\"cutout\"], values=[314.9 * 0.5], unit=\"deg\"),\n", + " slit_height=sc.scalar(10.0, unit=\"cm\"),\n", + " radius=sc.scalar(30.0, unit=\"cm\"),\n", + ")\n", + "\n", + "disk_choppers = {\"psc1\": psc1, \"psc2\": psc2, \"oc\": oc, \"bcc\": bcc, \"t0\": t0}" ] }, { "cell_type": "markdown", "id": "8", "metadata": {}, + "source": [ + "### Setting up the time-of-flight sub-workflow\n", + "\n", + "The `tof` workflow requires a simulation of a source of neutrons illuminating the instrument chopper cascade, which can then be used to predict the neutron wavelength as a
function of arrival time,\n", + "for any distance after the last chopper in the beamline.\n", + "\n", + "Here we use a simple simulation performed with the [Tof](https://tof.readthedocs.io/) package,\n", + "but more advanced McStas simulations can also be used." ] }, { "cell_type": "code", "execution_count": null, "id": "9", "metadata": {}, "outputs": [], "source": [ "tof_wf = time_of_flight.TofWorkflow(\n", " simulated_neutrons=time_of_flight.simulate_beamline(\n", " choppers=disk_choppers,\n", " neutrons=5_000_000,\n", " ),\n", " ltotal_range=(sc.scalar(70.0, unit=\"m\"), sc.scalar(80.0, unit=\"m\")),\n", " error_threshold=0.013, # Relative error threshold to mask out regions of arrival time overlap\n", ")\n", "\n", "# Save expensive steps from being re-computed many times\n", "tof_wf.cache_results()\n", "\n", "# Insert the sub-workflow into the main reduction\n", "workflow[TofWorkflow] = tof_wf" ] }, { "cell_type": "markdown", "id": "10", "metadata": {}, "source": [ "## Use the reduction workflow\n", "\n", "We can visualize the graph for computing the final normalized result for intensity as a function of time-of-flight:" ] }, { "cell_type": "code", "execution_count": null, "id": "11", "metadata": {}, "outputs": [], "source": [ "workflow.visualize([IofTof, ReducedTofCIF], graph_attr={\"rankdir\": \"LR\"})" ] }, { "cell_type": "markdown", "id": "12", "metadata": {}, "source": [ "We then call `compute()` to compute the result:\n", "(The `cif` object will later be used to write the result to disk.)" @@ -113,7 +256,7 @@ { "cell_type": "code", "execution_count": null, - "id": "9", + "id": "13", "metadata": {}, "outputs": [], "source": [ @@ -125,7 +268,7 @@ { "cell_type": "code", "execution_count": null, - "id": "10", + "id": "14", "metadata": {}, "outputs": [], "source": [ @@ -135,7 +278,7 @@ }, { "cell_type": "markdown", - "id": "11", + "id": "15", "metadata": {}, "source": [ "We can now save the result to disk:" @@ -144,7 +287,7 @@ { "cell_type": "code", "execution_count": null, - "id": "12", + "id": "16", "metadata": {}, "outputs": [], "source": [ "in the documentation of ESSdiffraction.\n", "See https://scipp.github.io/essdiffraction/\n", "\"\"\"\n", - "cif_data.save('reduced.cif')" + "cif_data.save(\"reduced.cif\")" ] }, { "cell_type": "markdown", - "id": "13", + "id": "17", "metadata": {}, "source": [ "## Compute intermediate results\n", @@ -170,7 +313,7 @@ { "cell_type": "code", "execution_count": null, - "id": "14", + "id": "18", "metadata": {}, "outputs": [], "source": [ @@ -187,19 +330,19 @@ { "cell_type": "code", "execution_count": null, - "id": "15", + "id": "19", "metadata": {}, "outputs": [], "source": [ "two_theta = sc.linspace(\"two_theta\", 0.8, 2.4, 301, unit=\"rad\")\n", - "intermediates[MaskedData[SampleRun]].hist(two_theta=two_theta, wavelength=300).plot(\n", - " norm=\"log\"\n", - ")" + "intermediates[MaskedData[SampleRun]].bins.concat(\"event_time_zero\").hist(\n", + " two_theta=two_theta, wavelength=300\n", + ").plot(norm=\"log\")" ] }, { "cell_type": "markdown", - "id": "16", + "id": "20", "metadata": {}, "source": [ "## Grouping by scattering angle\n", @@ -211,48 +354,30 @@ { "cell_type": "code", "execution_count": null, - "id": "17", + "id": "21", "metadata": {}, "outputs": [], "source": [ "workflow[TwoThetaBins] = sc.linspace(\n", - " dim=\"two_theta\", unit=\"rad\", start=0.8, stop=2.4, num=17\n", + " dim=\"two_theta\", unit=\"rad\",
start=0.8, stop=2.4, num=201\n", ")" ] }, { "cell_type": "code", "execution_count": null, - "id": "18", + "id": "22", "metadata": {}, "outputs": [], "source": [ - "grouped_dspacing = workflow.compute(IofDspacingTwoTheta)\n", + "grouped_dspacing = workflow.compute(IofDspacingTwoTheta).bins.concat(\"event_time_zero\")\n", "grouped_dspacing" ] }, { "cell_type": "code", "execution_count": null, - "id": "19", - "metadata": {}, - "outputs": [], - "source": [ - "angle = sc.midpoints(grouped_dspacing.coords[\"two_theta\"])\n", - "sc.plot(\n", - " {\n", - " f\"{angle[group].value:.3f} {angle[group].unit}\": grouped_dspacing[\n", - " \"two_theta\", group\n", - " ].hist()\n", - " for group in range(2, 6)\n", - " }\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "20", + "id": "23", "metadata": {}, "outputs": [], "source": [ @@ -261,7 +386,7 @@ }, { "cell_type": "markdown", - "id": "21", + "id": "24", "metadata": {}, "source": [ "## Normalizing by monitor\n", @@ -283,7 +408,7 @@ { "cell_type": "code", "execution_count": null, - "id": "22", + "id": "25", "metadata": {}, "outputs": [], "source": [ @@ -292,7 +417,7 @@ }, { "cell_type": "markdown", - "id": "23", + "id": "26", "metadata": {}, "source": [ "In addition to the parameters used before, we also need to provide filenames for the monitor data and the position of the monitor, as it is not saved in the simulation files:" ] }, { "cell_type": "code", "execution_count": null, - "id": "24", + "id": "27", "metadata": {}, "outputs": [], "source": [ "workflow[MonitorFilename[SampleRun]] = dream.data.simulated_monitor_diamond_sample()\n", "workflow[MonitorFilename[VanadiumRun]] = dream.data.simulated_monitor_vanadium_sample()\n", "workflow[MonitorFilename[BackgroundRun]] = dream.data.simulated_monitor_empty_can()\n", - "workflow[CaveMonitorPosition] = sc.vector([0.0, 0.0, -4220.0], unit='mm')\n", + "workflow[CaveMonitorPosition] = sc.vector([0.0, 0.0, -4220.0], unit=\"mm\")\n", "\n", "# These are the same as at the top of the notebook:\n", "workflow[Filename[SampleRun]] = dream.data.simulated_diamond_sample()\n", @@ -318,18 +443,19 @@ "workflow[NeXusDetectorName] = \"mantle\"\n", "workflow[UncertaintyBroadcastMode] = UncertaintyBroadcastMode.drop\n", "workflow[DspacingBins] = sc.linspace(\"dspacing\", 0.0, 2.3434, 201, unit=\"angstrom\")\n", - "workflow[TofMask] = lambda x: (x < sc.scalar(0.0, unit=\"ns\")) | (\n", - " x > sc.scalar(86e6, unit=\"ns\")\n", - ")\n", + "workflow[TofMask] = None\n", "workflow[TwoThetaMask] = None\n", "workflow[WavelengthMask] = None\n", - "workflow = powder.with_pixel_mask_filenames(workflow, [])" + "workflow = powder.with_pixel_mask_filenames(workflow, [])\n", + "\n", + "# Insert tof sub-workflow\n", + "workflow[TofWorkflow] = tof_wf" ] }, { "cell_type": "code", "execution_count": null, - "id": "25", + "id": "28", "metadata": {}, "outputs": [], "source": [ @@ -339,7 +465,7 @@ { "cell_type": "code", "execution_count": null, - "id": "26", + "id": "29", "metadata": {}, "outputs": [], "source": [ @@ -352,7 +478,7 @@ { "cell_type": "code", "execution_count": null, - "id": "27", + "id": "30", "metadata": {}, "outputs": [], "source": [ @@ -361,7 +487,7 @@ }, { "cell_type": "markdown", - "id": "28", + "id": "31", "metadata": {}, "source": [ "Comparing the final, normalized result shows that it agrees with the data that was normalized by proton charge:" ] }, { "cell_type": "code", "execution_count": null, - "id": "29", + "id": "32", "metadata": {}, "outputs": [], "source": [
- "sc.plot({\n", - " 'By proton charge': histogram,\n", - " 'By monitor': normalized_by_monitor.hist()\n", - "})" + "sc.plot({\"By proton charge\": histogram, \"By monitor\": normalized_by_monitor.hist()})" ] } ], @@ -396,8 +519,7 @@ "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.14" + "pygments_lexer": "ipython3" } }, "nbformat": 4, diff --git a/pyproject.toml b/pyproject.toml index 01430733..aa62b69a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,7 @@ requires-python = ">=3.10" # Make sure to list one dependency per line. dependencies = [ "dask", - "essreduce>=24.12.0", + "essreduce>=25.01.1", "graphviz", "numpy", "plopp", @@ -40,6 +40,7 @@ dependencies = [ "scipp>=24.09.1", "scippneutron>=24.11.0", "scippnexus>=23.12.0", + "tof>=25.01.2", ] dynamic = ["version"] diff --git a/requirements/base.in b/requirements/base.in index 6e322b76..ab36054a 100644 --- a/requirements/base.in +++ b/requirements/base.in @@ -3,7 +3,7 @@ # --- END OF CUSTOM SECTION --- # The following was generated by 'tox -e deps', DO NOT EDIT MANUALLY! dask -essreduce>=24.12.0 +essreduce>=25.01.1 graphviz numpy plopp @@ -12,3 +12,4 @@ sciline>=24.06.0 scipp>=24.09.1 scippneutron>=24.11.0 scippnexus>=23.12.0 +tof>=25.01.2 diff --git a/requirements/base.txt b/requirements/base.txt index 9db8b550..3f25a607 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -1,4 +1,4 @@ -# SHA1:1c545a5dfb8b66d23509534f3ddf41f94ead839c +# SHA1:b80603bcaa4893e64f02c102f3aaa7da691745f8 # # This file is autogenerated by pip-compile-multi # To update, run: @@ -7,9 +7,9 @@ # asttokens==3.0.0 # via stack-data -click==8.1.7 +click==8.1.8 # via dask -cloudpickle==3.1.0 +cloudpickle==3.1.1 # via dask comm==0.2.2 # via ipywidgets @@ -19,19 +19,19 @@ cyclebane==24.10.0 # via sciline cycler==0.12.1 # via matplotlib -dask==2024.12.1 +dask==2025.1.0 # via -r base.in decorator==5.1.1 # via ipython -essreduce==24.12.0 +essreduce==25.1.1 # via -r base.in exceptiongroup==1.2.2 # via ipython -executing==2.1.0 +executing==2.2.0 # via stack-data -fonttools==4.55.3 +fonttools==4.55.8 # via matplotlib -fsspec==2024.10.0 +fsspec==2024.12.0 # via dask graphviz==0.20.3 # via -r base.in @@ -39,11 +39,13 @@ h5py==3.12.1 # via # scippneutron # scippnexus -importlib-metadata==8.5.0 +importlib-metadata==8.6.1 # via dask +importlib-resources==6.5.2 + # via tof ipydatawidgets==4.3.5 # via pythreejs -ipython==8.30.0 +ipython==8.31.0 # via ipywidgets ipywidgets==8.1.5 # via @@ -53,7 +55,7 @@ jedi==0.19.2 # via ipython jupyterlab-widgets==3.0.13 # via ipywidgets -kiwisolver==1.4.7 +kiwisolver==1.4.8 # via matplotlib locket==1.0.0 # via partd @@ -67,7 +69,7 @@ mpltoolbox==24.5.1 # via scippneutron networkx==3.4.2 # via cyclebane -numpy==2.2.0 +numpy==2.2.2 # via # -r base.in # contourpy @@ -89,21 +91,22 @@ partd==1.4.2 # via dask pexpect==4.9.0 # via ipython -pillow==11.0.0 +pillow==11.1.0 # via matplotlib plopp==24.10.0 # via # -r base.in # scippneutron -prompt-toolkit==3.0.48 + # tof +prompt-toolkit==3.0.50 # via ipython ptyprocess==0.7.0 # via pexpect pure-eval==0.2.3 # via stack-data -pygments==2.18.0 +pygments==2.19.1 # via ipython -pyparsing==3.2.0 +pyparsing==3.2.1 # via matplotlib python-dateutil==2.9.0.post0 # via @@ -117,13 +120,14 @@ sciline==24.10.0 # via # -r base.in # essreduce -scipp==24.11.2 +scipp==25.1.0 # via # -r base.in # essreduce # scippneutron # scippnexus -scippneutron==24.12.0 + # tof +scippneutron==25.1.0 # via # -r base.in # essreduce @@ 
-132,14 +136,17 @@ scippnexus==24.11.1 # -r base.in # essreduce # scippneutron -scipy==1.14.1 +scipy==1.15.1 # via # scippneutron # scippnexus + # tof six==1.17.0 # via python-dateutil stack-data==0.6.3 # via ipython +tof==25.1.2 + # via -r base.in toolz==1.0.0 # via # dask diff --git a/requirements/basetest.txt b/requirements/basetest.txt index 906d87e8..86a65be3 100644 --- a/requirements/basetest.txt +++ b/requirements/basetest.txt @@ -5,9 +5,9 @@ # # pip-compile-multi # -certifi==2024.12.14 +certifi==2025.1.31 # via requests -charset-normalizer==3.4.0 +charset-normalizer==3.4.1 # via requests exceptiongroup==1.2.2 # via pytest @@ -15,7 +15,7 @@ idna==3.10 # via requests iniconfig==2.0.0 # via pytest -numpy==2.2.0 +numpy==2.2.2 # via pandas packaging==24.2 # via @@ -33,7 +33,7 @@ pytest==8.3.4 # via -r basetest.in python-dateutil==2.9.0.post0 # via pandas -pytz==2024.2 +pytz==2025.1 # via pandas requests==2.32.3 # via pooch @@ -41,7 +41,7 @@ six==1.17.0 # via python-dateutil tomli==2.2.1 # via pytest -tzdata==2024.2 +tzdata==2025.1 # via pandas -urllib3==2.2.3 +urllib3==2.3.0 # via requests diff --git a/requirements/ci.txt b/requirements/ci.txt index c3ccdde1..10820857 100644 --- a/requirements/ci.txt +++ b/requirements/ci.txt @@ -5,25 +5,25 @@ # # pip-compile-multi # -cachetools==5.5.0 +cachetools==5.5.1 # via tox -certifi==2024.12.14 +certifi==2025.1.31 # via requests chardet==5.2.0 # via tox -charset-normalizer==3.4.0 +charset-normalizer==3.4.1 # via requests colorama==0.4.6 # via tox distlib==0.3.9 # via virtualenv -filelock==3.16.1 +filelock==3.17.0 # via # tox # virtualenv -gitdb==4.0.11 +gitdb==4.0.12 # via gitpython -gitpython==3.1.43 +gitpython==3.1.44 # via -r ci.in idna==3.10 # via requests @@ -38,21 +38,21 @@ platformdirs==4.3.6 # virtualenv pluggy==1.5.0 # via tox -pyproject-api==1.8.0 +pyproject-api==1.9.0 # via tox requests==2.32.3 # via -r ci.in -smmap==5.0.1 +smmap==5.0.2 # via gitdb tomli==2.2.1 # via # pyproject-api # tox -tox==4.23.2 +tox==4.24.1 # via -r ci.in typing-extensions==4.12.2 # via tox -urllib3==2.2.3 +urllib3==2.3.0 # via requests -virtualenv==20.28.0 +virtualenv==20.29.1 # via tox diff --git a/requirements/dev.txt b/requirements/dev.txt index cfc7565c..406ef911 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -14,7 +14,7 @@ -r wheels.txt annotated-types==0.7.0 # via pydantic -anyio==4.7.0 +anyio==4.8.0 # via # httpx # jupyter-server @@ -59,7 +59,7 @@ jupyter-events==0.11.0 # via jupyter-server jupyter-lsp==2.2.5 # via jupyterlab -jupyter-server==2.14.2 +jupyter-server==2.15.0 # via # jupyter-lsp # jupyterlab @@ -67,7 +67,7 @@ jupyter-server==2.14.2 # notebook-shim jupyter-server-terminals==0.5.3 # via jupyter-server -jupyterlab==4.3.4 +jupyterlab==4.3.5 # via -r dev.in jupyterlab-server==2.27.3 # via jupyterlab @@ -87,13 +87,13 @@ prometheus-client==0.21.1 # via jupyter-server pycparser==2.22 # via cffi -pydantic==2.10.4 +pydantic==2.10.6 # via copier pydantic-core==2.27.2 # via pydantic python-json-logger==3.2.1 # via jupyter-events -questionary==1.10.0 +questionary==2.1.0 # via copier rfc3339-validator==0.1.4 # via diff --git a/requirements/docs.txt b/requirements/docs.txt index 94f3f008..0c51816b 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -10,7 +10,7 @@ accessible-pygments==0.0.5 # via pydata-sphinx-theme alabaster==1.0.0 # via sphinx -attrs==24.3.0 +attrs==25.1.0 # via # jsonschema # referencing @@ -22,13 +22,13 @@ beautifulsoup4==4.12.3 # via # nbconvert # pydata-sphinx-theme -bleach==6.2.0 +bleach[css]==6.2.0 # 
via nbconvert -certifi==2024.12.14 +certifi==2025.1.31 # via requests -charset-normalizer==3.4.0 +charset-normalizer==3.4.1 # via requests -debugpy==1.8.11 +debugpy==1.8.12 # via ipykernel defusedxml==0.7.1 # via nbconvert @@ -48,9 +48,9 @@ imagesize==1.4.1 # via sphinx ipykernel==6.29.5 # via -r docs.in -ipympl==0.9.5 +ipympl==0.9.6 # via -r docs.in -jinja2==3.1.4 +jinja2==3.1.5 # via # myst-parser # nbconvert @@ -87,20 +87,20 @@ mdit-py-plugins==0.4.2 # via myst-parser mdurl==0.1.2 # via markdown-it-py -mistune==3.0.2 +mistune==3.1.1 # via nbconvert myst-parser==4.0.0 # via -r docs.in nbclient==0.10.2 # via nbconvert -nbconvert==7.16.4 +nbconvert==7.16.6 # via nbsphinx nbformat==5.10.4 # via # nbclient # nbconvert # nbsphinx -nbsphinx==0.9.5 +nbsphinx==0.9.6 # via -r docs.in nest-asyncio==1.6.0 # via ipykernel @@ -114,9 +114,9 @@ platformdirs==4.3.6 # pooch pooch==1.8.2 # via -r docs.in -psutil==6.1.0 +psutil==6.1.1 # via ipykernel -pyarrow==18.1.0 +pyarrow==19.0.0 # via -r docs.in pybtex==0.24.0 # via @@ -126,13 +126,13 @@ pybtex-docutils==1.0.3 # via sphinxcontrib-bibtex pydata-sphinx-theme==0.16.1 # via -r docs.in -pytz==2024.2 +pytz==2025.1 # via pandas -pyzmq==26.2.0 +pyzmq==26.2.1 # via # ipykernel # jupyter-client -referencing==0.35.1 +referencing==0.36.2 # via # jsonschema # jsonschema-specifications @@ -158,7 +158,7 @@ sphinx==8.1.3 # sphinx-copybutton # sphinx-design # sphinxcontrib-bibtex -sphinx-autodoc-typehints==2.5.0 +sphinx-autodoc-typehints==3.0.1 # via -r docs.in sphinx-copybutton==0.5.2 # via -r docs.in @@ -179,16 +179,16 @@ sphinxcontrib-qthelp==2.0.0 sphinxcontrib-serializinghtml==2.0.0 # via sphinx tinycss2==1.4.0 - # via nbconvert + # via bleach tomli==2.2.1 # via sphinx tornado==6.4.2 # via # ipykernel # jupyter-client -tzdata==2024.2 +tzdata==2025.1 # via pandas -urllib3==2.2.3 +urllib3==2.3.0 # via requests webencodings==0.5.1 # via diff --git a/requirements/mypy.txt b/requirements/mypy.txt index 0b6fa4cc..d7a49e8a 100644 --- a/requirements/mypy.txt +++ b/requirements/mypy.txt @@ -6,7 +6,7 @@ # pip-compile-multi # -r test.txt -mypy==1.13.0 +mypy==1.14.1 # via -r mypy.in mypy-extensions==1.0.0 # via mypy diff --git a/requirements/nightly.in b/requirements/nightly.in index a1ea3a88..b8f445c5 100644 --- a/requirements/nightly.in +++ b/requirements/nightly.in @@ -11,3 +11,4 @@ sciline @ git+https://github.com/scipp/sciline@main plopp @ git+https://github.com/scipp/plopp@main scippneutron @ git+https://github.com/scipp/scippneutron@main essreduce @ git+https://github.com/scipp/essreduce@main +tof @ git+https://github.com/scipp/tof@main diff --git a/requirements/nightly.txt b/requirements/nightly.txt index 6f5fbfdc..644cf9de 100644 --- a/requirements/nightly.txt +++ b/requirements/nightly.txt @@ -1,4 +1,4 @@ -# SHA1:14c661e4a5ccde65d56656d46e7d4e7077f040bf +# SHA1:5157f3e05efe327c782ce19e72fe54bd981e706c # # This file is autogenerated by pip-compile-multi # To update, run: @@ -8,9 +8,9 @@ -r basetest.txt asttokens==3.0.0 # via stack-data -click==8.1.7 +click==8.1.8 # via dask -cloudpickle==3.1.0 +cloudpickle==3.1.1 # via dask comm==0.2.2 # via ipywidgets @@ -20,17 +20,17 @@ cyclebane==24.10.0 # via sciline cycler==0.12.1 # via matplotlib -dask==2024.12.1 +dask==2025.1.0 # via -r nightly.in decorator==5.1.1 # via ipython essreduce @ git+https://github.com/scipp/essreduce@main # via -r nightly.in -executing==2.1.0 +executing==2.2.0 # via stack-data -fonttools==4.55.3 +fonttools==4.55.8 # via matplotlib -fsspec==2024.10.0 +fsspec==2024.12.0 # via dask graphviz==0.20.3 # 
via -r nightly.in @@ -38,11 +38,13 @@ h5py==3.12.1 # via # scippneutron # scippnexus -importlib-metadata==8.5.0 +importlib-metadata==8.6.1 # via dask +importlib-resources==6.5.2 + # via tof ipydatawidgets==4.3.5 # via pythreejs -ipython==8.30.0 +ipython==8.31.0 # via ipywidgets ipywidgets==8.1.5 # via @@ -52,8 +54,10 @@ jedi==0.19.2 # via ipython jupyterlab-widgets==3.0.13 # via ipywidgets -kiwisolver==1.4.7 +kiwisolver==1.4.8 # via matplotlib +lazy-loader==0.4 + # via plopp locket==1.0.0 # via partd matplotlib==3.10.0 @@ -72,21 +76,22 @@ partd==1.4.2 # via dask pexpect==4.9.0 # via ipython -pillow==11.0.0 +pillow==11.1.0 # via matplotlib plopp @ git+https://github.com/scipp/plopp@main # via # -r nightly.in # scippneutron -prompt-toolkit==3.0.48 + # tof +prompt-toolkit==3.0.50 # via ipython ptyprocess==0.7.0 # via pexpect pure-eval==0.2.3 # via stack-data -pygments==2.18.0 +pygments==2.19.1 # via ipython -pyparsing==3.2.0 +pyparsing==3.2.1 # via matplotlib pythreejs==2.4.2 # via -r nightly.in @@ -102,6 +107,7 @@ scipp @ https://github.com/scipp/scipp/releases/download/nightly/scipp-nightly-c # essreduce # scippneutron # scippnexus + # tof scippneutron @ git+https://github.com/scipp/scippneutron@main # via # -r nightly.in @@ -111,12 +117,15 @@ scippnexus @ git+https://github.com/scipp/scippnexus@main # -r nightly.in # essreduce # scippneutron -scipy==1.14.1 +scipy==1.15.1 # via # scippneutron # scippnexus + # tof stack-data==0.6.3 # via ipython +tof @ git+https://github.com/scipp/tof@main + # via -r nightly.in toolz==1.0.0 # via # dask diff --git a/requirements/static.txt b/requirements/static.txt index 1d6ae539..b7eb0124 100644 --- a/requirements/static.txt +++ b/requirements/static.txt @@ -9,17 +9,17 @@ cfgv==3.4.0 # via pre-commit distlib==0.3.9 # via virtualenv -filelock==3.16.1 +filelock==3.17.0 # via virtualenv -identify==2.6.3 +identify==2.6.6 # via pre-commit nodeenv==1.9.1 # via pre-commit platformdirs==4.3.6 # via virtualenv -pre-commit==4.0.1 +pre-commit==4.1.0 # via -r static.in pyyaml==6.0.2 # via pre-commit -virtualenv==20.28.0 +virtualenv==20.29.1 # via pre-commit diff --git a/src/ess/dream/io/geant4.py b/src/ess/dream/io/geant4.py index 67c82930..3c89de83 100644 --- a/src/ess/dream/io/geant4.py +++ b/src/ess/dream/io/geant4.py @@ -4,6 +4,7 @@ import numpy as np import sciline import scipp as sc +import scippneutron as scn import scippnexus as snx from ess.powder.types import ( @@ -85,14 +86,14 @@ def get_calibrated_geant4_detector( Since the Geant4 detectors already have computed positions as well as logical shape, this just extracts the relevant event data. 
""" - return detector['events'].copy(deep=False) + return detector["events"].copy(deep=False) def _load_raw_events(file_path: str) -> sc.DataArray: table = sc.io.load_csv( file_path, sep="\t", header_parser="bracket", data_columns=[] ) - table.coords['sumo'] = table.coords['det ID'] + table.coords["sumo"] = table.coords["det ID"] table.coords.pop("lambda", None) table = table.rename_dims(row="event") return sc.DataArray( @@ -119,7 +120,7 @@ def group(key: str, da: sc.DataArray) -> sc.DataArray: res = da.group("sumo", *elements) else: res = da.group(*elements) - res.coords['position'] = res.bins.coords.pop('position').bins.mean() + res.coords["position"] = res.bins.coords.pop("position").bins.mean() res.bins.coords.pop("sector", None) res.bins.coords.pop("sumo", None) return res @@ -243,18 +244,63 @@ def geant4_load_calibration(filename: CalibrationFilename) -> CalibrationData: return CalibrationData(None) -def dummy_assemble_detector_data( +def assemble_detector_data( detector: CalibratedBeamline[RunType], ) -> DetectorData[RunType]: - """Dummy assembly of detector data, detector already contains neutron data.""" - return DetectorData[RunType](detector) + """ + In the raw data, the tofs extend beyond 71ms, this is thus not an event_time_offset. + We convert the detector data to data which resembles NeXus data, with + event_time_zero and event_time_offset coordinates. + + Parameters + ---------- + detector: + The calibrated detector data. + """ + + da = detector.copy(deep=False) + da.bins.coords["tof"] = da.bins.coords["tof"].to(unit="us") + + period = (1.0 / sc.scalar(14.0, unit="Hz")).to(unit="us") + # Bin the data into bins with a 71ms period. + npulses = int((da.bins.coords["tof"].max() / period).value) + da = da.bin(tof=sc.arange("tof", npulses + 1) * period) + # Add a event_time_zero coord for each bin, but not as bin edges, + # as all events in the same pulse have the same event_time_zero, hence the `[:2]` + # We need to pick a start time. The actual value does not matter. We chose the + # random date of Friday, November 1, 2024 8:40:34.078 + da.coords["event_time_zero"] = ( + sc.scalar(1730450434078980000, unit="ns").to(unit="us") + da.coords["tof"] + )[:npulses] + # Remove the meaningless tof coord at the top level + del da.coords["tof"] + da = da.rename_dims(tof="event_time_zero") + # Compute a event_time_offset as tof % period + da.bins.coords["event_time_offset"] = (da.bins.coords.pop("tof") % period).to( + unit="us" + ) + # Add a useful Ltotal coordinate + graph = scn.conversion.graph.beamline.beamline(scatter=True) + da = da.transform_coords("Ltotal", graph=graph) + return DetectorData[RunType](da) -def dummy_assemble_monitor_data( +def assemble_monitor_data( monitor: CalibratedMonitor[RunType, MonitorType], ) -> MonitorData[RunType, MonitorType]: - """Dummy assembly of monitor data, monitor already contains neutron data.""" - return MonitorData[RunType, MonitorType](monitor) + """ + Dummy assembly of monitor data, monitor already contains neutron data. + We simply add a Ltotal coordinate necessary to calculate the time-of-flight. + + Parameters + ---------- + monitor: + The calibrated monitor data. 
+ """ + graph = scn.conversion.graph.beamline.beamline(scatter=False) + return MonitorData[RunType, MonitorType]( + monitor.transform_coords("Ltotal", graph=graph) + ) def dummy_source_position() -> Position[snx.NXsource, RunType]: @@ -281,8 +327,8 @@ def LoadGeant4Workflow() -> sciline.Pipeline: wf.insert(load_mcstas_monitor) wf.insert(geant4_load_calibration) wf.insert(get_calibrated_geant4_detector) - wf.insert(dummy_assemble_detector_data) - wf.insert(dummy_assemble_monitor_data) + wf.insert(assemble_detector_data) + wf.insert(assemble_monitor_data) wf.insert(dummy_source_position) wf.insert(dummy_sample_position) return wf diff --git a/src/ess/powder/__init__.py b/src/ess/powder/__init__.py index 42b372b9..f7e314a3 100644 --- a/src/ess/powder/__init__.py +++ b/src/ess/powder/__init__.py @@ -45,8 +45,8 @@ "filtering", "grouping", "masking", - "transform", "providers", "smoothing", + "transform", "with_pixel_mask_filenames", ] diff --git a/src/ess/powder/conversion.py b/src/ess/powder/conversion.py index ed16f8e0..0b324aca 100644 --- a/src/ess/powder/conversion.py +++ b/src/ess/powder/conversion.py @@ -4,15 +4,19 @@ Coordinate transformations for powder diffraction. """ +import numpy as np import scipp as sc import scippneutron as scn +from ess.reduce import time_of_flight + from .calibration import OutputCalibrationData from .correction import merge_calibration from .logging import get_logger from .types import ( CalibrationData, DataWithScatteringCoordinates, + DetectorData, DspacingData, ElasticCoordTransformGraph, FilteredData, @@ -22,6 +26,9 @@ MonitorData, MonitorType, RunType, + TofData, + TofMonitorData, + TofWorkflow, WavelengthMonitor, ) @@ -213,11 +220,11 @@ def convert_to_dspacing( out = data.transform_coords(["dspacing"], graph=graph, keep_intermediate=False) else: out = to_dspacing_with_calibration(data, calibration=calibration) - for key in ('wavelength', 'two_theta'): + for key in ("wavelength", "two_theta"): if key in out.coords.keys(): out.coords.set_aligned(key, False) - out.bins.coords.pop('tof', None) - out.bins.coords.pop('wavelength', None) + out.bins.coords.pop("tof", None) + out.bins.coords.pop("wavelength", None) return DspacingData[RunType](out) @@ -229,8 +236,28 @@ def convert_reduced_to_tof( ) -def convert_monitor_do_wavelength( - monitor: MonitorData[RunType, MonitorType], +def compute_detector_time_of_flight( + detector_data: DetectorData[RunType], tof_workflow: TofWorkflow +) -> TofData[RunType]: + wf = tof_workflow.pipeline.copy() + wf[time_of_flight.RawData] = detector_data + return TofData[RunType](wf.compute(time_of_flight.TofData)) + + +def compute_monitor_time_of_flight( + monitor: MonitorData[RunType, MonitorType], tof_workflow: TofWorkflow +) -> TofMonitorData[RunType, MonitorType]: + wf = tof_workflow.pipeline.copy() + wf.insert(time_of_flight.resample_tof_data) + wf[time_of_flight.RawData] = monitor + out = wf.compute(time_of_flight.ResampledTofData) + inds = out.values == 0.0 + out.values[inds] = np.nan + return TofMonitorData[RunType, MonitorType](out) + + +def convert_monitor_to_wavelength( + monitor: TofMonitorData[RunType, MonitorType], ) -> WavelengthMonitor[RunType, MonitorType]: graph = { **scn.conversion.graph.beamline.beamline(scatter=False), @@ -246,5 +273,7 @@ def convert_monitor_do_wavelength( add_scattering_coordinates_from_positions, convert_to_dspacing, convert_reduced_to_tof, - convert_monitor_do_wavelength, + convert_monitor_to_wavelength, + compute_detector_time_of_flight, + compute_monitor_time_of_flight, ) diff --git 
a/src/ess/powder/correction.py b/src/ess/powder/correction.py index 7f46e8c1..0a7c37c1 100644 --- a/src/ess/powder/correction.py +++ b/src/ess/powder/correction.py @@ -55,7 +55,7 @@ def normalize_by_monitor_histogram( norm = broadcast_uncertainties( monitor, prototype=detector, mode=uncertainty_broadcast_mode ) - return detector.bins / sc.lookup(norm, dim="wavelength") + return NormalizedRunData[RunType](detector.bins / sc.lookup(norm, dim="wavelength")) def normalize_by_monitor_integrated( @@ -104,8 +104,8 @@ def normalize_by_monitor_integrated( det_coord = ( detector.coords[dim] if dim in detector.coords else detector.bins.coords[dim] ) - lo = det_coord.min() - hi = det_coord.max() + lo = det_coord.nanmin() + hi = det_coord.nanmax() monitor = monitor[dim, lo:hi] # Strictly limit `monitor` to the range of `detector`. edges = sc.concat([lo, monitor.coords[dim][1:-1], hi], dim=dim) diff --git a/src/ess/powder/filtering.py b/src/ess/powder/filtering.py index 4c7b0719..7ab27e8a 100644 --- a/src/ess/powder/filtering.py +++ b/src/ess/powder/filtering.py @@ -12,7 +12,7 @@ import scipp as sc -from .types import DetectorData, FilteredData, RunType +from .types import FilteredData, RunType, TofData def _equivalent_bin_indices(a, b) -> bool: @@ -72,7 +72,7 @@ def remove_bad_pulses( return filtered -def filter_events(data: DetectorData[RunType]) -> FilteredData[RunType]: +def filter_events(data: TofData[RunType]) -> FilteredData[RunType]: """Remove bad events. Attention diff --git a/src/ess/powder/types.py b/src/ess/powder/types.py index 0132b818..2a85a76e 100644 --- a/src/ess/powder/types.py +++ b/src/ess/powder/types.py @@ -16,6 +16,7 @@ from scippneutron.io import cif from ess.reduce.nexus import types as reduce_t +from ess.reduce.time_of_flight import TofWorkflow as _TofWorkflow from ess.reduce.uncertainty import UncertaintyBroadcastMode as _UncertaintyBroadcastMode # 1 TypeVars used to parametrize the generic parts of the workflow @@ -90,6 +91,10 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: """Detector calibration data.""" +class TofData(sciline.Scope[RunType, sc.DataArray], sc.DataArray): + """Data with time-of-flight coordinate.""" + + class DataWithScatteringCoordinates(sciline.Scope[RunType, sc.DataArray], sc.DataArray): """Data with scattering coordinates computed for all events: wavelength, 2theta, d-spacing.""" @@ -182,10 +187,21 @@ class RawDataAndMetadata(sciline.Scope[RunType, sc.DataGroup], sc.DataGroup): """WavelengthMask is a callable that returns a mask for a given WavelengthData.""" -CIFAuthors = NewType('CIFAuthors', list[cif.Author]) +CIFAuthors = NewType("CIFAuthors", list[cif.Author]) """List of authors to save to output CIF files.""" -ReducedTofCIF = NewType('ReducedTofCIF', cif.CIF) +ReducedTofCIF = NewType("ReducedTofCIF", cif.CIF) """Reduced data in time-of-flight, ready to be saved to a CIF file.""" + +TofWorkflow = _TofWorkflow +"""Workflow for computing time-of-flight data.""" + + +class TofMonitorData( + sciline.ScopeTwoParams[RunType, MonitorType, sc.DataArray], sc.DataArray +): + """Monitor data with time-of-flight coordinate.""" + + del sc, sciline, NewType, TypeVar diff --git a/src/ess/snspowder/powgen/data.py b/src/ess/snspowder/powgen/data.py index 6ba18112..4d433fdf 100644 --- a/src/ess/snspowder/powgen/data.py +++ b/src/ess/snspowder/powgen/data.py @@ -10,11 +10,11 @@ CalibrationData, CalibrationFilename, DetectorBankSizes, - DetectorData, Filename, ProtonCharge, RawDataAndMetadata, RunType, + TofData, ) _version = "1" @@ -120,14 +120,14 @@ def 
pooch_load_calibration( def extract_raw_data( dg: RawDataAndMetadata[RunType], sizes: DetectorBankSizes -) -> DetectorData[RunType]: +) -> TofData[RunType]: """Return the events from a loaded data group.""" # Remove the tof binning and dimension, as it is not needed and it gets in the way # of masking. out = dg["data"].squeeze() out.coords.pop("tof", None) out = out.fold(dim="spectrum", sizes=sizes) - return DetectorData[RunType](out) + return TofData[RunType](out) def extract_proton_charge(dg: RawDataAndMetadata[RunType]) -> ProtonCharge[RunType]: @@ -136,7 +136,7 @@ def extract_proton_charge(dg: RawDataAndMetadata[RunType]) -> ProtonCharge[RunTy def extract_accumulated_proton_charge( - data: DetectorData[RunType], + data: TofData[RunType], ) -> AccumulatedProtonCharge[RunType]: """Return the stored accumulated proton charge from a loaded data group.""" return AccumulatedProtonCharge[RunType](data.coords["gd_prtn_chrg"]) diff --git a/tests/dream/geant4_reduction_test.py b/tests/dream/geant4_reduction_test.py index 6fb721fa..3b30d5be 100644 --- a/tests/dream/geant4_reduction_test.py +++ b/tests/dream/geant4_reduction_test.py @@ -8,6 +8,7 @@ import scipp as sc import scipp.testing import scippnexus as snx +from scippneutron.chopper import DiskChopper from scippneutron.io.cif import Author import ess.dream.data # noqa: F401 @@ -37,12 +38,14 @@ ReducedTofCIF, SampleRun, TofMask, + TofWorkflow, TwoThetaBins, TwoThetaMask, UncertaintyBroadcastMode, VanadiumRun, WavelengthMask, ) +from ess.reduce import time_of_flight from ess.reduce import workflow as reduce_workflow sample = sc.vector([0.0, 0.0, 0.0], unit='mm') @@ -59,8 +62,8 @@ CalibrationFilename: None, UncertaintyBroadcastMode: UncertaintyBroadcastMode.drop, DspacingBins: sc.linspace('dspacing', 0.0, 2.3434, 201, unit='angstrom'), - TofMask: lambda x: (x < sc.scalar(0.0, unit='ns')) - | (x > sc.scalar(86e6, unit='ns')), + TofMask: lambda x: (x < sc.scalar(0.0, unit='us')) + | (x > sc.scalar(86e3, unit='us')), Position[snx.NXsample, SampleRun]: sample, Position[snx.NXsample, VanadiumRun]: sample, Position[snx.NXsource, SampleRun]: source, @@ -80,6 +83,98 @@ } +def dream_choppers(): + psc1 = DiskChopper( + frequency=sc.scalar(14.0, unit="Hz"), + beam_position=sc.scalar(0.0, unit="deg"), + phase=sc.scalar(286 - 180, unit="deg"), + axle_position=sc.vector(value=[0, 0, 6.145], unit="m"), + slit_begin=sc.array( + dims=["cutout"], + values=[-1.23, 70.49, 84.765, 113.565, 170.29, 271.635, 286.035, 301.17], + unit="deg", + ), + slit_end=sc.array( + dims=["cutout"], + values=[1.23, 73.51, 88.035, 116.835, 175.31, 275.565, 289.965, 303.63], + unit="deg", + ), + slit_height=sc.scalar(10.0, unit="cm"), + radius=sc.scalar(30.0, unit="cm"), + ) + + psc2 = DiskChopper( + frequency=sc.scalar(-14.0, unit="Hz"), + beam_position=sc.scalar(0.0, unit="deg"), + phase=sc.scalar(-236, unit="deg"), + axle_position=sc.vector(value=[0, 0, 6.155], unit="m"), + slit_begin=sc.array( + dims=["cutout"], + values=[-1.23, 27.0, 55.8, 142.385, 156.765, 214.115, 257.23, 315.49], + unit="deg", + ), + slit_end=sc.array( + dims=["cutout"], + values=[1.23, 30.6, 59.4, 145.615, 160.035, 217.885, 261.17, 318.11], + unit="deg", + ), + slit_height=sc.scalar(10.0, unit="cm"), + radius=sc.scalar(30.0, unit="cm"), + ) + + oc = DiskChopper( + frequency=sc.scalar(14.0, unit="Hz"), + beam_position=sc.scalar(0.0, unit="deg"), + phase=sc.scalar(297 - 180 - 90, unit="deg"), + axle_position=sc.vector(value=[0, 0, 6.174], unit="m"), + slit_begin=sc.array(dims=["cutout"], values=[-27.6 * 0.5], 
unit="deg"), + slit_end=sc.array(dims=["cutout"], values=[27.6 * 0.5], unit="deg"), + slit_height=sc.scalar(10.0, unit="cm"), + radius=sc.scalar(30.0, unit="cm"), + ) + + bcc = DiskChopper( + frequency=sc.scalar(112.0, unit="Hz"), + beam_position=sc.scalar(0.0, unit="deg"), + phase=sc.scalar(215 - 180, unit="deg"), + # phase=sc.scalar(240 - 180, unit="deg"), + axle_position=sc.vector(value=[0, 0, 9.78], unit="m"), + slit_begin=sc.array(dims=["cutout"], values=[-36.875, 143.125], unit="deg"), + slit_end=sc.array(dims=["cutout"], values=[36.875, 216.875], unit="deg"), + slit_height=sc.scalar(10.0, unit="cm"), + radius=sc.scalar(30.0, unit="cm"), + ) + + t0 = DiskChopper( + frequency=sc.scalar(28.0, unit="Hz"), + beam_position=sc.scalar(0.0, unit="deg"), + phase=sc.scalar(280 - 180, unit="deg"), + axle_position=sc.vector(value=[0, 0, 13.05], unit="m"), + slit_begin=sc.array(dims=["cutout"], values=[-314.9 * 0.5], unit="deg"), + slit_end=sc.array(dims=["cutout"], values=[314.9 * 0.5], unit="deg"), + slit_height=sc.scalar(10.0, unit="cm"), + radius=sc.scalar(30.0, unit="cm"), + ) + + return {"psc1": psc1, "psc2": psc2, "oc": oc, "bcc": bcc, "t0": t0} + + +@pytest.fixture(scope="module") +def tof_workflow(): + tof_wf = time_of_flight.TofWorkflow( + simulated_neutrons=time_of_flight.simulate_beamline( + choppers=dream_choppers(), + neutrons=500_000, + seed=123, + ), + ltotal_range=(sc.scalar(70.0, unit='m'), sc.scalar(80.0, unit='m')), + error_threshold=1.0, + ) + # Save expensive steps from being re-computed many times + tof_wf.cache_results() + return tof_wf + + @pytest.fixture(params=["mantle", "endcap_backward", "endcap_forward"]) def params_for_det(request): # Not available in simulated data @@ -98,37 +193,43 @@ def make_workflow(params_for_det, *, run_norm): return wf -def test_pipeline_can_compute_dspacing_result(workflow): +def test_pipeline_can_compute_dspacing_result(workflow, tof_workflow): workflow = powder.with_pixel_mask_filenames(workflow, []) + workflow[TofWorkflow] = tof_workflow result = workflow.compute(IofDspacing) assert result.sizes == {'dspacing': len(params[DspacingBins]) - 1} assert sc.identical(result.coords['dspacing'], params[DspacingBins]) -def test_pipeline_can_compute_dspacing_result_with_hist_monitor_norm(params_for_det): +def test_pipeline_can_compute_dspacing_result_with_hist_monitor_norm( + params_for_det, tof_workflow +): workflow = make_workflow( params_for_det, run_norm=powder.RunNormalization.monitor_histogram ) workflow = powder.with_pixel_mask_filenames(workflow, []) + workflow[TofWorkflow] = tof_workflow result = workflow.compute(IofDspacing) assert result.sizes == {'dspacing': len(params[DspacingBins]) - 1} assert sc.identical(result.coords['dspacing'], params[DspacingBins]) def test_pipeline_can_compute_dspacing_result_with_integrated_monitor_norm( - params_for_det, + params_for_det, tof_workflow ): workflow = make_workflow( params_for_det, run_norm=powder.RunNormalization.monitor_integrated ) workflow = powder.with_pixel_mask_filenames(workflow, []) + workflow[TofWorkflow] = tof_workflow result = workflow.compute(IofDspacing) assert result.sizes == {'dspacing': len(params[DspacingBins]) - 1} assert sc.identical(result.coords['dspacing'], params[DspacingBins]) -def test_workflow_is_deterministic(workflow): +def test_workflow_is_deterministic(workflow, tof_workflow): workflow = powder.with_pixel_mask_filenames(workflow, []) + workflow[TofWorkflow] = tof_workflow # This is Sciline's default scheduler, but we want to be explicit here scheduler = 
sciline.scheduler.DaskScheduler() graph = workflow.get(IofTof, scheduler=scheduler) @@ -137,8 +238,9 @@ def test_workflow_is_deterministic(workflow): assert sc.identical(sc.values(result), sc.values(reference)) -def test_pipeline_can_compute_intermediate_results(workflow): +def test_pipeline_can_compute_intermediate_results(workflow, tof_workflow): workflow = powder.with_pixel_mask_filenames(workflow, []) + workflow[TofWorkflow] = tof_workflow results = workflow.compute((NormalizedRunData[SampleRun], NeXusDetectorName)) result = results[NormalizedRunData[SampleRun]] @@ -147,25 +249,25 @@ def test_pipeline_can_compute_intermediate_results(workflow): if detector_name in ('endcap_backward', 'endcap_forward'): expected_dims.add('sumo') - assert set(result.dims) == expected_dims + assert expected_dims.issubset(set(result.dims)) -def test_pipeline_group_by_two_theta(workflow): +def test_pipeline_group_by_two_theta(workflow, tof_workflow): + workflow[TofWorkflow] = tof_workflow two_theta_bins = sc.linspace( dim='two_theta', unit='rad', start=0.8, stop=2.4, num=17 ) workflow[TwoThetaBins] = two_theta_bins workflow = powder.with_pixel_mask_filenames(workflow, []) result = workflow.compute(IofDspacingTwoTheta) - assert result.sizes == { - 'two_theta': 16, - 'dspacing': len(params[DspacingBins]) - 1, - } + assert result.sizes['two_theta'] == 16 + assert result.sizes['dspacing'] == len(params[DspacingBins]) - 1 assert sc.identical(result.coords['dspacing'], params[DspacingBins]) assert sc.allclose(result.coords['two_theta'], two_theta_bins) -def test_pipeline_wavelength_masking(workflow): +def test_pipeline_wavelength_masking(workflow, tof_workflow): + workflow[TofWorkflow] = tof_workflow wmin = sc.scalar(0.18, unit="angstrom") wmax = sc.scalar(0.21, unit="angstrom") workflow[WavelengthMask] = lambda x: (x > wmin) & (x < wmax) @@ -183,7 +285,8 @@ def test_pipeline_wavelength_masking(workflow): ) -def test_pipeline_two_theta_masking(workflow): +def test_pipeline_two_theta_masking(workflow, tof_workflow): + workflow[TofWorkflow] = tof_workflow tmin = sc.scalar(1.0, unit="rad") tmax = sc.scalar(1.2, unit="rad") workflow[TwoThetaMask] = lambda x: (x > tmin) & (x < tmax) @@ -199,14 +302,8 @@ def test_pipeline_two_theta_masking(workflow): ) -def test_use_workflow_helper(workflow): - workflow = powder.with_pixel_mask_filenames(workflow, []) - result = workflow.compute(IofDspacing) - assert result.sizes == {'dspacing': len(params[DspacingBins]) - 1} - assert sc.identical(result.coords['dspacing'], params[DspacingBins]) - - -def test_pipeline_can_save_data(workflow): +def test_pipeline_can_save_data(workflow, tof_workflow): + workflow[TofWorkflow] = tof_workflow workflow = powder.with_pixel_mask_filenames(workflow, []) result = workflow.compute(ReducedTofCIF) diff --git a/tox.ini b/tox.ini index c933dc84..64468128 100644 --- a/tox.ini +++ b/tox.ini @@ -67,5 +67,5 @@ deps = tomli skip_install = true changedir = requirements -commands = python ./make_base.py --nightly scipp,scippnexus,sciline,plopp,scippneutron,essreduce +commands = python ./make_base.py --nightly scipp,scippnexus,sciline,plopp,scippneutron,essreduce,tof pip-compile-multi -d . --backtracking
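
Reviewer note: a minimal, self-contained sketch (not part of the patch) of the wrap/unwrap arithmetic that the new `assemble_detector_data` provider performs and that the `tof` sub-workflow later inverts. Only `period` and the `% period` folding are taken from the diff above; the example tof values and the names `event_time_offset` and `pulse_index` are illustrative.

```python
import scipp as sc

# Source pulse period at 14 Hz, as in assemble_detector_data.
period = (1.0 / sc.scalar(14.0, unit="Hz")).to(unit="us")  # ~71428.6 us

# Hypothetical raw times-of-flight; a real file only records event_time_offset.
tof = sc.array(dims=["event"], values=[5.0e3, 8.0e4, 1.5e5], unit="us")

# What the detector records: time since the most recent source pulse.
event_time_offset = tof % period

# Which pulse each event originated from; recovering this (using the chopper
# cascade and the simulated source) is what the tof sub-workflow does.
pulse_index = sc.floor(tof / period).astype("int64")

print(event_time_offset.values)  # approx. [5000.0, 8571.4, 7142.9]
print(pulse_index.values)        # [0 1 2]
```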