diff --git a/conda/meta.yaml b/conda/meta.yaml index 3d49709..f62f25e 100644 --- a/conda/meta.yaml +++ b/conda/meta.yaml @@ -12,6 +12,7 @@ requirements: - setuptools_scm run: - dask + - python-dateutil - graphviz - plopp - pythreejs diff --git a/docs/api-reference/index.md b/docs/api-reference/index.md index 96c6889..0e4f20b 100644 --- a/docs/api-reference/index.md +++ b/docs/api-reference/index.md @@ -29,7 +29,8 @@ :template: module-template.rst :recursive: - types amor + orso supermirror + types ``` diff --git a/docs/examples/amor.ipynb b/docs/examples/amor.ipynb index a7697b3..5c8a28c 100644 --- a/docs/examples/amor.ipynb +++ b/docs/examples/amor.ipynb @@ -29,7 +29,7 @@ "metadata": {}, "outputs": [], "source": [ - "params={\n", + "params = {\n", " **default_parameters,\n", " QBins: sc.geomspace(dim='Q', start=0.008, stop=0.075, num=200, unit='1/angstrom'),\n", " SampleRotation[Sample]: sc.scalar(0.7989, unit='deg'),\n", @@ -118,6 +118,150 @@ "source": [ "This plot can be used to check if the value of the sample rotation angle $\\omega$ is correct. The bright triangles should be pointing back to the origin $\\lambda = \\theta = 0$." ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Save data\n", + "\n", + "We can save the computed $I(Q)$ to an [ORSO](https://www.reflectometry.org) [.ort](https://github.com/reflectivity/file_format/blob/master/specification.md) file using the [orsopy](https://orsopy.readthedocs.io/en/latest/index.html) package.\n", + "\n", + "First, we need to collect the metadata for that file.\n", + "To this end, we build a pipeline with additional providers.\n", + "We also insert a parameter to indicate the creator of the processed data." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from essreflectometry import orso\n", + "from essreflectometry.amor import orso as amor_orso\n", + "from orsopy import fileio" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "providers_with_metadata = (\n", + " *providers,\n", + " *orso.providers,\n", + " *amor_orso.providers,\n", + ")\n", + "\n", + "params[orso.OrsoCreator] = orso.OrsoCreator(fileio.base.Person(\n", + " name='Max Mustermann',\n", + " affiliation='European Spallation Source ERIC',\n", + " contact='max.mustermann@ess.eu',\n", + "))\n", + "\n", + "metadata_pipeline = sciline.Pipeline(\n", + " providers_with_metadata,\n", + " params=params\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Then, we recompute $I(Q)$ and combine it with the ORSO metadata:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "iofq_dataset = metadata_pipeline.compute(orso.OrsoIofQDataset)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Unfortunately, some metadata could not be determined automatically.\n", + "In particular, we need to specify the sample manually:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "iofq_dataset.info.data_source.sample" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "iofq_dataset.info.data_source.sample = fileio.data_source.Sample(\n", + " name='Ni / Ti Multilayer',\n", + " model=fileio.data_source.SampleModel(\n", + " stack='air | (Ni | Ti) * 5 | Si',\n", + " ),\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "And we also add the URL of this notebook to make it easier to reproduce the data:" + ] + }, + { + "cell_type": 
"code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "iofq_dataset.info.reduction.script = 'https://scipp.github.io/essreflectometry/examples/amor.html'" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Finally, we can save the data to a file.\n", + "Note that `iofq_dataset` is an [orsopy.fileio.orso.OrsoDataset](https://orsopy.readthedocs.io/en/latest/orsopy.fileio.orso.html#orsopy.fileio.orso.OrsoDataset)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "iofq_dataset.save('amor_reduced_iofq.ort')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Look at the first 50 lines of the file to inspect the metadata:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!head amor_reduced_iofq.ort -n50" + ] } ], "metadata": { @@ -136,7 +280,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.12" + "version": "3.10.13" } }, "nbformat": 4, diff --git a/pyproject.toml b/pyproject.toml index 8c1fbbc..8f3b407 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,6 +31,7 @@ requires-python = ">=3.10" # Make sure to list one dependency per line. dependencies = [ "dask", + "python-dateutil", "graphviz", "plopp", "pythreejs", diff --git a/requirements/base.in b/requirements/base.in index d2212ef..65421de 100644 --- a/requirements/base.in +++ b/requirements/base.in @@ -6,6 +6,7 @@ ipython==8.9.0 # --- END OF CUSTOM SECTION --- # The following was generated by 'tox -e deps', DO NOT EDIT MANUALLY! 
dask +python-dateutil graphviz plopp pythreejs diff --git a/requirements/base.txt b/requirements/base.txt index f17c685..8294d51 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -1,4 +1,4 @@ -# SHA1:301651c34ea6fe2705e74c10ff13dedb897c14ff +# SHA1:df5a1bd38eaeab3b3099d7d737171e2192615a39 # # This file is autogenerated by pip-compile-multi # To update, run: @@ -9,7 +9,7 @@ asttokens==2.4.1 # via stack-data backcall==0.2.0 # via ipython -certifi==2023.11.17 +certifi==2024.2.2 # via requests charset-normalizer==3.3.2 # via requests @@ -23,13 +23,13 @@ contourpy==1.2.0 # via matplotlib cycler==0.12.1 # via matplotlib -dask==2023.12.1 +dask==2024.1.1 # via -r base.in decorator==5.1.1 # via ipython executing==2.0.1 # via stack-data -fonttools==4.47.0 +fonttools==4.47.2 # via matplotlib fsspec==2023.12.2 # via dask @@ -92,9 +92,9 @@ pickleshare==0.7.5 # via ipython pillow==10.2.0 # via matplotlib -platformdirs==4.1.0 +platformdirs==4.2.0 # via pooch -plopp==23.11.0 +plopp==24.1.1 # via -r base.in pooch==1.8.0 # via scippneutron @@ -112,6 +112,7 @@ pyparsing==3.1.1 # via matplotlib python-dateutil==2.8.2 # via + # -r base.in # matplotlib # scippnexus pythreejs==2.4.2 @@ -122,18 +123,18 @@ pyyaml==6.0.1 # orsopy requests==2.31.0 # via pooch -sciline==24.1.0 +sciline==24.1.1 # via -r base.in scipp==23.12.0 # via # -r base.in # scippneutron # scippnexus -scippneutron==23.11.0 +scippneutron==24.1.0 # via -r base.in -scippnexus==23.12.0 +scippnexus==23.12.1 # via scippneutron -scipy==1.11.4 +scipy==1.12.0 # via # scippneutron # scippnexus @@ -143,7 +144,7 @@ six==1.16.0 # python-dateutil stack-data==0.6.3 # via ipython -toolz==0.12.0 +toolz==0.12.1 # via # dask # partd @@ -157,9 +158,9 @@ traitlets==5.14.1 # traittypes traittypes==0.2.1 # via ipydatawidgets -urllib3==2.1.0 +urllib3==2.2.0 # via requests -wcwidth==0.2.12 +wcwidth==0.2.13 # via prompt-toolkit widgetsnbextension==4.0.9 # via ipywidgets diff --git a/requirements/basetest.txt 
b/requirements/basetest.txt index c2562f8..3a058ef 100644 --- a/requirements/basetest.txt +++ b/requirements/basetest.txt @@ -11,9 +11,9 @@ iniconfig==2.0.0 # via pytest packaging==23.2 # via pytest -pluggy==1.3.0 +pluggy==1.4.0 # via pytest -pytest==7.4.4 +pytest==8.0.0 # via -r basetest.in tomli==2.0.1 # via pytest diff --git a/requirements/ci.txt b/requirements/ci.txt index e33fbbc..d6f215e 100644 --- a/requirements/ci.txt +++ b/requirements/ci.txt @@ -7,7 +7,7 @@ # cachetools==5.3.2 # via tox -certifi==2023.11.17 +certifi==2024.2.2 # via requests chardet==5.2.0 # via tox @@ -23,7 +23,7 @@ filelock==3.13.1 # virtualenv gitdb==4.0.11 # via gitpython -gitpython==3.1.40 +gitpython==3.1.41 # via -r ci.in idna==3.6 # via requests @@ -32,11 +32,11 @@ packaging==23.2 # -r ci.in # pyproject-api # tox -platformdirs==4.1.0 +platformdirs==4.2.0 # via # tox # virtualenv -pluggy==1.3.0 +pluggy==1.4.0 # via tox pyproject-api==1.6.1 # via tox @@ -48,9 +48,9 @@ tomli==2.0.1 # via # pyproject-api # tox -tox==4.11.4 +tox==4.12.1 # via -r ci.in -urllib3==2.1.0 +urllib3==2.2.0 # via requests virtualenv==20.25.0 # via tox diff --git a/requirements/dev.txt b/requirements/dev.txt index dc88310..14741b5 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -26,7 +26,7 @@ async-lru==2.0.4 # via jupyterlab cffi==1.16.0 # via argon2-cffi-bindings -copier==9.1.0 +copier==9.1.1 # via -r dev.in dunamai==1.19.0 # via copier @@ -42,30 +42,30 @@ json5==0.9.14 # via jupyterlab-server jsonpointer==2.4 # via jsonschema -jsonschema[format-nongpl]==4.20.0 +jsonschema[format-nongpl]==4.21.1 # via # jupyter-events # jupyterlab-server # nbformat jupyter-events==0.9.0 # via jupyter-server -jupyter-lsp==2.2.1 +jupyter-lsp==2.2.2 # via jupyterlab -jupyter-server==2.12.1 +jupyter-server==2.12.5 # via # jupyter-lsp # jupyterlab # jupyterlab-server # notebook-shim -jupyter-server-terminals==0.5.1 +jupyter-server-terminals==0.5.2 # via jupyter-server -jupyterlab==4.0.10 +jupyterlab==4.0.12 # via -r 
dev.in jupyterlab-server==2.25.2 # via jupyterlab notebook-shim==0.2.3 # via jupyterlab -overrides==7.4.0 +overrides==7.7.0 # via jupyter-server pathspec==0.12.1 # via copier @@ -79,9 +79,9 @@ prometheus-client==0.19.0 # via jupyter-server pycparser==2.21 # via cffi -pydantic==2.5.3 +pydantic==2.6.0 # via copier -pydantic-core==2.14.6 +pydantic-core==2.16.1 # via pydantic python-json-logger==2.0.7 # via jupyter-events @@ -107,7 +107,7 @@ terminado==0.18.0 # jupyter-server-terminals toposort==1.10 # via pip-compile-multi -types-python-dateutil==2.8.19.14 +types-python-dateutil==2.8.19.20240106 # via arrow uri-template==1.3.0 # via jsonschema diff --git a/requirements/docs.txt b/requirements/docs.txt index 9c5c7b3..0bac14b 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -8,7 +8,7 @@ -r base.txt accessible-pygments==0.0.4 # via pydata-sphinx-theme -alabaster==0.7.13 +alabaster==0.7.16 # via sphinx attrs==23.2.0 # via @@ -18,7 +18,7 @@ babel==2.14.0 # via # pydata-sphinx-theme # sphinx -beautifulsoup4==4.12.2 +beautifulsoup4==4.12.3 # via # nbconvert # pydata-sphinx-theme @@ -38,15 +38,15 @@ fastjsonschema==2.19.1 # via nbformat imagesize==1.4.1 # via sphinx -ipykernel==6.28.0 +ipykernel==6.29.0 # via -r docs.in -jinja2==3.1.2 +jinja2==3.1.3 # via # myst-parser # nbconvert # nbsphinx # sphinx -jsonschema==4.20.0 +jsonschema==4.21.1 # via nbformat jsonschema-specifications==2023.12.1 # via jsonschema @@ -54,7 +54,7 @@ jupyter-client==8.6.0 # via # ipykernel # nbclient -jupyter-core==5.6.1 +jupyter-core==5.7.1 # via # ipykernel # jupyter-client @@ -67,7 +67,7 @@ markdown-it-py==3.0.0 # via # mdit-py-plugins # myst-parser -markupsafe==2.1.3 +markupsafe==2.1.4 # via # jinja2 # nbconvert @@ -81,7 +81,7 @@ myst-parser==2.0.0 # via -r docs.in nbclient==0.9.0 # via nbconvert -nbconvert==7.14.0 +nbconvert==7.14.2 # via nbsphinx nbformat==5.9.2 # via @@ -90,23 +90,23 @@ nbformat==5.9.2 # nbsphinx nbsphinx==0.9.3 # via -r docs.in -nest-asyncio==1.5.8 
+nest-asyncio==1.6.0 # via ipykernel -pandocfilters==1.5.0 +pandocfilters==1.5.1 # via nbconvert -psutil==5.9.7 +psutil==5.9.8 # via ipykernel -pydata-sphinx-theme==0.14.4 +pydata-sphinx-theme==0.15.2 # via -r docs.in pyzmq==25.1.2 # via # ipykernel # jupyter-client -referencing==0.32.0 +referencing==0.33.0 # via # jsonschema # jsonschema-specifications -rpds-py==0.16.2 +rpds-py==0.17.1 # via # jsonschema # referencing @@ -123,28 +123,23 @@ sphinx==7.2.6 # sphinx-autodoc-typehints # sphinx-copybutton # sphinx-design - # sphinxcontrib-applehelp - # sphinxcontrib-devhelp - # sphinxcontrib-htmlhelp - # sphinxcontrib-qthelp - # sphinxcontrib-serializinghtml -sphinx-autodoc-typehints==1.25.2 +sphinx-autodoc-typehints==1.25.3 # via -r docs.in sphinx-copybutton==0.5.2 # via -r docs.in sphinx-design==0.5.0 # via -r docs.in -sphinxcontrib-applehelp==1.0.7 +sphinxcontrib-applehelp==1.0.8 # via sphinx -sphinxcontrib-devhelp==1.0.5 +sphinxcontrib-devhelp==1.0.6 # via sphinx -sphinxcontrib-htmlhelp==2.0.4 +sphinxcontrib-htmlhelp==2.0.5 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.6 +sphinxcontrib-qthelp==1.0.7 # via sphinx -sphinxcontrib-serializinghtml==1.1.9 +sphinxcontrib-serializinghtml==1.1.10 # via sphinx tinycss2==1.2.1 # via nbconvert diff --git a/requirements/nightly.in b/requirements/nightly.in index df92035..abd07ec 100644 --- a/requirements/nightly.in +++ b/requirements/nightly.in @@ -2,6 +2,7 @@ # --- END OF CUSTOM SECTION --- # The following was generated by 'tox -e deps', DO NOT EDIT MANUALLY! 
dask +python-dateutil graphviz pythreejs orsopy diff --git a/requirements/nightly.txt b/requirements/nightly.txt index 5ba98dd..dffe194 100644 --- a/requirements/nightly.txt +++ b/requirements/nightly.txt @@ -1,4 +1,4 @@ -# SHA1:d3f5454a10d0a8c1b7343a01b321768806db1053 +# SHA1:32b8662e93850ff7332a96796cff863431faf020 # # This file is autogenerated by pip-compile-multi # To update, run: @@ -8,7 +8,7 @@ -r basetest.txt asttokens==2.4.1 # via stack-data -certifi==2023.11.17 +certifi==2024.2.2 # via requests charset-normalizer==3.3.2 # via requests @@ -22,13 +22,13 @@ contourpy==1.2.0 # via matplotlib cycler==0.12.1 # via matplotlib -dask==2023.12.1 +dask==2024.1.1 # via -r nightly.in decorator==5.1.1 # via ipython executing==2.0.1 # via stack-data -fonttools==4.47.0 +fonttools==4.47.2 # via matplotlib fsspec==2023.12.2 # via dask @@ -44,7 +44,7 @@ importlib-metadata==7.0.1 # via dask ipydatawidgets==4.3.5 # via pythreejs -ipython==8.19.0 +ipython==8.21.0 # via ipywidgets ipywidgets==8.1.1 # via @@ -82,7 +82,7 @@ pexpect==4.9.0 # via ipython pillow==10.2.0 # via matplotlib -platformdirs==4.1.0 +platformdirs==4.2.0 # via pooch plopp @ git+https://github.com/scipp/plopp@main # via -r nightly.in @@ -100,6 +100,7 @@ pyparsing==3.1.1 # via matplotlib python-dateutil==2.8.2 # via + # -r nightly.in # matplotlib # scippnexus pythreejs==2.4.2 @@ -123,7 +124,7 @@ scippnexus @ git+https://github.com/scipp/scippnexus@main # via # -r nightly.in # scippneutron -scipy==1.11.4 +scipy==1.12.0 # via # scippneutron # scippnexus @@ -133,7 +134,7 @@ six==1.16.0 # python-dateutil stack-data==0.6.3 # via ipython -toolz==0.12.0 +toolz==0.12.1 # via # dask # partd @@ -147,9 +148,9 @@ traitlets==5.14.1 # traittypes traittypes==0.2.1 # via ipydatawidgets -urllib3==2.1.0 +urllib3==2.2.0 # via requests -wcwidth==0.2.12 +wcwidth==0.2.13 # via prompt-toolkit widgetsnbextension==4.0.9 # via ipywidgets diff --git a/requirements/static.txt b/requirements/static.txt index fa660fb..53c7664 100644 --- 
a/requirements/static.txt +++ b/requirements/static.txt @@ -15,7 +15,7 @@ identify==2.5.33 # via pre-commit nodeenv==1.8.0 # via pre-commit -platformdirs==4.1.0 +platformdirs==4.2.0 # via virtualenv pre-commit==3.6.0 # via -r static.in diff --git a/src/essreflectometry/__init__.py b/src/essreflectometry/__init__.py index 3e759bd..b48ac33 100644 --- a/src/essreflectometry/__init__.py +++ b/src/essreflectometry/__init__.py @@ -3,22 +3,20 @@ # flake8: noqa: F401 import importlib.metadata -import itertools -from . import calibrations, conversions, corrections, normalize +from . import calibrations, conversions, corrections, normalize, reductions try: __version__ = importlib.metadata.version(__package__ or __name__) except importlib.metadata.PackageNotFoundError: __version__ = "0.0.0" -providers = list( - itertools.chain( - conversions.providers, - corrections.providers, - calibrations.providers, - normalize.providers, - ) +providers = ( + *conversions.providers, + *corrections.providers, + *calibrations.providers, + *normalize.providers, + *reductions.providers, ) """ List of providers for setting up a Sciline pipeline. @@ -30,4 +28,3 @@ """ del importlib -del itertools diff --git a/src/essreflectometry/amor/__init__.py b/src/essreflectometry/amor/__init__.py index 2dd29b0..de9d5c0 100644 --- a/src/essreflectometry/amor/__init__.py +++ b/src/essreflectometry/amor/__init__.py @@ -1,8 +1,6 @@ # SPDX-License-Identifier: BSD-3-Clause # Copyright (c) 2023 Scipp contributors (https://github.com/scipp) # flake8: noqa: F401 -import itertools - import scipp as sc from .. 
import providers as reflectometry_providers @@ -29,14 +27,12 @@ WavelengthResolution, ) -providers = list( - itertools.chain( - reflectometry_providers, - load.providers, - conversions.providers, - resolution.providers, - beamline.providers, - ) +providers = ( + *reflectometry_providers, + *load.providers, + *conversions.providers, + *resolution.providers, + *beamline.providers, ) """ List of providers for setting up a Sciline pipeline. @@ -59,4 +55,3 @@ } del sc -del itertools diff --git a/src/essreflectometry/amor/beamline.py b/src/essreflectometry/amor/beamline.py index 30479b8..b7fbc40 100644 --- a/src/essreflectometry/amor/beamline.py +++ b/src/essreflectometry/amor/beamline.py @@ -131,4 +131,4 @@ def instrument_view_components(da: sc.DataArray) -> dict: } -providers = [make_beamline] +providers = (make_beamline,) diff --git a/src/essreflectometry/amor/conversions.py b/src/essreflectometry/amor/conversions.py index f3ed443..4c94148 100644 --- a/src/essreflectometry/amor/conversions.py +++ b/src/essreflectometry/amor/conversions.py @@ -32,4 +32,4 @@ def specular_reflection() -> SpecularReflectionCoordTransformGraph: return SpecularReflectionCoordTransformGraph(graph) -providers = [specular_reflection] +providers = (specular_reflection,) diff --git a/src/essreflectometry/amor/load.py b/src/essreflectometry/amor/load.py index bcfc793..d218324 100644 --- a/src/essreflectometry/amor/load.py +++ b/src/essreflectometry/amor/load.py @@ -7,7 +7,7 @@ import scippnexus as snx from ..logging import get_logger -from ..types import Filename, RawData, Run +from ..types import Filename, RawData, RawEvents, Run from .data import get_path from .types import BeamlineParams @@ -64,7 +64,7 @@ def _assemble_event_data(dg: sc.DataGroup) -> sc.DataArray: : A data array with the events extracted from ``dg``. 
""" - events = dg['instrument']['multiblade_detector']['data'] + events = dg['instrument']['multiblade_detector']['data'].copy(deep=False) events.bins.coords['tof'] = events.bins.coords.pop('event_time_offset') events.coords['position'] = sc.spatial.as_vectors( events.coords.pop('x_pixel_offset'), @@ -85,28 +85,45 @@ def _load_nexus_entry(filename: Union[str, Path]) -> sc.DataGroup: return f['entry'][()] -def load(filename: Filename[Run], beamline: BeamlineParams[Run]) -> RawData[Run]: - """Load a single Amor data file. +def load_raw_nexus(filename: Filename[Run]) -> RawData[Run]: + """Load unprocessed data and metadata from an Amor NeXus file. Parameters ---------- filename: - Path of the file to load. - beamline: - A dict defining the beamline parameters. + Filename of the NeXus file. Returns ------- : - Data array object for Amor dataset. + Data and metadata. """ filename = get_path(filename) get_logger('amor').info( "Loading '%s' as an Amor NeXus file", filename.filename if hasattr(filename, 'filename') else filename, ) - full_data = _load_nexus_entry(filename) - data = _assemble_event_data(full_data) + return RawData(_load_nexus_entry(filename)) + + +def extract_events( + raw_data: RawData[Run], beamline: BeamlineParams[Run] +) -> RawEvents[Run]: + """Extract the events from unprocessed NeXus data. + + Parameters + ---------- + raw_data: + Data in a form representing an Amor NeXus file. + beamline: + A dict defining the beamline parameters. + + Returns + ------- + : + Data array object for Amor dataset. + """ + data = _assemble_event_data(raw_data) # Recent versions of scippnexus no longer add variances for events by default, so # we add them here if they are missing. 
@@ -136,32 +153,7 @@ def load(filename: Filename[Run], beamline: BeamlineParams[Run]) -> RawData[Run] data.coords['position'].fields.y += data.coords['position'].fields.z * sc.tan( 2.0 * data.coords['sample_rotation'] - (0.955 * sc.units.deg) ) - return data - - -# TODO -# def populate_orso(orso: Any, data: sc.DataGroup, filename: str) -> Any: -# """ -# Populate the Orso object, by calling the :code:`base_orso` and adding data from the -# file. -# -# Parameters -# ---------- -# orso: -# The orso object to be populated by additional information from the loaded file. -# data: -# Data group to source information from. -# Should mimic the structure of the NeXus file. -# filename: -# Path of the file to load. -# """ -# orso.data_source.experiment.title = data['title'] -# orso.data_source.experiment.instrument = data['name'] -# orso.data_source.experiment.start_date = datetime.strftime( -# datetime.strptime(data['start_time'][:-3], '%Y-%m-%dT%H:%M:%S.%f'), -# '%Y-%m-%d', -# ) -# orso.data_source.measurement.data_files = [filename] - - -providers = [load] + return RawEvents[Run](data) + + +providers = (extract_events, load_raw_nexus) diff --git a/src/essreflectometry/amor/orso.py b/src/essreflectometry/amor/orso.py index 99bb687..d80415b 100644 --- a/src/essreflectometry/amor/orso.py +++ b/src/essreflectometry/amor/orso.py @@ -1,60 +1,112 @@ # SPDX-License-Identifier: BSD-3-Clause # Copyright (c) 2023 Scipp contributors (https://github.com/scipp) -import platform -from datetime import datetime +"""ORSO utilities for Amor.""" +from typing import Optional -from orsopy import fileio +import numpy as np +import scipp as sc +from orsopy.fileio import base as orso_base +from orsopy.fileio import data_source as orso_data_source +from orsopy.fileio.orso import Column, Orso, OrsoDataset -from .. 
import __version__ +from ..orso import OrsoDataSource, OrsoInstrument, OrsoIofQDataset, OrsoReduction +from ..types import NormalizedIofQ1D, QResolution, Sample, ThetaData, WavelengthData -def make_orso( - owner: fileio.base.Person, - sample: fileio.data_source.Sample, - creator: fileio.base.Person, - reduction_script: str, -) -> fileio.orso.Orso: - """ - Generate the base Orso object for the Amor instrument. - Populate the Orso object for metadata storage. - - Parameters - ---------- - owner: - The owner of the data set, i.e. the main proposer of the measurement. - sample: - A description of the sample. - creator: - The creator of the reduced data, the person responsible for the - reduction process. - reduction_script: - The script or notebook used for reduction. - - Returns - ------- - : - Orso object with the default parameters for the Amor instrument. +def build_orso_instrument( + events_in_wavelength: WavelengthData[Sample], events_in_theta: ThetaData[Sample] +) -> OrsoInstrument: + """Build ORSO instrument metadata from intermediate reduction results for Amor. + + This assumes specular reflection and sets the incident angle equal to the computed + scattering angle. """ - orso = fileio.orso.Orso.empty() - orso.data_source.experiment.probe = 'neutrons' - orso.data_source.experiment.facility = 'Paul Scherrer Institut' - orso.data_source.measurement.scheme = 'angle- and energy-dispersive' - orso.reduction.software = fileio.reduction.Software( - 'scipp-ess', __version__, platform.platform() + return OrsoInstrument( + orso_data_source.InstrumentSettings( + wavelength=orso_base.ValueRange( + *_limits_of_coord(events_in_wavelength, 'wavelength') + ), + incident_angle=orso_base.ValueRange( + *_limits_of_coord(events_in_theta, 'theta') + ), + polarization=None, # TODO how can we determine this from the inputs? 
+ ) + ) + + +def build_orso_iofq_dataset( + iofq: NormalizedIofQ1D, + sigma_q: QResolution, + data_source: OrsoDataSource, + reduction: OrsoReduction, +) -> OrsoIofQDataset: + """Build an ORSO dataset for reduced I-of-Q data and associated metadata.""" + header = Orso( + data_source=data_source, + reduction=reduction, + columns=[ + Column('Qz', '1/angstrom', 'wavevector transfer'), + Column('R', None, 'reflectivity'), + Column('sR', None, 'standard deviation of reflectivity'), + Column( + 'sQz', + '1/angstrom', + 'standard deviation of wavevector transfer resolution', + ), + ], ) - orso.reduction.timestep = datetime.now() - orso.reduction.corrections = [] - orso.reduction.computer = platform.node() - orso.columns = [ - fileio.base.Column('Qz', '1/angstrom', 'wavevector transfer'), - fileio.base.Column('R', None, 'reflectivity'), - fileio.base.Column('sR', None, 'standard deivation of reflectivity'), - fileio.base.Column( - 'sQz', '1/angstrom', 'standard deviation of wavevector transfer resolution' - ), - ] - orso.data_source.owner = owner - orso.data_source.sample = sample - orso.reduction.creator = creator - orso.reduction.script = reduction_script - return orso + + qz = iofq.coords['Q'].to(unit='1/angstrom', copy=False) + if iofq.coords.is_edges('Q'): + qz = sc.midpoints(qz) + r = sc.values(iofq.data) + sr = sc.stddevs(iofq.data) + sqz = sigma_q.to(unit='1/angstrom', copy=False) + data = (qz, r, sr, sqz) + + return OrsoIofQDataset( + OrsoDataset(header, np.column_stack([_extract_values_array(d) for d in data])) + ) + + +def _extract_values_array(var: sc.Variable) -> np.ndarray: + if var.variances is not None: + raise sc.VariancesError( + "ORT columns must not have variances. " + "Store the uncertainties as standard deviations in a separate column." 
+ ) + if var.ndim != 1: + raise sc.DimensionError(f"ORT columns must be one-dimensional, got {var.sizes}") + return var.values + + +def _limits_of_coord( + data: sc.DataArray, name: str +) -> Optional[tuple[float, float, str]]: + if (coord := _get_coord(data, name)) is None: + return None + min_ = coord.min().value + max_ = coord.max().value + # Explicit conversions to float because orsopy does not like np.float* types. + return float(min_), float(max_), _ascii_unit(min_) + + +def _get_coord(data: sc.DataArray, name: str) -> Optional[sc.Variable]: + try: + return data.coords[name] + except KeyError: + try: + return data.bins.coords[name] + except (KeyError, TypeError): + # KeyError if coord does not exist, TypeError if data is not binned. + return None + + +def _ascii_unit(unit: sc.Unit) -> str: + unit = str(unit) + if unit == 'Å': + return 'angstrom' + return unit + + +providers = (build_orso_instrument, build_orso_iofq_dataset) diff --git a/src/essreflectometry/amor/resolution.py b/src/essreflectometry/amor/resolution.py index 4596b92..02f6ca7 100644 --- a/src/essreflectometry/amor/resolution.py +++ b/src/essreflectometry/amor/resolution.py @@ -166,4 +166,4 @@ def sigma_Q( ).max('detector_number') * sc.midpoints(q_bins) -providers = [sigma_Q, angular_resolution, wavelength_resolution, sample_size_resolution] +providers = (sigma_Q, angular_resolution, wavelength_resolution, sample_size_resolution) diff --git a/src/essreflectometry/calibrations.py b/src/essreflectometry/calibrations.py index 0567c6b..ea57088 100644 --- a/src/essreflectometry/calibrations.py +++ b/src/essreflectometry/calibrations.py @@ -42,4 +42,4 @@ def calibration_factor( return calibration_factor -providers = [calibration_factor] +providers = (calibration_factor,) diff --git a/src/essreflectometry/conversions.py b/src/essreflectometry/conversions.py index 5e86f4c..747a657 100644 --- a/src/essreflectometry/conversions.py +++ b/src/essreflectometry/conversions.py @@ -13,7 +13,7 @@ 
HistogrammedQData, QBins, QData, - RawData, + RawEvents, Run, SpecularReflectionCoordTransformGraph, ThetaData, @@ -119,7 +119,7 @@ def specular_reflection() -> SpecularReflectionCoordTransformGraph: def tof_to_wavelength( - data_array: RawData[Run], + data_array: RawEvents[Run], graph: SpecularReflectionCoordTransformGraph, wavelength_edges: Optional[WavelengthEdges], ) -> WavelengthData[Run]: @@ -256,9 +256,9 @@ def histogram(data_array: QData[Run]) -> HistogrammedQData[Run]: return HistogrammedQData[Run](data_array.hist()) -providers = [ +providers = ( tof_to_wavelength, wavelength_to_theta, theta_to_q, histogram, -] +) diff --git a/src/essreflectometry/corrections.py b/src/essreflectometry/corrections.py index e01632b..82e4b1a 100644 --- a/src/essreflectometry/corrections.py +++ b/src/essreflectometry/corrections.py @@ -125,8 +125,8 @@ def beam_on_sample(beam_size: sc.Variable, theta: sc.Variable) -> sc.Variable: return beam_size / sc.sin(theta) -providers = [ +providers = ( footprint_correction, normalize_sample, normalize_reference, -] +) diff --git a/src/essreflectometry/io.py b/src/essreflectometry/io.py deleted file mode 100644 index df2e982..0000000 --- a/src/essreflectometry/io.py +++ /dev/null @@ -1,41 +0,0 @@ -# SPDX-License-Identifier: BSD-3-Clause -# Copyright (c) 2023 Scipp contributors (https://github.com/scipp) - -from typing import Optional - -import numpy as np -import scipp as sc - - -def save_ort( - data_array: sc.DataArray, filename: str, dimension: Optional[str] = None -) -> None: - """ - Save a data array with the ORSO .ort file format. - - Parameters - ---------- - data_array: - Scipp-data array to save. - filename: - Filename. - dimension: - String for dimension to perform mean over. 
- """ - from orsopy import fileio - - if filename[:-4] == '.ort': - raise ValueError("The expected output file ending is .ort.") - if dimension is not None: - data_array = data_array.mean(dimension) - q = data_array.coords['Q'] - if data_array.coords.is_edges('Q'): - q = sc.midpoints(q) - R = data_array.data - sR = sc.stddevs(data_array.data) - sq = data_array.coords['sigma_Q'] - dataset = fileio.orso.OrsoDataset( - data_array.attrs['orso'].value, - np.array([q.values, R.values, sR.values, sq.values]).T, - ) - fileio.orso.save_orso([dataset], filename) diff --git a/src/essreflectometry/normalize.py b/src/essreflectometry/normalize.py index 3fd699b..bbe0a70 100644 --- a/src/essreflectometry/normalize.py +++ b/src/essreflectometry/normalize.py @@ -37,14 +37,9 @@ def normalize_by_supermirror( # + supermirror.attrs['orso'].value.reduction.corrections # ) # ) - # normalized.attrs[ - # 'orso' - # ].value.data_source.measurement.reference = supermirror.attrs[ - # 'orso' - # ].value.data_source.measurement.data_files # except KeyError: # orso.not_found_warning() return NormalizedIofQ(normalized) -providers = [normalize_by_supermirror] +providers = (normalize_by_supermirror,) diff --git a/src/essreflectometry/orso.py b/src/essreflectometry/orso.py index c824ca0..74cc76d 100644 --- a/src/essreflectometry/orso.py +++ b/src/essreflectometry/orso.py @@ -1,15 +1,152 @@ # SPDX-License-Identifier: BSD-3-Clause # Copyright (c) 2023 Scipp contributors (https://github.com/scipp) -import warnings +"""ORSO utilities for reflectometry. +The Sciline providers and types in this module largely ignore the metadata +of reference runs and only use the metadata of the sample run. +""" -def not_found_warning(): - """ - A function to raise a orso specific error if necessary. 
+import os +import platform +from datetime import datetime, timezone +from typing import NewType, Optional + +from dateutil.parser import parse as parse_datetime +from orsopy.fileio import base as orso_base +from orsopy.fileio import data_source, orso, reduction + +from .types import Filename, RawData, Reference, Sample + +OrsoCreator = NewType('OrsoCreator', orso_base.Person) +"""ORSO creator, that is, the person who processed the data.""" + +OrsoDataSource = NewType('OrsoDataSource', data_source.DataSource) +"""ORSO data source.""" + +OrsoExperiment = NewType('OrsoExperiment', data_source.Experiment) +"""ORSO experiment for the sample run.""" + +OrsoInstrument = NewType('OrsoInstrument', data_source.InstrumentSettings) +"""ORSO instrument settings for the sample run.""" + +OrsoIofQDataset = NewType('OrsoIofQDataset', orso.OrsoDataset) +"""ORSO dataset for reduced I-of-Q data.""" + +OrsoMeasurement = NewType('OrsoMeasurement', data_source.Measurement) +"""ORSO measurement.""" + +OrsoOwner = NewType('OrsoOwner', orso_base.Person) +"""ORSO owner of a measurement.""" + +OrsoReduction = NewType('OrsoReduction', reduction.Reduction) +"""ORSO data reduction metadata.""" + +OrsoSample = NewType('OrsoSample', data_source.Sample) +"""ORSO sample.""" + + +def parse_orso_experiment(raw_data: RawData[Sample]) -> OrsoExperiment: + """Parse ORSO experiment metadata from raw NeXus data.""" + return OrsoExperiment( + data_source.Experiment( + title=raw_data['title'], + instrument=raw_data['instrument']['name'], + facility=raw_data.get('facility'), + start_date=parse_datetime(raw_data['start_time']), + probe='neutron', + ) + ) + + +def parse_orso_owner(raw_data: RawData[Sample]) -> OrsoOwner: + """Parse ORSO owner metadata from raw NeXus data.""" + return OrsoOwner( + orso_base.Person( + name=raw_data['user']['name'], + contact=raw_data['user']['email'], + affiliation=raw_data['user'].get('affiliation'), + ) + ) + + +def parse_orso_sample(raw_data: RawData[Sample]) -> OrsoSample: 
+ """Parse ORSO sample metadata from raw NeXus data.""" + if not raw_data.get('sample'): + return OrsoSample(data_source.Sample.empty()) + raise NotImplementedError('NeXus sample parsing is not implemented') + + +def build_orso_measurement( + sample_filename: Filename[Sample], + reference_filename: Optional[Filename[Reference]], + instrument: Optional[OrsoInstrument], +) -> OrsoMeasurement: + """Assemble ORSO measurement metadata.""" + # TODO populate timestamp + # doesn't work with a local file because we need the timestamp of the original, + # SciCat can provide that + if reference_filename: + additional_files = [ + orso_base.File( + file=os.path.basename(reference_filename), comment='supermirror' + ) + ] + else: + additional_files = [] + return OrsoMeasurement( + data_source.Measurement( + instrument_settings=instrument, + data_files=[orso_base.File(file=os.path.basename(sample_filename))], + additional_files=additional_files, + ) + ) + + +def build_orso_reduction(creator: Optional[OrsoCreator]) -> OrsoReduction: + """Construct ORSO reduction metadata. + + This assumes that ess.reflectometry is the primary piece of software + used to reduce the data. """ - warnings.warn( - "For metadata to be logged in the data array, " - "it is necessary to install the orsopy package.", - UserWarning, - stacklevel=2, + # Import here to break cycle __init__ -> io -> orso -> __init__ + from . 
import __version__ + + return OrsoReduction( + reduction.Reduction( + software=reduction.Software( + name='ess.reflectometry', + version=str(__version__), + platform=platform.system(), + ), + timestamp=datetime.now(tz=timezone.utc), + creator=creator, + corrections=[], + ) + ) + + +def build_orso_data_source( + owner: Optional[OrsoOwner], + sample: Optional[OrsoSample], + experiment: Optional[OrsoExperiment], + measurement: Optional[OrsoMeasurement], +) -> OrsoDataSource: + """Assemble an ORSO DataSource.""" + return OrsoDataSource( + data_source.DataSource( + owner=owner, + sample=sample, + experiment=experiment, + measurement=measurement, + ) ) + + +providers = ( + build_orso_data_source, + build_orso_measurement, + build_orso_reduction, + parse_orso_experiment, + parse_orso_owner, + parse_orso_sample, +) diff --git a/src/essreflectometry/reductions.py b/src/essreflectometry/reductions.py new file mode 100644 index 0000000..7f33f49 --- /dev/null +++ b/src/essreflectometry/reductions.py @@ -0,0 +1,12 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2023 Scipp contributors (https://github.com/scipp) + +from .types import NormalizedIofQ, NormalizedIofQ1D + + +def average_over_detectors(iofq: NormalizedIofQ) -> NormalizedIofQ1D: + """Average over all detector pixels.""" + return iofq.mean(dim='detector_number') + + +providers = (average_over_detectors,) diff --git a/src/essreflectometry/types.py b/src/essreflectometry/types.py index 72c7586..2a2bece 100644 --- a/src/essreflectometry/types.py +++ b/src/essreflectometry/types.py @@ -8,7 +8,11 @@ Run = TypeVar('Run', Reference, Sample) -class RawData(sciline.Scope[Run, sc.DataArray], sc.DataArray): +class RawData(sciline.Scope[Run, sc.DataGroup], sc.DataGroup): + """Data as loaded from a NeXus file.""" + + +class RawEvents(sciline.Scope[Run, sc.DataArray], sc.DataArray): """Event time data from nexus file, binned by `detector_number` (pixel of the detector frame).""" @@ -55,8 +59,12 @@ class 
IofQ(sciline.Scope[Run, sc.DataArray], sc.DataArray): NormalizedIofQ = NewType('NormalizedIofQ', sc.DataArray) -'''Normalized histogram over momentrum transfer and detector number, +'''Normalized histogram over momentum transfer and detector number, normalized by the calibrated reference measurement.''' + +NormalizedIofQ1D = NewType('NormalizedIofQ1D', sc.DataArray) +'''Normalized histogram reduced to 1 dimension.''' + QResolution = NewType('QResolution', sc.Variable) '''Resolution term for the momentum transfer for each bin of QBins.''' diff --git a/tests/orso_test.py b/tests/orso_test.py new file mode 100644 index 0000000..51b16f1 --- /dev/null +++ b/tests/orso_test.py @@ -0,0 +1,63 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2023 Scipp contributors (https://github.com/scipp) +from datetime import datetime + +import sciline +from orsopy import fileio + +import essreflectometry +from essreflectometry import orso +from essreflectometry.amor.load import providers as amor_load_providers +from essreflectometry.types import Filename, Sample + + +def test_build_orso_data_source(): + pipeline = sciline.Pipeline( + ( + *amor_load_providers, + *orso.providers, + ), + params={Filename[Sample]: 'sample.nxs'}, + ) + data_source = pipeline.compute(orso.OrsoDataSource) + expected = fileio.data_source.DataSource( + owner=fileio.base.Person( + name='J. 
Stahn', contact='jochen.stahn@psi.ch', affiliation=None + ), + sample=fileio.data_source.Sample.empty(), + experiment=fileio.data_source.Experiment( + title='commissioning', + instrument='AMOR', + start_date=datetime(2020, 11, 25, 16, 3, 10), + probe='neutron', + facility='SINQ', + ), + measurement=fileio.data_source.Measurement( + data_files=[fileio.base.File(file='sample.nxs')], + # We would need the full pipeline to determine this: + additional_files=[], + instrument_settings=None, + ), + ) + assert data_source == expected + + +def test_build_orso_reduction_without_creator(): + pipeline = sciline.Pipeline(orso.providers) + reduction = pipeline.compute(orso.OrsoReduction) + assert reduction.software.name == 'ess.reflectometry' + assert reduction.software.version == str(essreflectometry.__version__) + assert reduction.creator is None + + +def test_build_orso_reduction_with_creator(): + creator = fileio.base.Person( + name='Erika Mustermann', affiliation='ESS', contact='erika.mustermann@ess.eu' + ) + pipeline = sciline.Pipeline( + orso.providers, params={orso.OrsoCreator: orso.OrsoCreator(creator)} + ) + reduction = pipeline.compute(orso.OrsoReduction) + assert reduction.software.name == 'ess.reflectometry' + assert reduction.software.version == str(essreflectometry.__version__) + assert reduction.creator == creator